Working apalis cron and worker with 0.6.0-rc.5
Also renamed `pool` variables throughout codebase to `db` for clarity.
parent 764d3f23b8
commit a3450e202a

Cargo.lock (generated): 118 lines changed
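For reference, the apalis 0.6 release line splits the cron and Redis backends into their own crates (`apalis-cron`, `apalis-redis`) and replaces the 0.5-era `.stream(...)` / `.with_storage(...)` builder calls with a single `.backend(...)`. The sketch below condenses the crawler wiring from this commit into one self-contained example; the Redis URL, job payload, and handler body are illustrative placeholders rather than the crate's real ones.

```rust
use std::str::FromStr;
use std::sync::Arc;

use apalis::layers::retry::{RetryLayer, RetryPolicy};
use apalis::layers::tracing::TraceLayer;
use apalis::prelude::*;
use apalis_cron::{CronStream, Schedule};
use apalis_redis::RedisStorage;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use thiserror::Error;

// Illustrative stand-in for lib::jobs::AsyncJob.
#[derive(Debug, Clone, Serialize, Deserialize)]
enum AsyncJob {
    HelloWorld(String),
}

// Cron tick job: apalis-cron builds one of these from the tick timestamp.
#[derive(Debug, Clone)]
struct Crawl(DateTime<Utc>);

impl From<DateTime<Utc>> for Crawl {
    fn from(t: DateTime<Utc>) -> Self {
        Crawl(t)
    }
}

#[derive(Debug, Error)]
enum CrawlError {
    #[error("error queueing crawl feed job")]
    QueueJobError(String),
}

#[derive(Clone)]
struct State {
    apalis: RedisStorage<AsyncJob>,
}

// On every cron tick, push follow-up jobs onto the Redis-backed storage.
async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<(), CrawlError> {
    tracing::info!(job = ?job, "crawl");
    let mut apalis = state.apalis.clone();
    apalis
        .push(AsyncJob::HelloWorld("example-feed-id".to_string()))
        .await
        .map_err(|err| CrawlError::QueueJobError(err.to_string()))?;
    Ok(())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Redis storage: `connect` and `Config` now live in apalis_redis.
    let redis_conn = apalis_redis::connect("redis://127.0.0.1/").await?;
    let storage: RedisStorage<AsyncJob> =
        RedisStorage::new_with_config(redis_conn, apalis_redis::Config::default());
    let state = Arc::new(State { apalis: storage });

    // 0.6 attaches the schedule with `.backend(CronStream::new(..))` instead of
    // the 0.5-era `.stream(CronStream::new(..).into_stream())`.
    let schedule = Schedule::from_str("0 * * * * *").unwrap();
    let worker = WorkerBuilder::new("crawler")
        .layer(RetryLayer::new(RetryPolicy::default()))
        .layer(TraceLayer::new())
        .data(state)
        .backend(CronStream::new(schedule))
        .build_fn(crawl_fn);

    Monitor::<TokioExecutor>::new()
        .register_with_count(1, worker)
        .run()
        .await?;
    Ok(())
}
```

The worker binary in this commit follows the same pattern, registering a `RedisStorage`-backed worker with `.backend(storage)` instead of a cron stream.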
@@ -140,14 +140,11 @@ checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3"

 [[package]]
 name = "apalis"
-version = "0.5.3"
+version = "0.6.0-rc.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be13bf89e734a1ec4d44233429aafea5a9e693c98a4a126b00a29f321d4a2e03"
+checksum = "940921ba55b294cdb5535bb67a0fc29a8b637e0225f99337e4c86432d80da3f0"
 dependencies = [
 "apalis-core",
-"apalis-cron",
-"apalis-redis",
-"apalis-sql",
 "futures",
 "pin-project-lite",
 "serde",
@@ -160,9 +157,9 @@ dependencies = [

 [[package]]
 name = "apalis-core"
-version = "0.5.3"
+version = "0.6.0-rc.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fb0704a3274e289bebbe042d7adf2b1455a2afd084c7a835cfc2e918cad2eff"
+checksum = "9ca0df44161ac62045e29a56fd7d4356b6ac138413b761a23e5a0ce9973c1239"
 dependencies = [
 "async-oneshot",
 "futures",
@@ -177,9 +174,9 @@ dependencies = [

 [[package]]
 name = "apalis-cron"
-version = "0.5.3"
+version = "0.6.0-rc.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3918af413df3fb888bb662b7504ea16cbbabd20293a08f9e7548c57764612db"
+checksum = "ebc25afa9a8d8c1408d1969e15639b120afc53ee1cfa6464ca4fb4fa3c1f1d3f"
 dependencies = [
 "apalis-core",
 "async-stream",
@@ -191,9 +188,9 @@ dependencies = [

 [[package]]
 name = "apalis-redis"
-version = "0.5.3"
+version = "0.6.0-rc.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8deabd06576b44f87e0fa709e44aa7edc47937b4325eac78384168df47ba30b"
+checksum = "58b90be4cc2db77f062b3ee08ed913dffba0c509b71fd944e4e8241c2618705f"
 dependencies = [
 "apalis-core",
 "async-stream",
@@ -203,24 +200,9 @@ dependencies = [
 "log",
 "redis",
 "serde",
-"tokio",
-]
-
-[[package]]
-name = "apalis-sql"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb4df1ac2762e170a12a920d1f74207816341e5eed5870887cc3bcd9e8c59028"
-dependencies = [
-"apalis-core",
-"async-stream",
-"futures",
-"futures-lite",
-"log",
-"serde",
 "serde_json",
-"sqlx",
 "tokio",
+"tower",
 ]

 [[package]]
@@ -663,6 +645,8 @@ dependencies = [
 "ansi-to-html",
 "anyhow",
 "apalis",
+"apalis-cron",
+"apalis-redis",
 "async-trait",
 "axum",
 "axum-client-ip",
@@ -1118,19 +1102,6 @@ version = "0.3.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
-
-[[package]]
-name = "futures-lite"
-version = "2.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5"
-dependencies = [
-"fastrand",
-"futures-core",
-"futures-io",
-"parking",
-"pin-project-lite",
-]

 [[package]]
 name = "futures-macro"
 version = "0.3.30"
@@ -2194,12 +2165,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
-[[package]]
-name = "parking"
-version = "2.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae"

 [[package]]
 name = "parking_lot"
 version = "0.12.2"
@@ -2712,21 +2677,6 @@ dependencies = [
 "winreg 0.52.0",
 ]
-
-[[package]]
-name = "ring"
-version = "0.17.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d"
-dependencies = [
-"cc",
-"cfg-if",
-"getrandom",
-"libc",
-"spin 0.9.8",
-"untrusted",
-"windows-sys 0.52.0",
-]

 [[package]]
 name = "rmp"
 version = "0.8.14"
@@ -2788,17 +2738,6 @@ dependencies = [
 "windows-sys 0.52.0",
 ]
-
-[[package]]
-name = "rustls"
-version = "0.21.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
-dependencies = [
-"ring",
-"rustls-webpki",
-"sct",
-]

 [[package]]
 name = "rustls-pemfile"
 version = "1.0.4"
@@ -2824,16 +2763,6 @@ version = "1.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d"
-
-[[package]]
-name = "rustls-webpki"
-version = "0.101.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
-dependencies = [
-"ring",
-"untrusted",
-]

 [[package]]
 name = "rustversion"
 version = "1.0.16"
@@ -2870,16 +2799,6 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
-
-[[package]]
-name = "sct"
-version = "0.7.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
-dependencies = [
-"ring",
-"untrusted",
-]

 [[package]]
 name = "security-framework"
 version = "2.11.0"
@@ -3163,8 +3082,6 @@ dependencies = [
 "once_cell",
 "paste",
 "percent-encoding",
-"rustls",
-"rustls-pemfile 1.0.4",
 "serde",
 "serde_json",
 "sha2",
@@ -3176,7 +3093,6 @@ dependencies = [
 "tracing",
 "url",
 "uuid",
-"webpki-roots",
 ]

 [[package]]
@@ -3923,12 +3839,6 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
-
-[[package]]
-name = "untrusted"
-version = "0.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

 [[package]]
 name = "url"
 version = "2.5.0"
@@ -4133,12 +4043,6 @@ dependencies = [
 "wasm-bindgen",
 ]
-
-[[package]]
-name = "webpki-roots"
-version = "0.25.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"

 [[package]]
 name = "whoami"
 version = "1.5.1"
@@ -15,7 +15,10 @@ path = "src/lib.rs"
 ammonia = "4"
 ansi-to-html = "0.2"
 anyhow = "1"
-apalis = { version = "0.5", features = ["redis", "cron", "retry"] }
+# apalis v0.6 fixes this issue: https://github.com/geofmureithi/apalis/issues/351
+apalis = { version = "0.6.0-rc.5", features = ["retry"] }
+apalis-cron = "0.6.0-rc.5"
+apalis-redis = "0.6.0-rc.5"
 async-trait = "0.1"
 axum = { version = "0.7", features = ["form", "multipart", "query"] }
 axum-client-ip = "0.6"
@@ -56,7 +56,7 @@ Install these requirements to get started developing crawlnicle.
 directory with the contents:

 ```env
-RUST_LOG=crawlnicle=debug,cli=debug,lib=debug,tower_http=debug,sqlx=debug
+RUST_LOG=crawlnicle=debug,cli=debug,web=debug,worker=debug,crawler=debug,lib=debug,tower_http=debug,sqlx=debug
 HOST=127.0.0.1
 PORT=3000
 PUBLIC_URL=http://localhost:3000
@@ -97,7 +97,7 @@ pub async fn main() -> Result<()> {

 tracing_subscriber::fmt::init();

-let pool = PgPoolOptions::new()
+let db = PgPoolOptions::new()
 .max_connections(env::var("DATABASE_MAX_CONNECTIONS")?.parse()?)
 .connect(&env::var("DATABASE_URL")?)
 .await?;
@@ -108,7 +108,7 @@ pub async fn main() -> Result<()> {
 match cli.commands {
 Commands::AddFeed(args) => {
 let feed = Feed::create(
-&pool,
+&db,
 CreateFeed {
 title: args.title,
 url: args.url,
@@ -119,12 +119,12 @@ pub async fn main() -> Result<()> {
 info!("Created feed with id {}", Base62Uuid::from(feed.feed_id));
 }
 Commands::DeleteFeed(args) => {
-Feed::delete(&pool, args.id).await?;
+Feed::delete(&db, args.id).await?;
 info!("Deleted feed with id {}", Base62Uuid::from(args.id));
 }
 Commands::AddEntry(args) => {
 let entry = Entry::create(
-&pool,
+&db,
 CreateEntry {
 title: args.title,
 url: args.url,
@@ -137,7 +137,7 @@ pub async fn main() -> Result<()> {
 info!("Created entry with id {}", Base62Uuid::from(entry.entry_id));
 }
 Commands::DeleteEntry(args) => {
-Entry::delete(&pool, args.id).await?;
+Entry::delete(&db, args.id).await?;
 info!("Deleted entry with id {}", Base62Uuid::from(args.id));
 }
 Commands::Crawl(CrawlFeed { id }) => {
@@ -147,7 +147,7 @@ pub async fn main() -> Result<()> {
 // server is running, it will *not* serialize same-domain requests with it.
 let domain_locks = DomainLocks::new();
 let feed_crawler = FeedCrawlerHandle::new(
-pool.clone(),
+db.clone(),
 client.clone(),
 domain_locks.clone(),
 env::var("CONTENT_DIR")?,
@@ -1,19 +1,17 @@
-use anyhow::{anyhow, Result};
-use apalis::cron::{CronStream, Schedule};
 use apalis::layers::retry::{RetryLayer, RetryPolicy};
 use apalis::layers::tracing::TraceLayer;
 use apalis::prelude::*;
-use apalis::redis::RedisStorage;
+use apalis_cron::{CronStream, Schedule};
+use apalis_redis::RedisStorage;
 use chrono::{DateTime, Utc};
 use clap::Parser;
-use lib::actors::crawl_scheduler::CrawlSchedulerError;
 use lib::jobs::AsyncJob;
 use lib::models::feed::{Feed, GetFeedsOptions};
 use sqlx::postgres::PgPoolOptions;
 use sqlx::PgPool;
 use std::str::FromStr;
 use std::sync::Arc;
-use tower::ServiceBuilder;
+use thiserror::Error;
 use tracing::{info, instrument};

 use dotenvy::dotenv;
@@ -29,26 +27,32 @@ impl From<DateTime<Utc>> for Crawl {
 }
 }

-impl Job for Crawl {
-const NAME: &'static str = "apalis::Crawl";
+#[derive(Debug, Error)]
+enum CrawlError {
+#[error("error fetching feeds")]
+FetchFeedsError(#[from] sqlx::Error),
+#[error("error queueing crawl feed job")]
+QueueJobError(String),
 }

+#[derive(Clone)]
 struct State {
 pool: PgPool,
 apalis: RedisStorage<AsyncJob>,
 }

 #[instrument(skip_all)]
-pub async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<()> {
+pub async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<(), CrawlError> {
 tracing::info!(job = ?job, "crawl");
 let mut apalis = (state.apalis).clone();
 let mut options = GetFeedsOptions::default();
 loop {
 info!("fetching feeds before: {:?}", options.before);
+// TODO: filter to feeds where:
+// now >= feed.last_crawled_at + feed.crawl_interval_minutes
+// may need more indices...
 let feeds = match Feed::get_all(&state.pool, &options).await {
-Err(err) => {
-return Err(anyhow!(err));
-}
+Err(err) => return Err(CrawlError::FetchFeedsError(err)),
 Ok(feeds) if feeds.is_empty() => {
 info!("no more feeds found");
 break;
@@ -62,14 +66,15 @@ pub async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<()> {
 // self.spawn_crawler_loop(feed, respond_to.clone());
 apalis
 .push(AsyncJob::HelloWorld(feed.feed_id.to_string()))
-.await?;
+.await
+.map_err(|err| CrawlError::QueueJobError(err.to_string()))?;
 }
 }
 Ok(())
 }

 #[tokio::main]
-async fn main() -> Result<()> {
+async fn main() -> anyhow::Result<()> {
 dotenv().ok();
 let config = Config::parse();
 let _guard = init_worker_tracing()?;
@@ -80,24 +85,24 @@ async fn main() -> Result<()> {
 .connect(&config.database_url)
 .await?;

-// TODO: use redis_pool from above instead of making a new connection
+// TODO: create connection from redis_pool for each job instead using a single connection
 // See: https://github.com/geofmureithi/apalis/issues/290
-let redis_conn = apalis::redis::connect(config.redis_url.clone()).await?;
-let apalis_config = apalis::redis::Config::default();
-let mut apalis: RedisStorage<AsyncJob> =
-RedisStorage::new_with_config(redis_conn, apalis_config);
+let redis_conn = apalis_redis::connect(config.redis_url.clone()).await?;
+let apalis_config = apalis_redis::Config::default();
+let apalis_storage = RedisStorage::new_with_config(redis_conn, apalis_config);
+let state = Arc::new(State {
+pool,
+apalis: apalis_storage.clone(),
+});

 let schedule = Schedule::from_str("0 * * * * *").unwrap();
-// let service = ServiceBuilder::new()
-// .layer(RetryLayer::new(RetryPolicy::default()))
-// .layer(TraceLayer::new())
-// .service(service_fn(crawl_fn));

 let worker = WorkerBuilder::new("crawler")
-.stream(CronStream::new(schedule).into_stream())
 .layer(RetryLayer::new(RetryPolicy::default()))
 .layer(TraceLayer::new())
-.data(Arc::new(State { pool, apalis }))
+.data(state)
+.backend(CronStream::new(schedule))
 .build_fn(crawl_fn);

 Monitor::<TokioExecutor>::new()
@@ -2,7 +2,7 @@ use std::{collections::HashMap, net::SocketAddr, path::Path, sync::Arc};

 use anyhow::Result;
 use apalis::prelude::*;
-use apalis::redis::RedisStorage;
+use apalis_redis::RedisStorage;
 use axum::{
 routing::{get, post},
 Router,
@@ -66,7 +66,7 @@ async fn main() -> Result<()> {
 let domain_locks = DomainLocks::new();
 let client = Client::builder().user_agent(USER_AGENT).build()?;

-let pool = PgPoolOptions::new()
+let db = PgPoolOptions::new()
 .max_connections(config.database_max_connections)
 .acquire_timeout(std::time::Duration::from_secs(3))
 .connect(&config.database_url)
@@ -84,7 +84,7 @@ async fn main() -> Result<()> {
 )))
 .with_signed(Key::from(&BASE64_STANDARD.decode(&config.session_secret)?));

-let backend = Backend::new(pool.clone());
+let backend = Backend::new(db.clone());
 let auth_layer = AuthManagerLayerBuilder::new(backend, session_layer).build();

 let smtp_creds = Credentials::new(config.smtp_user.clone(), config.smtp_password.clone());
@@ -94,12 +94,12 @@ async fn main() -> Result<()> {
 .credentials(smtp_creds)
 .build();

-sqlx::migrate!().run(&pool).await?;
+sqlx::migrate!().run(&db).await?;

 // TODO: use redis_pool from above instead of making a new connection
 // See: https://github.com/geofmureithi/apalis/issues/290
-let redis_conn = apalis::redis::connect(config.redis_url.clone()).await?;
-let apalis_config = apalis::redis::Config::default();
+let redis_conn = apalis_redis::connect(config.redis_url.clone()).await?;
+let apalis_config = apalis_redis::Config::default();
 let mut apalis: RedisStorage<AsyncJob> =
 RedisStorage::new_with_config(redis_conn, apalis_config);

@@ -108,14 +108,14 @@ async fn main() -> Result<()> {
 .await?;

 let crawl_scheduler = CrawlSchedulerHandle::new(
-pool.clone(),
+db.clone(),
 client.clone(),
 domain_locks.clone(),
 config.content_dir.clone(),
 crawls.clone(),
 );
 let _ = crawl_scheduler.bootstrap().await;
-let importer = ImporterHandle::new(pool.clone(), crawl_scheduler.clone(), imports.clone());
+let importer = ImporterHandle::new(db.clone(), crawl_scheduler.clone(), imports.clone());

 let ip_source_extension = config.ip_source.0.clone().into_extension();

@@ -154,7 +154,7 @@ async fn main() -> Result<()> {
 .route("/reset-password", post(handlers::reset_password::post))
 .nest_service("/static", ServeDir::new("static"))
 .with_state(AppState {
-pool,
+db,
 config,
 log_receiver,
 crawls,
@@ -1,7 +1,7 @@
 use anyhow::Result;
 use apalis::layers::tracing::TraceLayer;
 use apalis::prelude::*;
-use apalis::redis::RedisStorage;
+use apalis_redis::RedisStorage;
 use clap::Parser;

 use dotenvy::dotenv;
@@ -18,15 +18,18 @@ async fn main() -> Result<()> {
 dotenv().ok();
 let config = Config::parse();
 let _guard = init_worker_tracing()?;
-let redis_conn = apalis::redis::connect(config.redis_url.clone()).await?;
-let apalis_config = apalis::redis::Config::default();
-let apalis: RedisStorage<AsyncJob> = RedisStorage::new_with_config(redis_conn, apalis_config);
+// TODO: create connection from redis_pool for each job instead using a single connection
+// See: https://github.com/geofmureithi/apalis/issues/290
+let redis_conn = apalis_redis::connect(config.redis_url.clone()).await?;
+let apalis_config = apalis_redis::Config::default();
+let apalis_storage: RedisStorage<AsyncJob> =
+RedisStorage::new_with_config(redis_conn, apalis_config);

 Monitor::<TokioExecutor>::new()
 .register_with_count(2, {
 WorkerBuilder::new("worker")
 .layer(TraceLayer::new())
-.with_storage(apalis.clone())
+.backend(apalis_storage)
 .build_fn(worker_fn)
 })
 .run()
@@ -27,7 +27,7 @@ pub enum Error {
 #[error("validation error in request body")]
 InvalidEntity(#[from] ValidationErrors),

-#[error("error with file upload: (0)")]
+#[error("error with file upload")]
 Upload(#[from] MultipartError),

 #[error("no file uploaded")]
@@ -49,7 +49,7 @@ pub enum Error {
 Unauthorized,

 #[error("bad request: {0}")]
-BadRequest(&'static str)
+BadRequest(&'static str),
 }

 pub type Result<T, E = Error> = ::std::result::Result<T, E>;
@@ -13,9 +13,9 @@ use crate::partials::entry_list::entry_list;
 pub async fn get(
 Query(options): Query<GetEntriesOptions>,
 accept: Option<TypedHeader<Accept>>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 ) -> Result<impl IntoResponse, impl IntoResponse> {
-let entries = Entry::get_all(&pool, &options).await.map_err(Error::from)?;
+let entries = Entry::get_all(&db, &options).await.map_err(Error::from)?;
 if let Some(TypedHeader(accept)) = accept {
 if accept == Accept::ApplicationJson {
 return Ok::<ApiResponse<Vec<Entry>>, Error>(ApiResponse::Json(entries));
@@ -9,15 +9,15 @@ use crate::models::entry::{CreateEntry, Entry};
 use crate::uuid::Base62Uuid;

 pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 Path(id): Path<Base62Uuid>,
 ) -> Result<Json<Entry>, Error> {
-Ok(Json(Entry::get(&pool, id.as_uuid()).await?))
+Ok(Json(Entry::get(&db, id.as_uuid()).await?))
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 Json(payload): Json<CreateEntry>,
 ) -> Result<Json<Entry>, Error> {
-Ok(Json(Entry::create(&pool, payload).await?))
+Ok(Json(Entry::create(&db, payload).await?))
 }
@@ -8,17 +8,17 @@ use crate::error::{Error, Result};
 use crate::models::feed::{CreateFeed, Feed};
 use crate::uuid::Base62Uuid;

-pub async fn get(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
-Ok(Json(Feed::get(&pool, id.as_uuid()).await?))
+pub async fn get(State(db): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
+Ok(Json(Feed::get(&db, id.as_uuid()).await?))
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 Json(payload): Json<CreateFeed>,
 ) -> Result<Json<Feed>, Error> {
-Ok(Json(Feed::create(&pool, payload).await?))
+Ok(Json(Feed::create(&db, payload).await?))
 }

-pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
-Feed::delete(&pool, id.as_uuid()).await
+pub async fn delete(State(db): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
+Feed::delete(&db, id.as_uuid()).await
 }
@@ -13,9 +13,9 @@ use crate::partials::feed_list::feed_list;
 pub async fn get(
 Query(options): Query<GetFeedsOptions>,
 accept: Option<TypedHeader<Accept>>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 ) -> Result<impl IntoResponse, impl IntoResponse> {
-let feeds = Feed::get_all(&pool, &options).await.map_err(Error::from)?;
+let feeds = Feed::get_all(&db, &options).await.map_err(Error::from)?;
 if let Some(TypedHeader(accept)) = accept {
 if accept == Accept::ApplicationJson {
 return Ok::<ApiResponse<Vec<Feed>>, Error>(ApiResponse::Json(feeds));
@@ -70,7 +70,7 @@ pub fn confirm_email_page(
 }

 pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 auth: AuthSession,
 hx_target: Option<TypedHeader<HXTarget>>,
 layout: Layout,
@@ -78,7 +78,7 @@ pub async fn get(
 ) -> Result<Response> {
 if let Some(token_id) = query.token_id {
 info!(token_id = %token_id.as_uuid(), "get with token_id");
-let token = match UserEmailVerificationToken::get(&pool, token_id.as_uuid()).await {
+let token = match UserEmailVerificationToken::get(&db, token_id.as_uuid()).await {
 Ok(token) => token,
 Err(err) => {
 if let Error::NotFoundUuid(_, _) = err {
@@ -112,8 +112,8 @@ pub async fn get(
 }))
 } else {
 info!(token_id = %token.token_id, "token valid, verifying email");
-User::verify_email(&pool, token.user_id).await?;
-UserEmailVerificationToken::delete(&pool, token.token_id).await?;
+User::verify_email(&db, token.user_id).await?;
+UserEmailVerificationToken::delete(&db, token.token_id).await?;
 Ok(layout
 .with_subtitle("confirm email")
 .targeted(hx_target)
@@ -152,7 +152,7 @@ pub struct ConfirmEmail {
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 State(mailer): State<SmtpTransport>,
 State(config): State<Config>,
 hx_target: Option<TypedHeader<HXTarget>>,
@@ -161,11 +161,11 @@ pub async fn post(
 ) -> Result<Response> {
 if let Some(token_id) = confirm_email.token {
 info!(%token_id, "posted with token_id");
-let token = UserEmailVerificationToken::get(&pool, token_id).await?;
-let user = User::get(&pool, token.user_id).await?;
+let token = UserEmailVerificationToken::get(&db, token_id).await?;
+let user = User::get(&db, token.user_id).await?;
 if !user.email_verified {
 info!(user_id = %user.user_id, "user exists, resending confirmation email");
-send_confirmation_email(pool, mailer, config, user);
+send_confirmation_email(db, mailer, config, user);
 } else {
 warn!(user_id = %user.user_id, "confirm email submitted for already verified user, skip resend");
 }
@@ -184,10 +184,10 @@ pub async fn post(
 }));
 }
 if let Some(email) = confirm_email.email {
-if let Ok(user) = User::get_by_email(&pool, email).await {
+if let Ok(user) = User::get_by_email(&db, email).await {
 if !user.email_verified {
 info!(user_id = %user.user_id, "user exists, resending confirmation email");
-send_confirmation_email(pool, mailer, config, user);
+send_confirmation_email(db, mailer, config, user);
 } else {
 warn!(user_id = %user.user_id, "confirm email submitted for already verified user, skip resend");
 }
@@ -8,8 +8,8 @@ use crate::partials::entry_list::entry_list;

 pub async fn get(
 Query(options): Query<GetEntriesOptions>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 ) -> Result<Markup> {
-let entries = Entry::get_all(&pool, &options).await?;
+let entries = Entry::get_all(&db, &options).await?;
 Ok(entry_list(entries, &options, false))
 }
@@ -16,12 +16,12 @@ use crate::uuid::Base62Uuid;

 pub async fn get(
 Path(id): Path<Base62Uuid>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 State(config): State<Config>,
 hx_target: Option<TypedHeader<HXTarget>>,
 layout: Layout,
 ) -> Result<Response> {
-let entry = Entry::get(&pool, id.as_uuid()).await?;
+let entry = Entry::get(&db, id.as_uuid()).await?;
 let content_dir = std::path::Path::new(&config.content_dir);
 let content_path = content_dir.join(format!("{}.html", entry.entry_id));
 let title = entry.title.unwrap_or_else(|| "Untitled Entry".to_string());
@@ -28,17 +28,17 @@ use crate::uuid::Base62Uuid;

 pub async fn get(
 Path(id): Path<Base62Uuid>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 hx_target: Option<TypedHeader<HXTarget>>,
 layout: Layout,
 ) -> Result<Response> {
-let feed = Feed::get(&pool, id.as_uuid()).await?;
+let feed = Feed::get(&db, id.as_uuid()).await?;
 let options = GetEntriesOptions {
 feed_id: Some(feed.feed_id),
 ..Default::default()
 };
 let title = feed.title.unwrap_or_else(|| "Untitled Feed".to_string());
-let entries = Entry::get_all(&pool, &options).await?;
+let entries = Entry::get_all(&db, &options).await?;
 let delete_url = format!("/feed/{}/delete", id);
 Ok(layout.with_subtitle(&title).targeted(hx_target).render(html! {
 header class="mb-4 flex flex-row items-center gap-4" {
@@ -115,13 +115,13 @@ impl IntoResponse for AddFeedError {
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 State(crawls): State<Crawls>,
 State(crawl_scheduler): State<CrawlSchedulerHandle>,
 Form(add_feed): Form<AddFeed>,
 ) -> AddFeedResult<Response> {
 let feed = Feed::create(
-&pool,
+&db,
 CreateFeed {
 title: add_feed.title,
 url: add_feed.url.clone(),
@@ -233,7 +233,7 @@ pub async fn stream(
 ))
 }

-pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Redirect> {
-Feed::delete(&pool, id.as_uuid()).await?;
+pub async fn delete(State(db): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Redirect> {
+Feed::delete(&db, id.as_uuid()).await?;
 Ok(Redirect::to("/feeds"))
 }
@@ -13,12 +13,12 @@ use crate::partials::layout::Layout;
 use crate::partials::opml_import_form::opml_import_form;

 pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 hx_target: Option<TypedHeader<HXTarget>>,
 layout: Layout,
 ) -> Result<Response> {
 let options = GetFeedsOptions::default();
-let feeds = Feed::get_all(&pool, &options).await?;
+let feeds = Feed::get_all(&db, &options).await?;
 Ok(layout
 .with_subtitle("feeds")
 .targeted(hx_target)
@@ -82,7 +82,7 @@ pub async fn get(
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 State(mailer): State<SmtpTransport>,
 State(config): State<Config>,
 SecureClientIp(ip): SecureClientIp,
@@ -91,7 +91,7 @@ pub async fn post(
 layout: Layout,
 Form(forgot_password): Form<ForgotPassword>,
 ) -> Result<Response> {
-let user: User = match User::get_by_email(&pool, forgot_password.email.clone()).await {
+let user: User = match User::get_by_email(&db, forgot_password.email.clone()).await {
 Ok(user) => user,
 Err(err) => {
 if let Error::NotFoundString(_, _) = err {
@@ -105,7 +105,7 @@ pub async fn post(
 if user.email_verified {
 info!(user_id = %user.user_id, "user exists with verified email, sending password reset email");
 send_forgot_password_email(
-pool,
+db,
 mailer,
 config,
 user,
@@ -10,12 +10,12 @@ use crate::models::entry::Entry;
 use crate::partials::{entry_list::entry_list, layout::Layout};

 pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 hx_target: Option<TypedHeader<HXTarget>>,
 layout: Layout,
 ) -> Result<Response> {
 let options = Default::default();
-let entries = Entry::get_all(&pool, &options).await?;
+let entries = Entry::get_all(&db, &options).await?;
 Ok(layout.targeted(hx_target).render(html! {
 ul class="list-none flex flex-col gap-4" {
 (entry_list(entries, &options, true))
@@ -59,7 +59,7 @@ pub async fn get(hx_target: Option<TypedHeader<HXTarget>>, layout: Layout) -> Re
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 State(mailer): State<SmtpTransport>,
 State(config): State<Config>,
 mut auth: AuthSession,
@@ -80,7 +80,7 @@ pub async fn post(
 ));
 }
 let user = match User::create(
-&pool,
+&db,
 CreateUser {
 email: register.email.clone(),
 password: register.password.clone(),
@@ -144,7 +144,7 @@ pub async fn post(
 }
 };

-send_confirmation_email(pool, mailer, config, user.clone());
+send_confirmation_email(db, mailer, config, user.clone());

 auth.login(&user)
 .await
@@ -126,14 +126,14 @@ pub fn reset_password_page(
 }

 pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 hx_target: Option<TypedHeader<HXTarget>>,
 layout: Layout,
 query: Query<ResetPasswordQuery>,
 ) -> Result<Response> {
 if let Some(token_id) = query.token_id {
 info!(token_id = %token_id.as_uuid(), "get with token_id");
-let token = match UserPasswordResetToken::get(&pool, token_id.as_uuid()).await {
+let token = match UserPasswordResetToken::get(&db, token_id.as_uuid()).await {
 Ok(token) => token,
 Err(err) => {
 if let Error::NotFoundUuid(_, _) = err {
@@ -158,7 +158,7 @@ pub async fn get(
 }))
 } else {
 info!(token_id = %token.token_id, "token valid, showing reset password form");
-let user = User::get(&pool, token.user_id).await?;
+let user = User::get(&db, token.user_id).await?;
 Ok(reset_password_page(ResetPasswordPageProps {
 hx_target,
 layout,
@@ -181,7 +181,7 @@ pub async fn get(
 }

 pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
 State(mailer): State<SmtpTransport>,
 State(config): State<Config>,
 SecureClientIp(ip): SecureClientIp,
@@ -203,7 +203,7 @@ pub async fn post(
 ..Default::default()
 }));
 }
-let token = match UserPasswordResetToken::get(&pool, reset_password.token).await {
+let token = match UserPasswordResetToken::get(&db, reset_password.token).await {
 Ok(token) => token,
 Err(err) => {
 if let Error::NotFoundUuid(_, _) = err {
@@ -241,7 +241,7 @@ pub async fn post(
 ..Default::default()
 }));
 }
-let user = match User::get(&pool, token.user_id).await {
+let user = match User::get(&db, token.user_id).await {
 Ok(user) => user,
 Err(err) => {
 if let Error::NotFoundString(_, _) = err {
@@ -266,7 +266,7 @@ pub async fn post(
 }
 };
 info!(user_id = %user.user_id, "user exists with verified email, resetting password");
-let mut tx = pool.begin().await?;
+let mut tx = db.begin().await?;
 UserPasswordResetToken::delete(tx.as_mut(), reset_password.token).await?;
 let user = match user
 .update_password(
src/jobs/crawl_feed.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
+use serde::{Deserialize, Serialize};
+
+use crate::models::feed::Feed;
+
+#[derive(Debug, Deserialize, Serialize, Clone)]
+pub struct CrawlFeedJob {
+pub feed: Feed,
+}
@@ -1,11 +1,8 @@
-use apalis::prelude::*;
 use serde::{Deserialize, Serialize};

+mod crawl_feed;
+
 #[derive(Debug, Deserialize, Serialize, Clone)]
 pub enum AsyncJob {
 HelloWorld(String),
 }
-
-impl Job for AsyncJob {
-const NAME: &'static str = "apalis::AsyncJob";
-}
@@ -17,7 +17,7 @@ use crate::uuid::Base62Uuid;
 // TODO: put in config
 const USER_EMAIL_VERIFICATION_TOKEN_EXPIRATION: Duration = Duration::from_secs(24 * 60 * 60);

-pub fn send_confirmation_email(pool: PgPool, mailer: SmtpTransport, config: Config, user: User) {
+pub fn send_confirmation_email(db: PgPool, mailer: SmtpTransport, config: Config, user: User) {
 tokio::spawn(async move {
 let user_email_address = match user.email.parse() {
 Ok(address) => address,
@@ -28,7 +28,7 @@ pub fn send_confirmation_email(pool: PgPool, mailer: SmtpTransport, config: Conf
 };
 let mailbox = Mailbox::new(user.name.clone(), user_email_address);
 let token = match UserEmailVerificationToken::create(
-&pool,
+&db,
 CreateUserEmailVerificationToken {
 user_id: user.user_id,
 expires_at: Utc::now() + USER_EMAIL_VERIFICATION_TOKEN_EXPIRATION,
@@ -42,11 +42,10 @@ pub fn send_confirmation_email(pool: PgPool, mailer: SmtpTransport, config: Conf
 return;
 }
 };
-let mut confirm_link = config
-.public_url
-.clone();
+let mut confirm_link = config.public_url.clone();
 confirm_link.set_path("confirm-email");
-confirm_link.query_pairs_mut()
+confirm_link
+.query_pairs_mut()
 .append_pair("token_id", &Base62Uuid::from(token.token_id).to_string());
 let confirm_link = confirm_link.as_str();

@@ -18,7 +18,7 @@ use crate::uuid::Base62Uuid;
 const PASSWORD_RESET_TOKEN_EXPIRATION: Duration = Duration::from_secs(24 * 60 * 60);

 pub fn send_forgot_password_email(
-pool: PgPool,
+db: PgPool,
 mailer: SmtpTransport,
 config: Config,
 user: User,
@@ -35,7 +35,7 @@ pub fn send_forgot_password_email(
 };
 let mailbox = Mailbox::new(user.name.clone(), user_email_address);
 let token = match UserPasswordResetToken::create(
-&pool,
+&db,
 CreatePasswordResetToken {
 token_id: Uuid::new_v4(), // cyptographically-secure random uuid
 user_id: user.user_id,
@@ -32,7 +32,7 @@ impl UserPasswordResetToken {
 }

 pub async fn get(
-pool: impl Executor<'_, Database = Postgres>,
+db: impl Executor<'_, Database = Postgres>,
 token_id: Uuid,
 ) -> Result<UserPasswordResetToken> {
 sqlx::query_as!(
@@ -43,7 +43,7 @@ impl UserPasswordResetToken {
 where token_id = $1"#,
 token_id
 )
-.fetch_one(pool)
+.fetch_one(db)
 .await
 .map_err(|error| {
 if let sqlx::error::Error::RowNotFound = error {
@@ -54,7 +54,7 @@ impl UserPasswordResetToken {
 }

 pub async fn create(
-pool: impl Executor<'_, Database = Postgres>,
+db: impl Executor<'_, Database = Postgres>,
 payload: CreatePasswordResetToken,
 ) -> Result<UserPasswordResetToken> {
 Ok(sqlx::query_as!(
@@ -70,20 +70,17 @@ impl UserPasswordResetToken {
 payload.request_ip,
 payload.expires_at
 )
-.fetch_one(pool)
+.fetch_one(db)
 .await?)
 }

-pub async fn delete(
-pool: impl Executor<'_, Database = Postgres>,
-token_id: Uuid,
-) -> Result<()> {
+pub async fn delete(db: impl Executor<'_, Database = Postgres>, token_id: Uuid) -> Result<()> {
 sqlx::query!(
 r#"delete from user_password_reset_token
 where token_id = $1"#,
 token_id
 )
-.execute(pool)
+.execute(db)
 .await?;
 Ok(())
 }
@@ -1,7 +1,7 @@
 use std::collections::HashMap;
 use std::sync::Arc;

-use apalis::redis::RedisStorage;
+use apalis_redis::RedisStorage;
 use axum::extract::FromRef;
 use bytes::Bytes;
 use lettre::SmtpTransport;
@@ -41,7 +41,7 @@ pub type Imports = Arc<Mutex<HashMap<Uuid, broadcast::Receiver<ImporterHandleMes
 
 #[derive(Clone)]
 pub struct AppState {
-pub pool: PgPool,
+pub db: PgPool,
 pub config: Config,
 pub log_receiver: watch::Receiver<Bytes>,
 pub crawls: Crawls,
@@ -56,7 +56,7 @@ pub struct AppState {

 impl FromRef<AppState> for PgPool {
 fn from_ref(state: &AppState) -> Self {
-state.pool.clone()
+state.db.clone()
 }
 }
