Working apalis cron and worker with 0.6.0-rc.5

Also renamed `pool` variables throughout the codebase to `db` for clarity.
2024-08-21 01:10:26 -04:00
parent 764d3f23b8
commit a3450e202a
27 changed files with 148 additions and 232 deletions
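For context, the heart of the 0.6.0-rc.5 migration visible in the diffs below: `apalis::cron` and `apalis::redis` moved out to the separate `apalis-cron` and `apalis-redis` crates, the `Job` impls with their `NAME` constants are gone, and `WorkerBuilder` now receives its job source through `.backend(...)` (a `CronStream` for cron jobs, a `RedisStorage` for queued jobs) instead of `.stream(...)` / `.with_storage(...)`, with shared state attached via `.data(...)`. A minimal sketch of the cron wiring under those semantics; `Crawl`, `State`, and `crawl_fn` are simplified stand-ins for the versions in the diff, and the `Monitor::register` call is an assumption (the diff truncates after `Monitor::<TokioExecutor>::new()`):

```rust
use std::str::FromStr;
use std::sync::Arc;

use apalis::layers::retry::{RetryLayer, RetryPolicy};
use apalis::layers::tracing::TraceLayer;
use apalis::prelude::*;
use apalis_cron::{CronStream, Schedule};
use chrono::{DateTime, Utc};

// Stand-in for the commit's `Crawl` job: apalis-cron delivers each tick
// by converting the tick timestamp into the job type.
#[derive(Debug, Clone)]
struct Crawl(DateTime<Utc>);

impl From<DateTime<Utc>> for Crawl {
    fn from(t: DateTime<Utc>) -> Self {
        Crawl(t)
    }
}

// Stand-in for the commit's `State` (which holds the PgPool and RedisStorage).
#[derive(Clone)]
struct State;

// Shared state arrives through the `Data` extractor rather than a captured closure.
async fn crawl_fn(job: Crawl, _state: Data<Arc<State>>) -> Result<(), std::convert::Infallible> {
    tracing::info!(job = ?job, "crawl tick");
    Ok(())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let schedule = Schedule::from_str("0 * * * * *").expect("valid cron expression");
    let worker = WorkerBuilder::new("crawler")
        .layer(RetryLayer::new(RetryPolicy::default()))
        .layer(TraceLayer::new())
        .data(Arc::new(State))
        .backend(CronStream::new(schedule))
        .build_fn(crawl_fn);
    Monitor::<TokioExecutor>::new().register(worker).run().await?;
    Ok(())
}
```

The Redis-backed worker further down follows the same shape, swapping the cron stream for `.backend(apalis_storage)` and registering with `register_with_count`.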

View File

@@ -97,7 +97,7 @@ pub async fn main() -> Result<()> {
tracing_subscriber::fmt::init();
-let pool = PgPoolOptions::new()
+let db = PgPoolOptions::new()
.max_connections(env::var("DATABASE_MAX_CONNECTIONS")?.parse()?)
.connect(&env::var("DATABASE_URL")?)
.await?;
@@ -108,7 +108,7 @@ pub async fn main() -> Result<()> {
match cli.commands {
Commands::AddFeed(args) => {
let feed = Feed::create(
-&pool,
+&db,
CreateFeed {
title: args.title,
url: args.url,
@@ -119,12 +119,12 @@ pub async fn main() -> Result<()> {
info!("Created feed with id {}", Base62Uuid::from(feed.feed_id));
}
Commands::DeleteFeed(args) => {
-Feed::delete(&pool, args.id).await?;
+Feed::delete(&db, args.id).await?;
info!("Deleted feed with id {}", Base62Uuid::from(args.id));
}
Commands::AddEntry(args) => {
let entry = Entry::create(
-&pool,
+&db,
CreateEntry {
title: args.title,
url: args.url,
@@ -137,7 +137,7 @@ pub async fn main() -> Result<()> {
info!("Created entry with id {}", Base62Uuid::from(entry.entry_id));
}
Commands::DeleteEntry(args) => {
-Entry::delete(&pool, args.id).await?;
+Entry::delete(&db, args.id).await?;
info!("Deleted entry with id {}", Base62Uuid::from(args.id));
}
Commands::Crawl(CrawlFeed { id }) => {
@@ -147,7 +147,7 @@ pub async fn main() -> Result<()> {
// server is running, it will *not* serialize same-domain requests with it.
let domain_locks = DomainLocks::new();
let feed_crawler = FeedCrawlerHandle::new(
-pool.clone(),
+db.clone(),
client.clone(),
domain_locks.clone(),
env::var("CONTENT_DIR")?,

View File

@@ -1,19 +1,17 @@
use anyhow::{anyhow, Result};
-use apalis::cron::{CronStream, Schedule};
use apalis::layers::retry::{RetryLayer, RetryPolicy};
use apalis::layers::tracing::TraceLayer;
use apalis::prelude::*;
-use apalis::redis::RedisStorage;
+use apalis_cron::{CronStream, Schedule};
+use apalis_redis::RedisStorage;
use chrono::{DateTime, Utc};
use clap::Parser;
use lib::actors::crawl_scheduler::CrawlSchedulerError;
use lib::jobs::AsyncJob;
use lib::models::feed::{Feed, GetFeedsOptions};
use sqlx::postgres::PgPoolOptions;
use sqlx::PgPool;
use std::str::FromStr;
use std::sync::Arc;
-use tower::ServiceBuilder;
+use thiserror::Error;
use tracing::{info, instrument};
use dotenvy::dotenv;
@@ -29,26 +27,32 @@ impl From<DateTime<Utc>> for Crawl {
}
}
-impl Job for Crawl {
-const NAME: &'static str = "apalis::Crawl";
-}
+#[derive(Debug, Error)]
+enum CrawlError {
+#[error("error fetching feeds")]
+FetchFeedsError(#[from] sqlx::Error),
+#[error("error queueing crawl feed job")]
+QueueJobError(String),
+}
#[derive(Clone)]
struct State {
pool: PgPool,
apalis: RedisStorage<AsyncJob>,
}
#[instrument(skip_all)]
-pub async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<()> {
+pub async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<(), CrawlError> {
tracing::info!(job = ?job, "crawl");
let mut apalis = (state.apalis).clone();
let mut options = GetFeedsOptions::default();
loop {
info!("fetching feeds before: {:?}", options.before);
// TODO: filter to feeds where:
// now >= feed.last_crawled_at + feed.crawl_interval_minutes
// may need more indices...
let feeds = match Feed::get_all(&state.pool, &options).await {
-Err(err) => {
-return Err(anyhow!(err));
-}
+Err(err) => return Err(CrawlError::FetchFeedsError(err)),
Ok(feeds) if feeds.is_empty() => {
info!("no more feeds found");
break;
@@ -62,14 +66,15 @@ pub async fn crawl_fn(job: Crawl, state: Data<Arc<State>>) -> Result<()> {
// self.spawn_crawler_loop(feed, respond_to.clone());
apalis
.push(AsyncJob::HelloWorld(feed.feed_id.to_string()))
-.await?;
+.await
+.map_err(|err| CrawlError::QueueJobError(err.to_string()))?;
}
}
Ok(())
}
#[tokio::main]
-async fn main() -> Result<()> {
+async fn main() -> anyhow::Result<()> {
dotenv().ok();
let config = Config::parse();
let _guard = init_worker_tracing()?;
@@ -80,24 +85,24 @@ async fn main() -> Result<()> {
.connect(&config.database_url)
.await?;
-// TODO: use redis_pool from above instead of making a new connection
+// TODO: create a connection from redis_pool for each job instead of using a single connection
// See: https://github.com/geofmureithi/apalis/issues/290
-let redis_conn = apalis::redis::connect(config.redis_url.clone()).await?;
-let apalis_config = apalis::redis::Config::default();
-let mut apalis: RedisStorage<AsyncJob> =
-RedisStorage::new_with_config(redis_conn, apalis_config);
+let redis_conn = apalis_redis::connect(config.redis_url.clone()).await?;
+let apalis_config = apalis_redis::Config::default();
+let apalis_storage = RedisStorage::new_with_config(redis_conn, apalis_config);
+let state = Arc::new(State {
+pool,
+apalis: apalis_storage.clone(),
+});
let schedule = Schedule::from_str("0 * * * * *").unwrap();
-// let service = ServiceBuilder::new()
-// .layer(RetryLayer::new(RetryPolicy::default()))
-// .layer(TraceLayer::new())
-// .service(service_fn(crawl_fn));
let worker = WorkerBuilder::new("crawler")
-.stream(CronStream::new(schedule).into_stream())
-.data(Arc::new(State { pool, apalis }))
+.layer(RetryLayer::new(RetryPolicy::default()))
+.layer(TraceLayer::new())
+.data(state)
+.backend(CronStream::new(schedule))
.build_fn(crawl_fn);
Monitor::<TokioExecutor>::new()

View File

@@ -2,7 +2,7 @@ use std::{collections::HashMap, net::SocketAddr, path::Path, sync::Arc};
use anyhow::Result;
use apalis::prelude::*;
-use apalis::redis::RedisStorage;
+use apalis_redis::RedisStorage;
use axum::{
routing::{get, post},
Router,
@@ -66,7 +66,7 @@ async fn main() -> Result<()> {
let domain_locks = DomainLocks::new();
let client = Client::builder().user_agent(USER_AGENT).build()?;
-let pool = PgPoolOptions::new()
+let db = PgPoolOptions::new()
.max_connections(config.database_max_connections)
.acquire_timeout(std::time::Duration::from_secs(3))
.connect(&config.database_url)
@@ -84,7 +84,7 @@ async fn main() -> Result<()> {
)))
.with_signed(Key::from(&BASE64_STANDARD.decode(&config.session_secret)?));
-let backend = Backend::new(pool.clone());
+let backend = Backend::new(db.clone());
let auth_layer = AuthManagerLayerBuilder::new(backend, session_layer).build();
let smtp_creds = Credentials::new(config.smtp_user.clone(), config.smtp_password.clone());
@@ -94,12 +94,12 @@ async fn main() -> Result<()> {
.credentials(smtp_creds)
.build();
-sqlx::migrate!().run(&pool).await?;
+sqlx::migrate!().run(&db).await?;
// TODO: use redis_pool from above instead of making a new connection
// See: https://github.com/geofmureithi/apalis/issues/290
-let redis_conn = apalis::redis::connect(config.redis_url.clone()).await?;
-let apalis_config = apalis::redis::Config::default();
+let redis_conn = apalis_redis::connect(config.redis_url.clone()).await?;
+let apalis_config = apalis_redis::Config::default();
let mut apalis: RedisStorage<AsyncJob> =
RedisStorage::new_with_config(redis_conn, apalis_config);
@@ -108,14 +108,14 @@ async fn main() -> Result<()> {
.await?;
let crawl_scheduler = CrawlSchedulerHandle::new(
-pool.clone(),
+db.clone(),
client.clone(),
domain_locks.clone(),
config.content_dir.clone(),
crawls.clone(),
);
let _ = crawl_scheduler.bootstrap().await;
-let importer = ImporterHandle::new(pool.clone(), crawl_scheduler.clone(), imports.clone());
+let importer = ImporterHandle::new(db.clone(), crawl_scheduler.clone(), imports.clone());
let ip_source_extension = config.ip_source.0.clone().into_extension();
@@ -154,7 +154,7 @@ async fn main() -> Result<()> {
.route("/reset-password", post(handlers::reset_password::post))
.nest_service("/static", ServeDir::new("static"))
.with_state(AppState {
-pool,
+db,
config,
log_receiver,
crawls,

View File

@@ -1,7 +1,7 @@
use anyhow::Result;
use apalis::layers::tracing::TraceLayer;
use apalis::prelude::*;
-use apalis::redis::RedisStorage;
+use apalis_redis::RedisStorage;
use clap::Parser;
use dotenvy::dotenv;
@@ -18,15 +18,18 @@ async fn main() -> Result<()> {
dotenv().ok();
let config = Config::parse();
let _guard = init_worker_tracing()?;
-let redis_conn = apalis::redis::connect(config.redis_url.clone()).await?;
-let apalis_config = apalis::redis::Config::default();
-let apalis: RedisStorage<AsyncJob> = RedisStorage::new_with_config(redis_conn, apalis_config);
+// TODO: create a connection from redis_pool for each job instead of using a single connection
+// See: https://github.com/geofmureithi/apalis/issues/290
+let redis_conn = apalis_redis::connect(config.redis_url.clone()).await?;
+let apalis_config = apalis_redis::Config::default();
+let apalis_storage: RedisStorage<AsyncJob> =
+RedisStorage::new_with_config(redis_conn, apalis_config);
Monitor::<TokioExecutor>::new()
.register_with_count(2, {
WorkerBuilder::new("worker")
.layer(TraceLayer::new())
-.with_storage(apalis.clone())
+.backend(apalis_storage)
.build_fn(worker_fn)
})
.run()

View File

@@ -27,7 +27,7 @@ pub enum Error {
#[error("validation error in request body")]
InvalidEntity(#[from] ValidationErrors),
#[error("error with file upload: (0)")]
#[error("error with file upload")]
Upload(#[from] MultipartError),
#[error("no file uploaded")]
@@ -49,7 +49,7 @@ pub enum Error {
Unauthorized,
#[error("bad request: {0}")]
-BadRequest(&'static str)
+BadRequest(&'static str),
}
pub type Result<T, E = Error> = ::std::result::Result<T, E>;

View File

@@ -13,9 +13,9 @@ use crate::partials::entry_list::entry_list;
pub async fn get(
Query(options): Query<GetEntriesOptions>,
accept: Option<TypedHeader<Accept>>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
) -> Result<impl IntoResponse, impl IntoResponse> {
-let entries = Entry::get_all(&pool, &options).await.map_err(Error::from)?;
+let entries = Entry::get_all(&db, &options).await.map_err(Error::from)?;
if let Some(TypedHeader(accept)) = accept {
if accept == Accept::ApplicationJson {
return Ok::<ApiResponse<Vec<Entry>>, Error>(ApiResponse::Json(entries));

View File

@@ -9,15 +9,15 @@ use crate::models::entry::{CreateEntry, Entry};
use crate::uuid::Base62Uuid;
pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
Path(id): Path<Base62Uuid>,
) -> Result<Json<Entry>, Error> {
-Ok(Json(Entry::get(&pool, id.as_uuid()).await?))
+Ok(Json(Entry::get(&db, id.as_uuid()).await?))
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
Json(payload): Json<CreateEntry>,
) -> Result<Json<Entry>, Error> {
-Ok(Json(Entry::create(&pool, payload).await?))
+Ok(Json(Entry::create(&db, payload).await?))
}

View File

@@ -8,17 +8,17 @@ use crate::error::{Error, Result};
use crate::models::feed::{CreateFeed, Feed};
use crate::uuid::Base62Uuid;
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
-Ok(Json(Feed::get(&pool, id.as_uuid()).await?))
+pub async fn get(State(db): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
+Ok(Json(Feed::get(&db, id.as_uuid()).await?))
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
Json(payload): Json<CreateFeed>,
) -> Result<Json<Feed>, Error> {
-Ok(Json(Feed::create(&pool, payload).await?))
+Ok(Json(Feed::create(&db, payload).await?))
}
-pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
-Feed::delete(&pool, id.as_uuid()).await
+pub async fn delete(State(db): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
+Feed::delete(&db, id.as_uuid()).await
}

View File

@@ -13,9 +13,9 @@ use crate::partials::feed_list::feed_list;
pub async fn get(
Query(options): Query<GetFeedsOptions>,
accept: Option<TypedHeader<Accept>>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
) -> Result<impl IntoResponse, impl IntoResponse> {
-let feeds = Feed::get_all(&pool, &options).await.map_err(Error::from)?;
+let feeds = Feed::get_all(&db, &options).await.map_err(Error::from)?;
if let Some(TypedHeader(accept)) = accept {
if accept == Accept::ApplicationJson {
return Ok::<ApiResponse<Vec<Feed>>, Error>(ApiResponse::Json(feeds));

View File

@@ -70,7 +70,7 @@ pub fn confirm_email_page(
}
pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
auth: AuthSession,
hx_target: Option<TypedHeader<HXTarget>>,
layout: Layout,
@@ -78,7 +78,7 @@ pub async fn get(
) -> Result<Response> {
if let Some(token_id) = query.token_id {
info!(token_id = %token_id.as_uuid(), "get with token_id");
-let token = match UserEmailVerificationToken::get(&pool, token_id.as_uuid()).await {
+let token = match UserEmailVerificationToken::get(&db, token_id.as_uuid()).await {
Ok(token) => token,
Err(err) => {
if let Error::NotFoundUuid(_, _) = err {
@@ -112,8 +112,8 @@ pub async fn get(
}))
} else {
info!(token_id = %token.token_id, "token valid, verifying email");
-User::verify_email(&pool, token.user_id).await?;
-UserEmailVerificationToken::delete(&pool, token.token_id).await?;
+User::verify_email(&db, token.user_id).await?;
+UserEmailVerificationToken::delete(&db, token.token_id).await?;
Ok(layout
.with_subtitle("confirm email")
.targeted(hx_target)
@@ -152,7 +152,7 @@ pub struct ConfirmEmail {
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
State(mailer): State<SmtpTransport>,
State(config): State<Config>,
hx_target: Option<TypedHeader<HXTarget>>,
@@ -161,11 +161,11 @@ pub async fn post(
) -> Result<Response> {
if let Some(token_id) = confirm_email.token {
info!(%token_id, "posted with token_id");
-let token = UserEmailVerificationToken::get(&pool, token_id).await?;
-let user = User::get(&pool, token.user_id).await?;
+let token = UserEmailVerificationToken::get(&db, token_id).await?;
+let user = User::get(&db, token.user_id).await?;
if !user.email_verified {
info!(user_id = %user.user_id, "user exists, resending confirmation email");
-send_confirmation_email(pool, mailer, config, user);
+send_confirmation_email(db, mailer, config, user);
} else {
warn!(user_id = %user.user_id, "confirm email submitted for already verified user, skip resend");
}
@@ -184,10 +184,10 @@ pub async fn post(
}));
}
if let Some(email) = confirm_email.email {
-if let Ok(user) = User::get_by_email(&pool, email).await {
+if let Ok(user) = User::get_by_email(&db, email).await {
if !user.email_verified {
info!(user_id = %user.user_id, "user exists, resending confirmation email");
-send_confirmation_email(pool, mailer, config, user);
+send_confirmation_email(db, mailer, config, user);
} else {
warn!(user_id = %user.user_id, "confirm email submitted for already verified user, skip resend");
}

View File

@@ -8,8 +8,8 @@ use crate::partials::entry_list::entry_list;
pub async fn get(
Query(options): Query<GetEntriesOptions>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
) -> Result<Markup> {
-let entries = Entry::get_all(&pool, &options).await?;
+let entries = Entry::get_all(&db, &options).await?;
Ok(entry_list(entries, &options, false))
}

View File

@@ -16,12 +16,12 @@ use crate::uuid::Base62Uuid;
pub async fn get(
Path(id): Path<Base62Uuid>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
State(config): State<Config>,
hx_target: Option<TypedHeader<HXTarget>>,
layout: Layout,
) -> Result<Response> {
-let entry = Entry::get(&pool, id.as_uuid()).await?;
+let entry = Entry::get(&db, id.as_uuid()).await?;
let content_dir = std::path::Path::new(&config.content_dir);
let content_path = content_dir.join(format!("{}.html", entry.entry_id));
let title = entry.title.unwrap_or_else(|| "Untitled Entry".to_string());

View File

@@ -28,17 +28,17 @@ use crate::uuid::Base62Uuid;
pub async fn get(
Path(id): Path<Base62Uuid>,
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
hx_target: Option<TypedHeader<HXTarget>>,
layout: Layout,
) -> Result<Response> {
-let feed = Feed::get(&pool, id.as_uuid()).await?;
+let feed = Feed::get(&db, id.as_uuid()).await?;
let options = GetEntriesOptions {
feed_id: Some(feed.feed_id),
..Default::default()
};
let title = feed.title.unwrap_or_else(|| "Untitled Feed".to_string());
-let entries = Entry::get_all(&pool, &options).await?;
+let entries = Entry::get_all(&db, &options).await?;
let delete_url = format!("/feed/{}/delete", id);
Ok(layout.with_subtitle(&title).targeted(hx_target).render(html! {
header class="mb-4 flex flex-row items-center gap-4" {
@@ -115,13 +115,13 @@ impl IntoResponse for AddFeedError {
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
State(crawls): State<Crawls>,
State(crawl_scheduler): State<CrawlSchedulerHandle>,
Form(add_feed): Form<AddFeed>,
) -> AddFeedResult<Response> {
let feed = Feed::create(
-&pool,
+&db,
CreateFeed {
title: add_feed.title,
url: add_feed.url.clone(),
@@ -233,7 +233,7 @@ pub async fn stream(
))
}
-pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Redirect> {
-Feed::delete(&pool, id.as_uuid()).await?;
+pub async fn delete(State(db): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Redirect> {
+Feed::delete(&db, id.as_uuid()).await?;
Ok(Redirect::to("/feeds"))
}

View File

@@ -13,12 +13,12 @@ use crate::partials::layout::Layout;
use crate::partials::opml_import_form::opml_import_form;
pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
hx_target: Option<TypedHeader<HXTarget>>,
layout: Layout,
) -> Result<Response> {
let options = GetFeedsOptions::default();
-let feeds = Feed::get_all(&pool, &options).await?;
+let feeds = Feed::get_all(&db, &options).await?;
Ok(layout
.with_subtitle("feeds")
.targeted(hx_target)

View File

@@ -82,7 +82,7 @@ pub async fn get(
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
State(mailer): State<SmtpTransport>,
State(config): State<Config>,
SecureClientIp(ip): SecureClientIp,
@@ -91,7 +91,7 @@ pub async fn post(
layout: Layout,
Form(forgot_password): Form<ForgotPassword>,
) -> Result<Response> {
-let user: User = match User::get_by_email(&pool, forgot_password.email.clone()).await {
+let user: User = match User::get_by_email(&db, forgot_password.email.clone()).await {
Ok(user) => user,
Err(err) => {
if let Error::NotFoundString(_, _) = err {
@@ -105,7 +105,7 @@ pub async fn post(
if user.email_verified {
info!(user_id = %user.user_id, "user exists with verified email, sending password reset email");
send_forgot_password_email(
-pool,
+db,
mailer,
config,
user,

View File

@@ -10,12 +10,12 @@ use crate::models::entry::Entry;
use crate::partials::{entry_list::entry_list, layout::Layout};
pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
hx_target: Option<TypedHeader<HXTarget>>,
layout: Layout,
) -> Result<Response> {
let options = Default::default();
-let entries = Entry::get_all(&pool, &options).await?;
+let entries = Entry::get_all(&db, &options).await?;
Ok(layout.targeted(hx_target).render(html! {
ul class="list-none flex flex-col gap-4" {
(entry_list(entries, &options, true))

View File

@@ -59,7 +59,7 @@ pub async fn get(hx_target: Option<TypedHeader<HXTarget>>, layout: Layout) -> Re
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
State(mailer): State<SmtpTransport>,
State(config): State<Config>,
mut auth: AuthSession,
@@ -80,7 +80,7 @@ pub async fn post(
));
}
let user = match User::create(
-&pool,
+&db,
CreateUser {
email: register.email.clone(),
password: register.password.clone(),
@@ -144,7 +144,7 @@ pub async fn post(
}
};
-send_confirmation_email(pool, mailer, config, user.clone());
+send_confirmation_email(db, mailer, config, user.clone());
auth.login(&user)
.await

View File

@@ -126,14 +126,14 @@ pub fn reset_password_page(
}
pub async fn get(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
hx_target: Option<TypedHeader<HXTarget>>,
layout: Layout,
query: Query<ResetPasswordQuery>,
) -> Result<Response> {
if let Some(token_id) = query.token_id {
info!(token_id = %token_id.as_uuid(), "get with token_id");
-let token = match UserPasswordResetToken::get(&pool, token_id.as_uuid()).await {
+let token = match UserPasswordResetToken::get(&db, token_id.as_uuid()).await {
Ok(token) => token,
Err(err) => {
if let Error::NotFoundUuid(_, _) = err {
@@ -158,7 +158,7 @@ pub async fn get(
}))
} else {
info!(token_id = %token.token_id, "token valid, showing reset password form");
-let user = User::get(&pool, token.user_id).await?;
+let user = User::get(&db, token.user_id).await?;
Ok(reset_password_page(ResetPasswordPageProps {
hx_target,
layout,
@@ -181,7 +181,7 @@ pub async fn get(
}
pub async fn post(
-State(pool): State<PgPool>,
+State(db): State<PgPool>,
State(mailer): State<SmtpTransport>,
State(config): State<Config>,
SecureClientIp(ip): SecureClientIp,
@@ -203,7 +203,7 @@ pub async fn post(
..Default::default()
}));
}
-let token = match UserPasswordResetToken::get(&pool, reset_password.token).await {
+let token = match UserPasswordResetToken::get(&db, reset_password.token).await {
Ok(token) => token,
Err(err) => {
if let Error::NotFoundUuid(_, _) = err {
@@ -241,7 +241,7 @@ pub async fn post(
..Default::default()
}));
}
-let user = match User::get(&pool, token.user_id).await {
+let user = match User::get(&db, token.user_id).await {
Ok(user) => user,
Err(err) => {
if let Error::NotFoundString(_, _) = err {
@@ -266,7 +266,7 @@ pub async fn post(
}
};
info!(user_id = %user.user_id, "user exists with verified email, resetting password");
-let mut tx = pool.begin().await?;
+let mut tx = db.begin().await?;
UserPasswordResetToken::delete(tx.as_mut(), reset_password.token).await?;
let user = match user
.update_password(

src/jobs/crawl_feed.rs (new file)
View File

@@ -0,0 +1,8 @@
+use serde::{Deserialize, Serialize};
+use crate::models::feed::Feed;
+#[derive(Debug, Deserialize, Serialize, Clone)]
+pub struct CrawlFeedJob {
+pub feed: Feed,
+}

View File

@@ -1,11 +1,8 @@
-use apalis::prelude::*;
use serde::{Deserialize, Serialize};
+mod crawl_feed;
#[derive(Debug, Deserialize, Serialize, Clone)]
pub enum AsyncJob {
HelloWorld(String),
}
-impl Job for AsyncJob {
-const NAME: &'static str = "apalis::AsyncJob";
-}

View File

@@ -17,7 +17,7 @@ use crate::uuid::Base62Uuid;
// TODO: put in config
const USER_EMAIL_VERIFICATION_TOKEN_EXPIRATION: Duration = Duration::from_secs(24 * 60 * 60);
-pub fn send_confirmation_email(pool: PgPool, mailer: SmtpTransport, config: Config, user: User) {
+pub fn send_confirmation_email(db: PgPool, mailer: SmtpTransport, config: Config, user: User) {
tokio::spawn(async move {
let user_email_address = match user.email.parse() {
Ok(address) => address,
@@ -28,7 +28,7 @@ pub fn send_confirmation_email(pool: PgPool, mailer: SmtpTransport, config: Conf
};
let mailbox = Mailbox::new(user.name.clone(), user_email_address);
let token = match UserEmailVerificationToken::create(
-&pool,
+&db,
CreateUserEmailVerificationToken {
user_id: user.user_id,
expires_at: Utc::now() + USER_EMAIL_VERIFICATION_TOKEN_EXPIRATION,
@@ -42,11 +42,10 @@ pub fn send_confirmation_email(pool: PgPool, mailer: SmtpTransport, config: Conf
return;
}
};
-let mut confirm_link = config
-.public_url
-.clone();
+let mut confirm_link = config.public_url.clone();
confirm_link.set_path("confirm-email");
-confirm_link.query_pairs_mut()
+confirm_link
+.query_pairs_mut()
.append_pair("token_id", &Base62Uuid::from(token.token_id).to_string());
let confirm_link = confirm_link.as_str();

View File

@@ -18,7 +18,7 @@ use crate::uuid::Base62Uuid;
const PASSWORD_RESET_TOKEN_EXPIRATION: Duration = Duration::from_secs(24 * 60 * 60);
pub fn send_forgot_password_email(
-pool: PgPool,
+db: PgPool,
mailer: SmtpTransport,
config: Config,
user: User,
@@ -35,7 +35,7 @@ pub fn send_forgot_password_email(
};
let mailbox = Mailbox::new(user.name.clone(), user_email_address);
let token = match UserPasswordResetToken::create(
-&pool,
+&db,
CreatePasswordResetToken {
token_id: Uuid::new_v4(), // cryptographically-secure random uuid
user_id: user.user_id,

View File

@@ -32,7 +32,7 @@ impl UserPasswordResetToken {
}
pub async fn get(
-pool: impl Executor<'_, Database = Postgres>,
+db: impl Executor<'_, Database = Postgres>,
token_id: Uuid,
) -> Result<UserPasswordResetToken> {
sqlx::query_as!(
@@ -43,7 +43,7 @@ impl UserPasswordResetToken {
where token_id = $1"#,
token_id
)
-.fetch_one(pool)
+.fetch_one(db)
.await
.map_err(|error| {
if let sqlx::error::Error::RowNotFound = error {
@@ -54,7 +54,7 @@ impl UserPasswordResetToken {
}
pub async fn create(
-pool: impl Executor<'_, Database = Postgres>,
+db: impl Executor<'_, Database = Postgres>,
payload: CreatePasswordResetToken,
) -> Result<UserPasswordResetToken> {
Ok(sqlx::query_as!(
@@ -70,20 +70,17 @@ impl UserPasswordResetToken {
payload.request_ip,
payload.expires_at
)
-.fetch_one(pool)
+.fetch_one(db)
.await?)
}
-pub async fn delete(
-pool: impl Executor<'_, Database = Postgres>,
-token_id: Uuid,
-) -> Result<()> {
+pub async fn delete(db: impl Executor<'_, Database = Postgres>, token_id: Uuid) -> Result<()> {
sqlx::query!(
r#"delete from user_password_reset_token
where token_id = $1"#,
token_id
)
-.execute(pool)
+.execute(db)
.await?;
Ok(())
}

View File

@@ -1,7 +1,7 @@
use std::collections::HashMap;
use std::sync::Arc;
-use apalis::redis::RedisStorage;
+use apalis_redis::RedisStorage;
use axum::extract::FromRef;
use bytes::Bytes;
use lettre::SmtpTransport;
@@ -41,7 +41,7 @@ pub type Imports = Arc<Mutex<HashMap<Uuid, broadcast::Receiver<ImporterHandleMes
#[derive(Clone)]
pub struct AppState {
-pub pool: PgPool,
+pub db: PgPool,
pub config: Config,
pub log_receiver: watch::Receiver<Bytes>,
pub crawls: Crawls,
@@ -56,7 +56,7 @@ pub struct AppState {
impl FromRef<AppState> for PgPool {
fn from_ref(state: &AppState) -> Self {
-state.pool.clone()
+state.db.clone()
}
}