use std::collections::HashMap;
use std::sync::Arc;

use axum::extract::FromRef;
use bytes::Bytes;
use lettre::SmtpTransport;
use reqwest::Client;
use sqlx::PgPool;
use tokio::sync::{broadcast, watch, Mutex};
use uuid::Uuid;

use crate::actors::crawl_scheduler::{CrawlSchedulerHandle, CrawlSchedulerHandleMessage};
use crate::actors::importer::{ImporterHandle, ImporterHandleMessage};
use crate::config::Config;
use crate::domain_locks::DomainLocks;

/// A map of feed IDs to a channel receiver for the active `CrawlScheduler` running a feed crawl
/// for that feed.
///
/// Currently, the only purpose of this is to keep track of active crawls so that axum handlers
/// can subscribe to the results of the crawl via the receiver channel, which are then sent to
/// end-users as a stream of server-sent events.
///
/// This map should only contain crawls that have just been created but not yet subscribed to.
/// Entries are only added when a user adds a feed in the UI and entries are removed by the same
/// user once a server-sent event connection is established.
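///
/// A subscriber might consume an entry roughly like this (a sketch only, not code from this
/// crate; `crawls` is a `Crawls` value and `crawl_id` is the feed's `Uuid`):
///
/// ```ignore
/// let receiver = crawls.lock().await.remove(&crawl_id);
/// if let Some(mut receiver) = receiver {
///     while let Ok(msg) = receiver.recv().await {
///         // send `msg` to the client as a server-sent event
///     }
/// }
/// ```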
pub type Crawls = Arc<Mutex<HashMap<Uuid, broadcast::Receiver<CrawlSchedulerHandleMessage>>>>;

/// A map of unique import IDs to a channel receiver for the active `Importer` running that import.
///
/// Same as the `Crawls` map, the only purpose of this is to keep track of active imports so that
/// axum handlers can subscribe to the results of the import via the receiver channel, which are
/// then sent to end-users as a stream of server-sent events.
///
/// This map should only contain imports that have just been created but not yet subscribed to.
/// Entries are only added when a user uploads an OPML file to import and entries are removed by
/// the same user once a server-sent event connection is established.
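///
/// For example, the upload handler might register a new import roughly like this (a sketch only;
/// `import_id` and `receiver` are placeholders, not names from this crate):
///
/// ```ignore
/// imports.lock().await.insert(import_id, receiver);
/// ```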
pub type Imports = Arc<Mutex<HashMap<Uuid, broadcast::Receiver<ImporterHandleMessage>>>>;
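
/// Application-wide state shared by the axum handlers.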
#[derive(Clone)]
pub struct AppState {
    pub pool: PgPool,
    pub config: Config,
    pub log_receiver: watch::Receiver<Bytes>,
    pub crawls: Crawls,
    pub domain_locks: DomainLocks,
    pub client: Client,
    pub crawl_scheduler: CrawlSchedulerHandle,
    pub importer: ImporterHandle,
    pub imports: Imports,
    pub mailer: SmtpTransport,
}
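
// Each `FromRef` impl below lets a handler extract a single piece of the state, e.g.
// `State(pool): State<PgPool>`, instead of taking the whole `State<AppState>`.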
impl FromRef<AppState> for PgPool {
    fn from_ref(state: &AppState) -> Self {
        state.pool.clone()
    }
}

impl FromRef<AppState> for Config {
    fn from_ref(state: &AppState) -> Self {
        state.config.clone()
    }
}

impl FromRef<AppState> for watch::Receiver<Bytes> {
    fn from_ref(state: &AppState) -> Self {
        state.log_receiver.clone()
    }
}

impl FromRef<AppState> for Crawls {
    fn from_ref(state: &AppState) -> Self {
        state.crawls.clone()
    }
}

impl FromRef<AppState> for DomainLocks {
    fn from_ref(state: &AppState) -> Self {
        state.domain_locks.clone()
    }
}

impl FromRef<AppState> for Client {
    fn from_ref(state: &AppState) -> Self {
        state.client.clone()
    }
}

impl FromRef<AppState> for CrawlSchedulerHandle {
    fn from_ref(state: &AppState) -> Self {
        state.crawl_scheduler.clone()
    }
}

impl FromRef<AppState> for ImporterHandle {
    fn from_ref(state: &AppState) -> Self {
        state.importer.clone()
    }
}

impl FromRef<AppState> for Imports {
    fn from_ref(state: &AppState) -> Self {
        state.imports.clone()
    }
}

impl FromRef<AppState> for SmtpTransport {
    fn from_ref(state: &AppState) -> Self {
        state.mailer.clone()
    }
}