Add EntryCrawler that uses readability lib

Actors delegating to actors baybeeee
This commit is contained in:
Tyler Hallada 2023-07-11 01:52:38 -04:00
parent f13c7e5e70
commit b7efc61cfc
9 changed files with 553 additions and 23 deletions

289
Cargo.lock generated
View File

@ -14,7 +14,7 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
"getrandom",
"getrandom 0.2.9",
"once_cell",
"version_check",
]
@ -26,7 +26,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
dependencies = [
"cfg-if",
"getrandom",
"getrandom 0.2.9",
"once_cell",
"version_check",
]
@ -467,6 +467,7 @@ dependencies = [
"maud",
"notify",
"once_cell",
"readability",
"reqwest",
"serde",
"serde_with",
@ -889,6 +890,16 @@ dependencies = [
"libc",
]
[[package]]
name = "futf"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843"
dependencies = [
"mac",
"new_debug_unreachable",
]
[[package]]
name = "futures"
version = "0.3.28"
@ -999,6 +1010,17 @@ dependencies = [
"version_check",
]
[[package]]
name = "getrandom"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
[[package]]
name = "getrandom"
version = "0.2.9"
@ -1131,6 +1153,20 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "html5ever"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5c13fb08e5d4dfc151ee5e88bae63f7773d61852f3bdc73c9f4b9e1bde03148"
dependencies = [
"log",
"mac",
"markup5ever",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "http"
version = "0.2.9"
@ -1499,6 +1535,38 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "mac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "markup5ever"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd"
dependencies = [
"log",
"phf",
"phf_codegen",
"string_cache",
"string_cache_codegen",
"tendril",
]
[[package]]
name = "markup5ever_rcdom"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f015da43bcd8d4f144559a3423f4591d69b8ce0652c905374da7205df336ae2b"
dependencies = [
"html5ever",
"markup5ever",
"tendril",
"xml5ever",
]
[[package]]
name = "matchers"
version = "0.1.0"
@ -1618,7 +1686,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3"
dependencies = [
"getrandom",
"getrandom 0.2.9",
]
[[package]]
@ -1639,6 +1707,12 @@ dependencies = [
"tempfile",
]
[[package]]
name = "new_debug_unreachable"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "nom"
version = "7.1.3"
@ -1689,7 +1763,7 @@ dependencies = [
"num-integer",
"num-iter",
"num-traits",
"rand",
"rand 0.8.5",
"smallvec",
"zeroize",
]
@ -1846,6 +1920,63 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "phf"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12"
dependencies = [
"phf_shared 0.8.0",
]
[[package]]
name = "phf_codegen"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815"
dependencies = [
"phf_generator 0.8.0",
"phf_shared 0.8.0",
]
[[package]]
name = "phf_generator"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526"
dependencies = [
"phf_shared 0.8.0",
"rand 0.7.3",
]
[[package]]
name = "phf_generator"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6"
dependencies = [
"phf_shared 0.10.0",
"rand 0.8.5",
]
[[package]]
name = "phf_shared"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7"
dependencies = [
"siphasher",
]
[[package]]
name = "phf_shared"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
dependencies = [
"siphasher",
]
[[package]]
name = "pin-project"
version = "1.0.12"
@ -1924,6 +2055,12 @@ version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@ -1985,6 +2122,20 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
"rand_hc",
"rand_pcg",
]
[[package]]
name = "rand"
version = "0.8.5"
@ -1992,8 +2143,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
"rand_chacha 0.3.1",
"rand_core 0.6.4",
]
[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"ppv-lite86",
"rand_core 0.5.1",
]
[[package]]
@ -2003,7 +2164,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
"rand_core 0.6.4",
]
[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.16",
]
[[package]]
@ -2012,7 +2182,25 @@ version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
"getrandom 0.2.9",
]
[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
"rand_core 0.5.1",
]
[[package]]
name = "rand_pcg"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429"
dependencies = [
"rand_core 0.5.1",
]
[[package]]
@ -2037,6 +2225,20 @@ dependencies = [
"num_cpus",
]
[[package]]
name = "readability"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7843b159286299dd2b90f06d904ae1a8017a650d88d716c85dd6f123947f399"
dependencies = [
"html5ever",
"lazy_static",
"markup5ever_rcdom",
"regex",
"reqwest",
"url",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
@ -2142,7 +2344,7 @@ dependencies = [
"num-traits",
"pkcs1",
"pkcs8",
"rand_core",
"rand_core 0.6.4",
"signature",
"spki",
"subtle",
@ -2389,7 +2591,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500"
dependencies = [
"digest",
"rand_core",
"rand_core 0.6.4",
]
[[package]]
@ -2590,7 +2792,7 @@ dependencies = [
"memchr",
"once_cell",
"percent-encoding",
"rand",
"rand 0.8.5",
"rsa",
"serde",
"sha1",
@ -2631,7 +2833,7 @@ dependencies = [
"md-5",
"memchr",
"once_cell",
"rand",
"rand 0.8.5",
"serde",
"serde_json",
"sha1",
@ -2669,6 +2871,32 @@ dependencies = [
"uuid",
]
[[package]]
name = "string_cache"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b"
dependencies = [
"new_debug_unreachable",
"once_cell",
"parking_lot",
"phf_shared 0.10.0",
"precomputed-hash",
"serde",
]
[[package]]
name = "string_cache_codegen"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988"
dependencies = [
"phf_generator 0.10.0",
"phf_shared 0.10.0",
"proc-macro2",
"quote",
]
[[package]]
name = "stringprep"
version = "0.1.2"
@ -2732,6 +2960,17 @@ dependencies = [
"windows-sys 0.45.0",
]
[[package]]
name = "tendril"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0"
dependencies = [
"futf",
"mac",
"utf-8",
]
[[package]]
name = "termcolor"
version = "1.2.0"
@ -3113,6 +3352,12 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8parse"
version = "0.2.1"
@ -3125,7 +3370,7 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2"
dependencies = [
"getrandom",
"getrandom 0.2.9",
"serde",
]
@ -3209,6 +3454,12 @@ dependencies = [
"try-lock",
]
[[package]]
name = "wasi"
version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
@ -3518,6 +3769,18 @@ dependencies = [
"winapi",
]
[[package]]
name = "xml5ever"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9234163818fd8e2418fcde330655e757900d4236acd8cc70fef345ef91f6d865"
dependencies = [
"log",
"mac",
"markup5ever",
"time 0.1.45",
]
[[package]]
name = "zeroize"
version = "1.6.0"

View File

@ -24,6 +24,7 @@ feed-rs = "1.3"
maud = { version = "0.25", features = ["axum"] }
notify = "6"
once_cell = "1.17"
readability = "0.2"
reqwest = { version = "0.11", features = ["json"] }
serde = { version = "1", features = ["derive"] }
serde_with = "3"

180
src/actors/entry_crawler.rs Normal file
View File

@ -0,0 +1,180 @@
use std::fmt::{self, Display, Formatter};
use std::fs;
use std::path::Path;
use std::sync::Arc;
use bytes::Buf;
use feed_rs::parser;
use readability::extractor;
use reqwest::Client;
use sqlx::PgPool;
use tokio::sync::{broadcast, mpsc, Mutex};
use tracing::{info, instrument};
use url::Url;
use crate::config::Config;
use crate::models::entry::{update_entry, CreateEntry, Entry};
use crate::models::feed::{upsert_feed, CreateFeed, Feed};
/// The `EntryCrawler` actor fetches an entry url, extracts the content, and saves the content to
/// the file system and any associated metadata to the database.
///
/// It receives `EntryCrawlerMessage` messages via the `receiver` channel. It communicates back to
/// the sender of those messages via the `respond_to` channel on the `EntryCrawlerMessage`.
///
/// `EntryCrawler` should not be instantiated directly. Instead, use the `EntryCrawlerHandle`.
struct EntryCrawler {
    /// Channel on which `EntryCrawlerMessage`s arrive from the handle.
    receiver: mpsc::Receiver<EntryCrawlerMessage>,
    /// Database connection pool (not yet used by `crawl_entry`; reserved for the
    /// commented-out metadata update — see the TODO there).
    pool: PgPool,
    /// HTTP client used to fetch entry pages.
    client: Client,
    /// Directory where extracted content is written as `{entry_id}.html` / `{entry_id}.txt`.
    content_dir: String,
}
/// Messages accepted by the `EntryCrawler` actor.
#[derive(Debug)]
enum EntryCrawlerMessage {
    /// Fetch the entry's URL, extract readable content, and write it to the content directory.
    Crawl {
        entry: Entry,
        /// Broadcast channel over which the crawl result is sent back to the initiator.
        respond_to: broadcast::Sender<EntryCrawlerHandleMessage>,
    },
}
impl Display for EntryCrawlerMessage {
    /// Renders the message as `Crawl(<entry url>)` for logging and tracing spans.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Single-variant enum, so this pattern is irrefutable.
        let EntryCrawlerMessage::Crawl { entry, .. } = self;
        write!(f, "Crawl({})", entry.url)
    }
}
/// An error type that enumerates possible failures during a crawl and is cloneable and can be sent
/// across threads (does not reference the originating Errors which are usually not cloneable).
#[derive(thiserror::Error, Debug, Clone)]
pub enum EntryCrawlerError {
    /// The entry's stored URL could not be parsed by `url::Url`.
    #[error("invalid entry url: {0}")]
    InvalidUrl(String),
    /// The HTTP request for the entry page failed (either sending or reading the body).
    #[error("failed to fetch entry: {0}")]
    FetchError(String),
    /// The readability extractor could not produce article content from the fetched page.
    #[error("failed to extract content for entry: {0}")]
    ExtractError(String),
    /// Persisting entry metadata to the database failed.
    #[error("failed to create entry: {0}")]
    CreateEntryError(String),
    /// Writing the extracted `.html` / `.txt` files to the content directory failed.
    #[error("failed to save entry content: {0}")]
    SaveContentError(String),
}

/// Result alias whose error type defaults to `EntryCrawlerError`.
pub type EntryCrawlerResult<T, E = EntryCrawlerError> = ::std::result::Result<T, E>;
impl EntryCrawler {
    /// Builds a new `EntryCrawler`. Callers should use `EntryCrawlerHandle::new`
    /// instead of constructing this directly.
    fn new(
        receiver: mpsc::Receiver<EntryCrawlerMessage>,
        pool: PgPool,
        client: Client,
        content_dir: String,
    ) -> Self {
        EntryCrawler {
            receiver,
            pool,
            client,
            content_dir,
        }
    }

    /// Fetches the entry's URL, extracts readable article content, and writes both the HTML and
    /// plain-text renditions to `{content_dir}/{entry_id}.html` / `.txt`.
    ///
    /// Returns the (unmodified) entry on success; each failing step maps to a distinct
    /// `EntryCrawlerError` variant carrying the entry URL.
    #[instrument(skip_all, fields(entry = %entry.url))]
    async fn crawl_entry(&self, entry: Entry) -> EntryCrawlerResult<Entry> {
        info!("Fetching and parsing entry");
        let content_dir = Path::new(&self.content_dir);
        // Ensure the content directory exists before writing into it; otherwise the
        // writes below fail with a NotFound error on a fresh deployment.
        fs::create_dir_all(content_dir)
            .map_err(|_| EntryCrawlerError::SaveContentError(entry.url.clone()))?;
        let url =
            Url::parse(&entry.url).map_err(|_| EntryCrawlerError::InvalidUrl(entry.url.clone()))?;
        let bytes = self
            .client
            .get(url.clone())
            .send()
            .await
            .map_err(|_| EntryCrawlerError::FetchError(entry.url.clone()))?
            .bytes()
            .await
            .map_err(|_| EntryCrawlerError::FetchError(entry.url.clone()))?;
        let article = extractor::extract(&mut bytes.reader(), &url)
            .map_err(|_| EntryCrawlerError::ExtractError(entry.url.clone()))?;
        let id = entry.entry_id;
        // TODO: update entry with scraped data
        // if let Some(date) = article.date {
        //     // prefer scraped date over rss feed date
        //     let mut updated_entry = entry.clone();
        //     updated_entry.published_at = date;
        //     entry = update_entry(&self.pool, updated_entry)
        //         .await
        //         .map_err(|_| EntryCrawlerError::CreateEntryError(entry.url.clone()))?;
        // };
        // NOTE(review): std::fs::write is blocking I/O inside an async fn; if extracted
        // articles grow large, consider tokio::fs::write or spawn_blocking — TODO confirm.
        fs::write(content_dir.join(format!("{}.html", id)), article.content)
            .map_err(|_| EntryCrawlerError::SaveContentError(entry.url.clone()))?;
        fs::write(content_dir.join(format!("{}.txt", id)), article.text)
            .map_err(|_| EntryCrawlerError::SaveContentError(entry.url.clone()))?;
        Ok(entry)
    }

    /// Dispatches a single received message to its handler.
    #[instrument(skip_all, fields(msg = %msg))]
    async fn handle_message(&mut self, msg: EntryCrawlerMessage) {
        match msg {
            EntryCrawlerMessage::Crawl { entry, respond_to } => {
                let result = self.crawl_entry(entry).await;
                // ignore the result since the initiator may have cancelled waiting for the
                // response, and that is ok
                let _ = respond_to.send(EntryCrawlerHandleMessage::Entry(result));
            }
        }
    }

    /// Runs the actor loop until every sender side of `receiver` has been dropped.
    #[instrument(skip_all)]
    async fn run(&mut self) {
        info!("starting entry crawler");
        while let Some(msg) = self.receiver.recv().await {
            self.handle_message(msg).await;
        }
    }
}
/// The `EntryCrawlerHandle` is used to initialize and communicate with a `EntryCrawler` actor.
///
/// The `EntryCrawler` actor fetches an entry url, extracts the content, and saves the content to
/// the file system and any associated metadata to the database. It runs as a separate
/// asynchronous task from the main web server and communicates via channels.
#[derive(Clone)]
pub struct EntryCrawlerHandle {
    /// Channel for sending `EntryCrawlerMessage`s to the spawned actor task.
    sender: mpsc::Sender<EntryCrawlerMessage>,
}
/// The `EntryCrawlerHandleMessage` is the response to a `EntryCrawlerMessage` sent to the
/// `EntryCrawlerHandle`.
///
/// `EntryCrawlerHandleMessage::Entry` contains the result of crawling an entry url.
#[derive(Clone)]
pub enum EntryCrawlerHandleMessage {
    /// The outcome of a single `Crawl` request: the entry on success, or the crawl error.
    Entry(EntryCrawlerResult<Entry>),
}
impl EntryCrawlerHandle {
    /// Creates an async actor task that will listen for messages on the `sender` channel.
    pub fn new(pool: PgPool, client: Client, content_dir: String) -> Self {
        let (tx, rx) = mpsc::channel(8);
        let mut actor = EntryCrawler::new(rx, pool, client, content_dir);
        tokio::spawn(async move { actor.run().await });
        Self { sender: tx }
    }

    /// Sends a `EntryCrawlerMessage::Crawl` message to the running `EntryCrawler` actor.
    ///
    /// Listen to the result of the crawl via the returned `broadcast::Receiver`.
    pub async fn crawl(&self, entry: Entry) -> broadcast::Receiver<EntryCrawlerHandleMessage> {
        let (result_tx, result_rx) = broadcast::channel(8);
        self.sender
            .send(EntryCrawlerMessage::Crawl {
                entry,
                respond_to: result_tx,
            })
            .await
            .expect("entry crawler task has died");
        result_rx
    }
}

View File

@ -1,18 +1,21 @@
use std::fmt::{self, Display, Formatter};
use chrono::Utc;
use feed_rs::parser;
use reqwest::Client;
use sqlx::PgPool;
use tokio::sync::{broadcast, mpsc};
use tracing::{info, instrument};
use tracing::log::warn;
use tracing::{info, info_span, instrument};
use url::Url;
use crate::models::entry::Entry;
use crate::actors::entry_crawler::EntryCrawlerHandle;
use crate::models::entry::{upsert_entries, CreateEntry, Entry};
use crate::models::feed::{upsert_feed, CreateFeed, Feed};
/// The `FeedCrawler` actor fetches a feed url, parses it, and saves it to the database.
///
/// It receives `FeedCrawlerMessage` messages via the `receiver` channel. It communicates back to
/// It receives `FeedCrawlerMessage` messages via the `receiver` channel. It communicates back to
/// the sender of those messages via the `respond_to` channel on the `FeedCrawlerMessage`.
///
/// `FeedCrawler` should not be instantiated directly. Instead, use the `FeedCrawlerHandle`.
@ -20,6 +23,7 @@ struct FeedCrawler {
receiver: mpsc::Receiver<FeedCrawlerMessage>,
pool: PgPool,
client: Client,
content_dir: String,
}
#[derive(Debug)]
@ -38,7 +42,7 @@ impl Display for FeedCrawlerMessage {
}
}
/// An error type that enumerates possible failures during a crawl and is cloneable and can be sent
/// An error type that enumerates possible failures during a crawl and is cloneable and can be sent
/// across threads (does not reference the originating Errors which are usually not cloneable).
#[derive(thiserror::Error, Debug, Clone)]
pub enum FeedCrawlerError {
@ -48,15 +52,23 @@ pub enum FeedCrawlerError {
ParseError(Url),
#[error("failed to create feed: {0}")]
CreateFeedError(Url),
#[error("failed to create feed entries: {0}")]
CreateFeedEntriesError(Url),
}
pub type FeedCrawlerResult<T, E = FeedCrawlerError> = ::std::result::Result<T, E>;
impl FeedCrawler {
fn new(receiver: mpsc::Receiver<FeedCrawlerMessage>, pool: PgPool, client: Client) -> Self {
fn new(
receiver: mpsc::Receiver<FeedCrawlerMessage>,
pool: PgPool,
client: Client,
content_dir: String,
) -> Self {
FeedCrawler {
receiver,
pool,
client,
content_dir,
}
}
@ -87,6 +99,40 @@ impl FeedCrawler {
.await
.map_err(|_| FeedCrawlerError::CreateFeedError(url.clone()))?;
info!(%feed.feed_id, "upserted feed");
let mut payload = Vec::with_capacity(parsed_feed.entries.len());
for entry in parsed_feed.entries {
let entry_span = info_span!("entry", id = entry.id);
let _entry_span_guard = entry_span.enter();
if let Some(link) = entry.links.get(0) {
// if no scraped or feed date is available, fallback to the current time
let published_at = entry.published.unwrap_or_else(Utc::now);
let entry = CreateEntry {
title: entry.title.map(|t| t.content),
url: link.href.clone(),
description: entry.summary.map(|s| s.content),
feed_id: feed.feed_id,
published_at,
};
payload.push(entry);
} else {
warn!("Skipping feed entry with no links");
}
}
let entries = upsert_entries(&self.pool, payload)
.await
.map_err(|_| FeedCrawlerError::CreateFeedEntriesError(url.clone()))?;
info!("Created {} entries", entries.len());
for entry in entries {
let entry_crawler = EntryCrawlerHandle::new(
self.pool.clone(),
self.client.clone(),
self.content_dir.clone(),
);
// TODO: ignoring this receiver for the time being, pipe through events eventually
let _ = entry_crawler.crawl(entry).await;
}
Ok(feed)
}
@ -124,7 +170,7 @@ pub struct FeedCrawlerHandle {
/// `FeedCrawlerHandle`.
///
/// `FeedCrawlerHandleMessage::Feed` contains the result of crawling a feed url.
/// `FeedCrawlerHandleMessage::Entry` contains the result of crawling an entry url.
/// `FeedCrawlerHandleMessage::Entry` contains the result of crawling an entry url within the feed.
#[derive(Clone)]
pub enum FeedCrawlerHandleMessage {
Feed(FeedCrawlerResult<Feed>),
@ -133,9 +179,9 @@ pub enum FeedCrawlerHandleMessage {
impl FeedCrawlerHandle {
/// Creates an async actor task that will listen for messages on the `sender` channel.
pub fn new(pool: PgPool, client: Client) -> Self {
pub fn new(pool: PgPool, client: Client, content_dir: String) -> Self {
let (sender, receiver) = mpsc::channel(8);
let mut crawler = FeedCrawler::new(receiver, pool, client);
let mut crawler = FeedCrawler::new(receiver, pool, client, content_dir);
tokio::spawn(async move { crawler.run().await });
Self { sender }

View File

@ -1 +1,2 @@
pub mod entry_crawler;
pub mod feed_crawler;

View File

@ -17,6 +17,7 @@ use tokio_stream::StreamExt;
use url::Url;
use crate::actors::feed_crawler::{FeedCrawlerHandle, FeedCrawlerHandleMessage};
use crate::config::Config;
use crate::error::{Error, Result};
use crate::models::entry::get_entries_for_feed;
use crate::models::feed::{create_feed, delete_feed, get_feed, CreateFeed, FeedType};
@ -108,11 +109,13 @@ impl IntoResponse for AddFeedError {
pub async fn post(
State(pool): State<PgPool>,
State(crawls): State<Crawls>,
State(config): State<Config>,
Form(add_feed): Form<AddFeed>,
) -> AddFeedResult<Response> {
// TODO: store the client in axum state (as long as it can be used concurrently?)
let client = Client::new();
let feed_crawler = FeedCrawlerHandle::new(pool.clone(), client.clone());
let feed_crawler =
FeedCrawlerHandle::new(pool.clone(), client.clone(), config.content_dir.clone());
let feed = create_feed(
&pool,

View File

@ -13,6 +13,7 @@ use crate::models::feed::get_feeds;
use crate::models::entry::{update_entry, upsert_entries, CreateEntry};
use crate::uuid::Base62Uuid;
/// DEPRECATED: Use FeedCrawler instead, keeping this for reference until I set up scheduled jobs.
/// For every feed in the database, fetches the feed, parses it, and saves new entries to the
/// database.
pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {

View File

@ -146,6 +146,37 @@ pub async fn create_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry>
})
}
/// Inserts an entry, or — when a row with the same `(url, feed_id)` already exists — updates
/// its title, description, and published_at to the incoming values, returning the resulting row.
///
/// Maps a Postgres foreign-key violation (SQLSTATE 23503) on `feed_id` to
/// `Error::RelationNotFound("feed")`; any other database error becomes `Error::Sqlx`.
pub async fn upsert_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry> {
    // Reject invalid payloads before touching the database.
    payload.validate()?;
    sqlx::query_as!(
        Entry,
        "insert into entry (
            title, url, description, feed_id, published_at
        ) values (
            $1, $2, $3, $4, $5
        ) on conflict (url, feed_id) do update set
            title = excluded.title,
            description = excluded.description,
            published_at = excluded.published_at
        returning *",
        payload.title,
        payload.url,
        payload.description,
        payload.feed_id,
        payload.published_at,
    )
    .fetch_one(pool)
    .await
    .map_err(|error| {
        if let sqlx::error::Error::Database(ref psql_error) = error {
            // 23503 = foreign_key_violation: the referenced feed row does not exist.
            if psql_error.code().as_deref() == Some("23503") {
                return Error::RelationNotFound("feed");
            }
        }
        Error::Sqlx(error)
    })
}
pub async fn create_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
let mut titles = Vec::with_capacity(payload.len());
let mut urls = Vec::with_capacity(payload.len());
@ -209,7 +240,10 @@ pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
"insert into entry (
title, url, description, feed_id, published_at
) select * from unnest($1::text[], $2::text[], $3::text[], $4::uuid[], $5::timestamptz[])
on conflict do nothing
on conflict (url, feed_id) do update set
title = excluded.title,
description = excluded.description,
published_at = excluded.published_at
returning *",
titles.as_slice() as &[Option<String>],
urls.as_slice(),

View File

@ -94,6 +94,7 @@ impl Layout {
head {
meta charset="utf-8";
title { (self.title) }
// TODO: vendor this before going to prod
script type="module" {
r#"import * as Turbo from 'https://cdn.skypack.dev/@hotwired/turbo';"#
}