diff --git a/migrations/20230507201612_initial.sql b/migrations/20230507201612_initial.sql
index 12a9771..8f6b5cf 100644
--- a/migrations/20230507201612_initial.sql
+++ b/migrations/20230507201612_initial.sql
@@ -13,7 +13,7 @@ CREATE TABLE IF NOT EXISTS "feeds" (
 CREATE INDEX "feeds_deleted_at" ON "feeds" ("deleted_at");
 CREATE UNIQUE INDEX "feeds_url" ON "feeds" ("url");
 
-CREATE TABLE IF NOT EXISTS "items" (
+CREATE TABLE IF NOT EXISTS "entries" (
     "id" SERIAL PRIMARY KEY NOT NULL,
     "title" VARCHAR(255) NOT NULL,
     "url" VARCHAR(2048) NOT NULL,
@@ -23,5 +23,5 @@ CREATE TABLE IF NOT EXISTS "items" (
     "updated_at" timestamp(3) NOT NULL,
     "deleted_at" timestamp(3)
 );
-CREATE INDEX "items_deleted_at" ON "items" ("deleted_at");
-CREATE UNIQUE INDEX "items_url_and_feed_id" ON "items" ("url", "feed_id");
+CREATE INDEX "entries_deleted_at" ON "entries" ("deleted_at");
+CREATE UNIQUE INDEX "entries_url_and_feed_id" ON "entries" ("url", "feed_id");
diff --git a/src/bin/cli.rs b/src/bin/cli.rs
index fd92358..54c7a3f 100644
--- a/src/bin/cli.rs
+++ b/src/bin/cli.rs
@@ -7,7 +7,7 @@ use tracing::info;
 
 use lib::jobs::crawl::crawl;
 use lib::models::feed::{create_feed, delete_feed, CreateFeed, FeedType};
-use lib::models::item::{create_item, delete_item, CreateItem};
+use lib::models::entry::{create_entry, delete_entry, CreateEntry};
 
 #[derive(FromArgs)]
 /// CLI for crawlnicle
@@ -21,8 +21,8 @@ struct Args {
 enum Commands {
     AddFeed(AddFeed),
     DeleteFeed(DeleteFeed),
-    AddItem(AddItem),
-    DeleteItem(DeleteItem),
+    AddEntry(AddEntry),
+    DeleteEntry(DeleteEntry),
     Crawl(Crawl),
 }
 
@@ -54,34 +54,34 @@ struct DeleteFeed {
 }
 
 #[derive(FromArgs)]
-/// Add an item to the database
-#[argh(subcommand, name = "add-item")]
-struct AddItem {
+/// Add an entry to the database
+#[argh(subcommand, name = "add-entry")]
+struct AddEntry {
     #[argh(option)]
-    /// title of the item (max 255 characters)
+    /// title of the entry (max 255 characters)
     title: String,
     #[argh(option)]
-    /// URL of the item (max 2048 characters)
+    /// URL of the entry (max 2048 characters)
     url: String,
     #[argh(option)]
-    /// description of the item
+    /// description of the entry
     description: Option<String>,
     #[argh(option)]
-    /// source feed for the item
+    /// source feed for the entry
     feed_id: i32,
 }
 
 #[derive(FromArgs)]
-/// Delete an item from the database
-#[argh(subcommand, name = "delete-item")]
-struct DeleteItem {
+/// Delete an entry from the database
+#[argh(subcommand, name = "delete-entry")]
+struct DeleteEntry {
     #[argh(positional)]
-    /// id of the item to delete
+    /// id of the entry to delete
     id: i32,
 }
 
 #[derive(FromArgs)]
-/// Delete an item from the database
+/// Delete an entry from the database
 #[argh(subcommand, name = "crawl")]
 struct Crawl {}
 
@@ -118,10 +118,10 @@ pub async fn main() -> Result<()> {
             delete_feed(&pool, args.id).await?;
             info!("Deleted feed with id {}", args.id);
         }
-        Commands::AddItem(args) => {
-            let item = create_item(
+        Commands::AddEntry(args) => {
+            let entry = create_entry(
                 &pool,
-                CreateItem {
+                CreateEntry {
                     title: args.title,
                     url: args.url,
                     description: args.description,
@@ -129,11 +129,11 @@ pub async fn main() -> Result<()> {
                 },
             )
             .await?;
-            info!("Created item with id {}", item.id);
+            info!("Created entry with id {}", entry.id);
         }
-        Commands::DeleteItem(args) => {
-            delete_item(&pool, args.id).await?;
-            info!("Deleted item with id {}", args.id);
+        Commands::DeleteEntry(args) => {
+            delete_entry(&pool, args.id).await?;
+            info!("Deleted entry with id {}", args.id);
         }
         Commands::Crawl(_) => {
             info!("Crawling...");
diff --git a/src/handlers/items.rs b/src/handlers/entries.rs
similarity index 57%
rename from src/handlers/items.rs
rename to src/handlers/entries.rs
index 63485b9..d84818d 100644
--- a/src/handlers/items.rs
+++ b/src/handlers/entries.rs
@@ -2,8 +2,8 @@ use axum::{extract::State, Json};
 use sqlx::PgPool;
 
 use crate::error::Error;
-use crate::models::item::{get_items, Item};
+use crate::models::entry::{get_entries, Entry};
 
-pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Item>>, Error> {
-    Ok(Json(get_items(&pool).await?))
+pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Entry>>, Error> {
+    Ok(Json(get_entries(&pool).await?))
 }
diff --git a/src/handlers/entry.rs b/src/handlers/entry.rs
new file mode 100644
index 0000000..816fa6a
--- /dev/null
+++ b/src/handlers/entry.rs
@@ -0,0 +1,19 @@
+use axum::{
+    extract::{Path, State},
+    Json,
+};
+use sqlx::PgPool;
+
+use crate::error::Error;
+use crate::models::entry::{create_entry, get_entry, CreateEntry, Entry};
+
+pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Entry>, Error> {
+    Ok(Json(get_entry(&pool, id).await?))
+}
+
+pub async fn post(
+    State(pool): State<PgPool>,
+    Json(payload): Json<CreateEntry>,
+) -> Result<Json<Entry>, Error> {
+    Ok(Json(create_entry(&pool, payload).await?))
+}
diff --git a/src/handlers/item.rs b/src/handlers/item.rs
deleted file mode 100644
index 3c08093..0000000
--- a/src/handlers/item.rs
+++ /dev/null
@@ -1,19 +0,0 @@
-use axum::{
-    extract::{Path, State},
-    Json,
-};
-use sqlx::PgPool;
-
-use crate::error::Error;
-use crate::models::item::{create_item, get_item, CreateItem, Item};
-
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Item>, Error> {
-    Ok(Json(get_item(&pool, id).await?))
-}
-
-pub async fn post(
-    State(pool): State<PgPool>,
-    Json(payload): Json<CreateItem>,
-) -> Result<Json<Item>, Error> {
-    Ok(Json(create_item(&pool, payload).await?))
-}
diff --git a/src/handlers/mod.rs b/src/handlers/mod.rs
index f0d504c..bbbde54 100644
--- a/src/handlers/mod.rs
+++ b/src/handlers/mod.rs
@@ -1,4 +1,4 @@
 pub mod feed;
 pub mod feeds;
-pub mod item;
-pub mod items;
+pub mod entry;
+pub mod entries;
diff --git a/src/jobs/crawl.rs b/src/jobs/crawl.rs
index bbc6305..13ffb45 100644
--- a/src/jobs/crawl.rs
+++ b/src/jobs/crawl.rs
@@ -4,9 +4,9 @@ use sqlx::PgPool;
 use tracing::info;
 
 use crate::models::feed::get_feeds;
-use crate::models::item::{upsert_items, CreateItem};
+use crate::models::entry::{upsert_entries, CreateEntry};
 
-/// For every feed in the database, fetches the feed, parses it, and saves new items to the
+/// For every feed in the database, fetches the feed, parses it, and saves new entries to the
 /// database.
 pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
     let client = Client::new();
@@ -16,7 +16,7 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
         let parsed_feed = parser::parse(&bytes[..])?;
         let mut payload = Vec::with_capacity(parsed_feed.entries.len());
         for entry in parsed_feed.entries {
-            let item = CreateItem {
+            let entry = CreateEntry {
                 title: entry
                     .title
                     .map_or_else(|| "No title".to_string(), |t| t.content),
@@ -27,10 +27,10 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
                 description: entry.summary.map(|s| s.content),
                 feed_id: feed.id,
             };
-            payload.push(item);
+            payload.push(entry);
         }
-        let items = upsert_items(pool, payload).await?;
-        info!("Created {} items for feed {}", items.len(), feed.id);
+        let entries = upsert_entries(pool, payload).await?;
+        info!("Created {} entries for feed {}", entries.len(), feed.id);
     }
     Ok(())
 }
diff --git a/src/main.rs b/src/main.rs
index d1d52df..48d39bc 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -28,9 +28,9 @@ async fn main() -> anyhow::Result<()> {
         .route("/v1/feeds", get(handlers::feeds::get))
         .route("/v1/feed", post(handlers::feed::post))
         .route("/v1/feed/:id", get(handlers::feed::get))
-        .route("/v1/items", get(handlers::items::get))
-        .route("/v1/item", post(handlers::item::post))
-        .route("/v1/item/:id", get(handlers::item::get))
+        .route("/v1/entries", get(handlers::entries::get))
+        .route("/v1/entry", post(handlers::entry::post))
+        .route("/v1/entry/:id", get(handlers::entry::get))
         .with_state(pool)
         .layer(ServiceBuilder::new().layer(TraceLayer::new_for_http()));
 
diff --git a/src/models/item.rs b/src/models/entry.rs
similarity index 71%
rename from src/models/item.rs
rename to src/models/entry.rs
index 1b8d291..3813ae7 100644
--- a/src/models/item.rs
+++ b/src/models/entry.rs
@@ -6,7 +6,7 @@ use validator::{Validate, ValidationErrors};
 use crate::error::{Error, Result};
 
 #[derive(Debug, Serialize, Deserialize)]
-pub struct Item {
+pub struct Entry {
     pub id: i32,
     pub title: String,
     pub url: String,
@@ -18,7 +18,7 @@ pub struct Item {
 }
 
 #[derive(Debug, Deserialize, Validate)]
-pub struct CreateItem {
+pub struct CreateEntry {
     #[validate(length(max = 255))]
     pub title: String,
     #[validate(url)]
@@ -29,29 +29,29 @@ pub struct CreateItem {
     pub feed_id: i32,
 }
 
-pub async fn get_item(pool: &PgPool, id: i32) -> Result<Item> {
-    sqlx::query_as!(Item, "SELECT * FROM items WHERE id = $1", id)
+pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
+    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE id = $1", id)
         .fetch_one(pool)
         .await
         .map_err(|error| {
             if let sqlx::error::Error::RowNotFound = error {
-                return Error::NotFound("item", id);
+                return Error::NotFound("entry", id);
             }
             Error::Sqlx(error)
         })
 }
 
-pub async fn get_items(pool: &PgPool) -> sqlx::Result<Vec<Item>> {
-    sqlx::query_as!(Item, "SELECT * FROM items WHERE deleted_at IS NULL")
+pub async fn get_entries(pool: &PgPool) -> sqlx::Result<Vec<Entry>> {
+    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE deleted_at IS NULL")
         .fetch_all(pool)
         .await
 }
 
-pub async fn create_item(pool: &PgPool, payload: CreateItem) -> Result<Item> {
+pub async fn create_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry> {
     payload.validate()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) VALUES (
             $1, $2, $3, $4, now(), now()
@@ -73,21 +73,21 @@ pub async fn create_item(pool: &PgPool, payload: CreateItem) -> Result<Item> {
 }
 
-pub async fn create_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
+pub async fn create_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
     let mut titles = Vec::with_capacity(payload.len());
     let mut urls = Vec::with_capacity(payload.len());
     let mut descriptions: Vec<Option<String>> = Vec::with_capacity(payload.len());
     let mut feed_ids = Vec::with_capacity(payload.len());
-    payload.iter().map(|item| {
-        titles.push(item.title.clone());
-        urls.push(item.url.clone());
-        descriptions.push(item.description.clone());
-        feed_ids.push(item.feed_id);
-        item.validate()
+    payload.iter().map(|entry| {
+        titles.push(entry.title.clone());
+        urls.push(entry.url.clone());
+        descriptions.push(entry.description.clone());
+        feed_ids.push(entry.feed_id);
+        entry.validate()
     }).collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) SELECT *, now(), now() FROM
         UNNEST($1::text[], $2::text[], $3::text[], $4::int[])
         RETURNING *",
@@ -108,21 +108,21 @@ pub async fn create_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
 }
 
-pub async fn upsert_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
+pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
     let mut titles = Vec::with_capacity(payload.len());
     let mut urls = Vec::with_capacity(payload.len());
     let mut descriptions: Vec<Option<String>> = Vec::with_capacity(payload.len());
     let mut feed_ids = Vec::with_capacity(payload.len());
-    payload.iter().map(|item| {
-        titles.push(item.title.clone());
-        urls.push(item.url.clone());
-        descriptions.push(item.description.clone());
-        feed_ids.push(item.feed_id);
-        item.validate()
+    payload.iter().map(|entry| {
+        titles.push(entry.title.clone());
+        urls.push(entry.url.clone());
+        descriptions.push(entry.description.clone());
+        feed_ids.push(entry.feed_id);
+        entry.validate()
     }).collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) SELECT *, now(), now() FROM
         UNNEST($1::text[], $2::text[], $3::text[], $4::int[])
         ON CONFLICT DO NOTHING
@@ -144,8 +144,8 @@ pub async fn upsert_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
 }
 
-pub async fn delete_item(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE items SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_entry(pool: &PgPool, id: i32) -> Result<()> {
+    sqlx::query!("UPDATE entries SET deleted_at = now() WHERE id = $1", id)
         .execute(pool)
         .await?;
     Ok(())
 }
diff --git a/src/models/mod.rs b/src/models/mod.rs
index e755316..2b1385d 100644
--- a/src/models/mod.rs
+++ b/src/models/mod.rs
@@ -1,2 +1,2 @@
-pub mod item;
+pub mod entry;
 pub mod feed;
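
Reviewer note: a minimal usage sketch of the renamed model API, for orientation only. The struct fields and function signatures come from the diff above; the surrounding function, the example values, and feed_id = 1 are illustrative assumptions, not part of this change.

    use lib::models::entry::{upsert_entries, CreateEntry};
    use sqlx::PgPool;

    // Illustrative only: upsert one entry and report how many rows were actually new,
    // assuming a connected PgPool (set up the same way as in cli.rs/main.rs).
    async fn example(pool: &PgPool) -> anyhow::Result<()> {
        let payload = vec![CreateEntry {
            title: "Example entry".to_string(),
            url: "https://example.com/post".to_string(),
            description: None,
            feed_id: 1, // hypothetical feed id for illustration
        }];
        // Re-running this is safe: the unique index "entries_url_and_feed_id" plus
        // ON CONFLICT DO NOTHING skips duplicate (url, feed_id) pairs, so only the
        // rows that were actually inserted come back from RETURNING *.
        let entries = upsert_entries(pool, payload).await?;
        println!("inserted {} new entries", entries.len());
        Ok(())
    }

This is also why the crawl job can call upsert_entries with every entry from a freshly fetched feed: previously crawled entries are silently skipped rather than duplicated or erroring out.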