Rename item to entry

Tyler Hallada 2023-05-13 15:39:39 -04:00
parent a331e63466
commit bf40b803a9
10 changed files with 89 additions and 89 deletions


@@ -13,7 +13,7 @@ CREATE TABLE IF NOT EXISTS "feeds" (
 CREATE INDEX "feeds_deleted_at" ON "feeds" ("deleted_at");
 CREATE UNIQUE INDEX "feeds_url" ON "feeds" ("url");
-CREATE TABLE IF NOT EXISTS "items" (
+CREATE TABLE IF NOT EXISTS "entries" (
 "id" SERIAL PRIMARY KEY NOT NULL,
 "title" VARCHAR(255) NOT NULL,
 "url" VARCHAR(2048) NOT NULL,
@@ -23,5 +23,5 @@ CREATE TABLE IF NOT EXISTS "items" (
 "updated_at" timestamp(3) NOT NULL,
 "deleted_at" timestamp(3)
 );
-CREATE INDEX "items_deleted_at" ON "items" ("deleted_at");
-CREATE UNIQUE INDEX "items_url_and_feed_id" ON "items" ("url", "feed_id");
+CREATE INDEX "entries_deleted_at" ON "entries" ("deleted_at");
+CREATE UNIQUE INDEX "entries_url_and_feed_id" ON "entries" ("url", "feed_id");


@@ -7,7 +7,7 @@ use tracing::info;
 use lib::jobs::crawl::crawl;
 use lib::models::feed::{create_feed, delete_feed, CreateFeed, FeedType};
-use lib::models::item::{create_item, delete_item, CreateItem};
+use lib::models::entry::{create_entry, delete_entry, CreateEntry};
 #[derive(FromArgs)]
 /// CLI for crawlnicle
@@ -21,8 +21,8 @@ struct Args {
 enum Commands {
     AddFeed(AddFeed),
     DeleteFeed(DeleteFeed),
-    AddItem(AddItem),
-    DeleteItem(DeleteItem),
+    AddEntry(AddEntry),
+    DeleteEntry(DeleteEntry),
     Crawl(Crawl),
 }
@@ -54,34 +54,34 @@ struct DeleteFeed {
 }
 #[derive(FromArgs)]
-/// Add an item to the database
-#[argh(subcommand, name = "add-item")]
-struct AddItem {
+/// Add an entry to the database
+#[argh(subcommand, name = "add-entry")]
+struct AddEntry {
     #[argh(option)]
-    /// title of the item (max 255 characters)
+    /// title of the entry (max 255 characters)
     title: String,
     #[argh(option)]
-    /// URL of the item (max 2048 characters)
+    /// URL of the entry (max 2048 characters)
     url: String,
     #[argh(option)]
-    /// description of the item
+    /// description of the entry
     description: Option<String>,
     #[argh(option)]
-    /// source feed for the item
+    /// source feed for the entry
     feed_id: i32,
 }
 #[derive(FromArgs)]
-/// Delete an item from the database
-#[argh(subcommand, name = "delete-item")]
-struct DeleteItem {
+/// Delete an entry from the database
+#[argh(subcommand, name = "delete-entry")]
+struct DeleteEntry {
     #[argh(positional)]
-    /// id of the item to delete
+    /// id of the entry to delete
     id: i32,
 }
 #[derive(FromArgs)]
-/// Delete an item from the database
+/// Delete an entry from the database
 #[argh(subcommand, name = "crawl")]
 struct Crawl {}
@@ -118,10 +118,10 @@ pub async fn main() -> Result<()> {
             delete_feed(&pool, args.id).await?;
             info!("Deleted feed with id {}", args.id);
         }
-        Commands::AddItem(args) => {
-            let item = create_item(
+        Commands::AddEntry(args) => {
+            let entry = create_entry(
                 &pool,
-                CreateItem {
+                CreateEntry {
                     title: args.title,
                     url: args.url,
                     description: args.description,
@@ -129,11 +129,11 @@ pub async fn main() -> Result<()> {
                 },
             )
             .await?;
-            info!("Created item with id {}", item.id);
+            info!("Created entry with id {}", entry.id);
         }
-        Commands::DeleteItem(args) => {
-            delete_item(&pool, args.id).await?;
-            info!("Deleted item with id {}", args.id);
+        Commands::DeleteEntry(args) => {
+            delete_entry(&pool, args.id).await?;
+            info!("Deleted entry with id {}", args.id);
         }
         Commands::Crawl(_) => {
             info!("Crawling...");


@@ -2,8 +2,8 @@ use axum::{extract::State, Json};
 use sqlx::PgPool;
 use crate::error::Error;
-use crate::models::item::{get_items, Item};
+use crate::models::entry::{get_entries, Entry};
-pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Item>>, Error> {
-    Ok(Json(get_items(&pool).await?))
+pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Entry>>, Error> {
+    Ok(Json(get_entries(&pool).await?))
 }

src/handlers/entry.rs (new file)

@@ -0,0 +1,19 @@
+use axum::{
+    extract::{Path, State},
+    Json,
+};
+use sqlx::PgPool;
+use crate::error::Error;
+use crate::models::entry::{create_entry, get_entry, CreateEntry, Entry};
+pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Entry>, Error> {
+    Ok(Json(get_entry(&pool, id).await?))
+}
+pub async fn post(
+    State(pool): State<PgPool>,
+    Json(payload): Json<CreateEntry>,
+) -> Result<Json<Entry>, Error> {
+    Ok(Json(create_entry(&pool, payload).await?))
+}


@@ -1,19 +0,0 @@
-use axum::{
-    extract::{Path, State},
-    Json,
-};
-use sqlx::PgPool;
-use crate::error::Error;
-use crate::models::item::{create_item, get_item, CreateItem, Item};
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Item>, Error> {
-    Ok(Json(get_item(&pool, id).await?))
-}
-pub async fn post(
-    State(pool): State<PgPool>,
-    Json(payload): Json<CreateItem>,
-) -> Result<Json<Item>, Error> {
-    Ok(Json(create_item(&pool, payload).await?))
-}


@@ -1,4 +1,4 @@
 pub mod feed;
 pub mod feeds;
-pub mod item;
-pub mod items;
+pub mod entry;
+pub mod entries;


@@ -4,9 +4,9 @@ use sqlx::PgPool;
 use tracing::info;
 use crate::models::feed::get_feeds;
-use crate::models::item::{upsert_items, CreateItem};
+use crate::models::entry::{upsert_entries, CreateEntry};
-/// For every feed in the database, fetches the feed, parses it, and saves new items to the
+/// For every feed in the database, fetches the feed, parses it, and saves new entries to the
 /// database.
 pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
     let client = Client::new();
@@ -16,7 +16,7 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
         let parsed_feed = parser::parse(&bytes[..])?;
         let mut payload = Vec::with_capacity(parsed_feed.entries.len());
         for entry in parsed_feed.entries {
-            let item = CreateItem {
+            let entry = CreateEntry {
                 title: entry
                     .title
                     .map_or_else(|| "No title".to_string(), |t| t.content),
@@ -27,10 +27,10 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
                 description: entry.summary.map(|s| s.content),
                 feed_id: feed.id,
             };
-            payload.push(item);
+            payload.push(entry);
         }
-        let items = upsert_items(pool, payload).await?;
-        info!("Created {} items for feed {}", items.len(), feed.id);
+        let entries = upsert_entries(pool, payload).await?;
+        info!("Created {} entries for feed {}", entries.len(), feed.id);
     }
     Ok(())
 }
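As the doc comment above describes, crawl fetches every feed, parses it, and upserts the resulting entries. A minimal sketch of driving it outside the CLI, assuming a Tokio runtime and a DATABASE_URL environment variable (neither is shown in this diff):

    use lib::jobs::crawl::crawl;
    use sqlx::PgPool;

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Same Postgres pool type that crawl() expects.
        let pool = PgPool::connect(&std::env::var("DATABASE_URL")?).await?;
        crawl(&pool).await?;
        Ok(())
    }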


@@ -28,9 +28,9 @@ async fn main() -> anyhow::Result<()> {
         .route("/v1/feeds", get(handlers::feeds::get))
         .route("/v1/feed", post(handlers::feed::post))
         .route("/v1/feed/:id", get(handlers::feed::get))
-        .route("/v1/items", get(handlers::items::get))
-        .route("/v1/item", post(handlers::item::post))
-        .route("/v1/item/:id", get(handlers::item::get))
+        .route("/v1/entries", get(handlers::entries::get))
+        .route("/v1/entry", post(handlers::entry::post))
+        .route("/v1/entry/:id", get(handlers::entry::get))
         .with_state(pool)
         .layer(ServiceBuilder::new().layer(TraceLayer::new_for_http()));
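The /v1/items and /v1/item routes become /v1/entries and /v1/entry. A hypothetical smoke test against the renamed endpoints using reqwest; the bind address and port are assumptions, not shown in this diff:

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        let client = reqwest::Client::new();

        // GET /v1/entries replaces GET /v1/items.
        let entries: serde_json::Value = client
            .get("http://localhost:3000/v1/entries")
            .send()
            .await?
            .json()
            .await?;
        println!("entries: {entries}");

        // GET /v1/entry/:id replaces GET /v1/item/:id.
        let entry: serde_json::Value = client
            .get("http://localhost:3000/v1/entry/1")
            .send()
            .await?
            .json()
            .await?;
        println!("entry: {entry}");
        Ok(())
    }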


@@ -6,7 +6,7 @@ use validator::{Validate, ValidationErrors};
 use crate::error::{Error, Result};
 #[derive(Debug, Serialize, Deserialize)]
-pub struct Item {
+pub struct Entry {
     pub id: i32,
     pub title: String,
     pub url: String,
@@ -18,7 +18,7 @@ pub struct Item {
 }
 #[derive(Debug, Deserialize, Validate)]
-pub struct CreateItem {
+pub struct CreateEntry {
     #[validate(length(max = 255))]
     pub title: String,
     #[validate(url)]
@@ -29,29 +29,29 @@ pub struct CreateItem {
     pub feed_id: i32,
 }
-pub async fn get_item(pool: &PgPool, id: i32) -> Result<Item> {
-    sqlx::query_as!(Item, "SELECT * FROM items WHERE id = $1", id)
+pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
+    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE id = $1", id)
         .fetch_one(pool)
         .await
         .map_err(|error| {
             if let sqlx::error::Error::RowNotFound = error {
-                return Error::NotFound("item", id);
+                return Error::NotFound("entry", id);
             }
             Error::Sqlx(error)
         })
 }
-pub async fn get_items(pool: &PgPool) -> sqlx::Result<Vec<Item>> {
-    sqlx::query_as!(Item, "SELECT * FROM items WHERE deleted_at IS NULL")
+pub async fn get_entries(pool: &PgPool) -> sqlx::Result<Vec<Entry>> {
+    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE deleted_at IS NULL")
         .fetch_all(pool)
         .await
 }
-pub async fn create_item(pool: &PgPool, payload: CreateItem) -> Result<Item> {
+pub async fn create_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry> {
     payload.validate()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) VALUES (
             $1, $2, $3, $4, now(), now()
@@ -73,21 +73,21 @@ pub async fn create_item(pool: &PgPool, payload: CreateItem) -> Result<Item> {
     })
 }
-pub async fn create_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
+pub async fn create_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
     let mut titles = Vec::with_capacity(payload.len());
     let mut urls = Vec::with_capacity(payload.len());
     let mut descriptions: Vec<Option<String>> = Vec::with_capacity(payload.len());
     let mut feed_ids = Vec::with_capacity(payload.len());
-    payload.iter().map(|item| {
-        titles.push(item.title.clone());
-        urls.push(item.url.clone());
-        descriptions.push(item.description.clone());
-        feed_ids.push(item.feed_id);
-        item.validate()
+    payload.iter().map(|entry| {
+        titles.push(entry.title.clone());
+        urls.push(entry.url.clone());
+        descriptions.push(entry.description.clone());
+        feed_ids.push(entry.feed_id);
+        entry.validate()
     }).collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::int[])
         RETURNING *",
@@ -108,21 +108,21 @@ pub async fn create_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
     })
 }
-pub async fn upsert_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
+pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
     let mut titles = Vec::with_capacity(payload.len());
     let mut urls = Vec::with_capacity(payload.len());
     let mut descriptions: Vec<Option<String>> = Vec::with_capacity(payload.len());
     let mut feed_ids = Vec::with_capacity(payload.len());
-    payload.iter().map(|item| {
-        titles.push(item.title.clone());
-        urls.push(item.url.clone());
-        descriptions.push(item.description.clone());
-        feed_ids.push(item.feed_id);
-        item.validate()
+    payload.iter().map(|entry| {
+        titles.push(entry.title.clone());
+        urls.push(entry.url.clone());
+        descriptions.push(entry.description.clone());
+        feed_ids.push(entry.feed_id);
+        entry.validate()
     }).collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::int[])
         ON CONFLICT DO NOTHING
@@ -144,8 +144,8 @@ pub async fn upsert_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
     })
 }
-pub async fn delete_item(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE items SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_entry(pool: &PgPool, id: i32) -> Result<()> {
+    sqlx::query!("UPDATE entries SET deleted_at = now() WHERE id = $1", id)
         .execute(pool)
         .await?;
     Ok(())
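Taken together, the renamed model API reads as below: a minimal sketch that inserts one entry, fetches it back, and soft-deletes it, using only the functions defined in this file (pool setup and an existing feed row are assumed):

    use sqlx::PgPool;

    use crate::error::Result;
    use crate::models::entry::{create_entry, delete_entry, get_entry, CreateEntry};

    async fn entry_roundtrip(pool: &PgPool) -> Result<()> {
        // create_entry runs the validator checks (title <= 255 chars, valid
        // URL) before inserting.
        let entry = create_entry(
            pool,
            CreateEntry {
                title: "Example entry".to_string(),
                url: "https://example.com/post".to_string(),
                description: None,
                feed_id: 1, // assumes a feed with id 1 exists
            },
        )
        .await?;

        let fetched = get_entry(pool, entry.id).await?;
        assert_eq!(fetched.url, entry.url);

        // delete_entry only sets deleted_at; the row stays in the table.
        delete_entry(pool, entry.id).await?;
        Ok(())
    }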


@@ -1,2 +1,2 @@
-pub mod item;
+pub mod entry;
 pub mod feed;