Rename item to entry
parent a331e63466
commit bf40b803a9
@@ -13,7 +13,7 @@ CREATE TABLE IF NOT EXISTS "feeds" (
 CREATE INDEX "feeds_deleted_at" ON "feeds" ("deleted_at");
 CREATE UNIQUE INDEX "feeds_url" ON "feeds" ("url");
 
-CREATE TABLE IF NOT EXISTS "items" (
+CREATE TABLE IF NOT EXISTS "entries" (
     "id" SERIAL PRIMARY KEY NOT NULL,
     "title" VARCHAR(255) NOT NULL,
     "url" VARCHAR(2048) NOT NULL,
@@ -23,5 +23,5 @@ CREATE TABLE IF NOT EXISTS "items" (
     "updated_at" timestamp(3) NOT NULL,
     "deleted_at" timestamp(3)
 );
-CREATE INDEX "items_deleted_at" ON "items" ("deleted_at");
-CREATE UNIQUE INDEX "items_url_and_feed_id" ON "items" ("url", "feed_id");
+CREATE INDEX "entries_deleted_at" ON "entries" ("deleted_at");
+CREATE UNIQUE INDEX "entries_url_and_feed_id" ON "entries" ("url", "feed_id");
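
Note: the commit renames the table by editing the original migration in place. For a database that has already applied the old migration, an equivalent standalone rename (not part of this commit, shown only as a sketch; the index names follow the ones defined above) would be:

ALTER TABLE "items" RENAME TO "entries";
ALTER INDEX "items_deleted_at" RENAME TO "entries_deleted_at";
ALTER INDEX "items_url_and_feed_id" RENAME TO "entries_url_and_feed_id";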
@@ -7,7 +7,7 @@ use tracing::info;
 
 use lib::jobs::crawl::crawl;
 use lib::models::feed::{create_feed, delete_feed, CreateFeed, FeedType};
-use lib::models::item::{create_item, delete_item, CreateItem};
+use lib::models::entry::{create_entry, delete_entry, CreateEntry};
 
 #[derive(FromArgs)]
 /// CLI for crawlnicle
@@ -21,8 +21,8 @@ struct Args {
 enum Commands {
     AddFeed(AddFeed),
     DeleteFeed(DeleteFeed),
-    AddItem(AddItem),
-    DeleteItem(DeleteItem),
+    AddEntry(AddEntry),
+    DeleteEntry(DeleteEntry),
     Crawl(Crawl),
 }
 
@@ -54,34 +54,34 @@ struct DeleteFeed {
 }
 
 #[derive(FromArgs)]
-/// Add an item to the database
-#[argh(subcommand, name = "add-item")]
-struct AddItem {
+/// Add an entry to the database
+#[argh(subcommand, name = "add-entry")]
+struct AddEntry {
     #[argh(option)]
-    /// title of the item (max 255 characters)
+    /// title of the entry (max 255 characters)
     title: String,
     #[argh(option)]
-    /// URL of the item (max 2048 characters)
+    /// URL of the entry (max 2048 characters)
     url: String,
     #[argh(option)]
-    /// description of the item
+    /// description of the entry
     description: Option<String>,
     #[argh(option)]
-    /// source feed for the item
+    /// source feed for the entry
     feed_id: i32,
 }
 
 #[derive(FromArgs)]
-/// Delete an item from the database
-#[argh(subcommand, name = "delete-item")]
-struct DeleteItem {
+/// Delete an entry from the database
+#[argh(subcommand, name = "delete-entry")]
+struct DeleteEntry {
     #[argh(positional)]
-    /// id of the item to delete
+    /// id of the entry to delete
     id: i32,
 }
 
 #[derive(FromArgs)]
-/// Delete an item from the database
+/// Delete an entry from the database
 #[argh(subcommand, name = "crawl")]
 struct Crawl {}
 
@@ -118,10 +118,10 @@ pub async fn main() -> Result<()> {
             delete_feed(&pool, args.id).await?;
             info!("Deleted feed with id {}", args.id);
         }
-        Commands::AddItem(args) => {
-            let item = create_item(
+        Commands::AddEntry(args) => {
+            let entry = create_entry(
                 &pool,
-                CreateItem {
+                CreateEntry {
                     title: args.title,
                     url: args.url,
                     description: args.description,
@@ -129,11 +129,11 @@ pub async fn main() -> Result<()> {
                 },
             )
             .await?;
-            info!("Created item with id {}", item.id);
+            info!("Created entry with id {}", entry.id);
         }
-        Commands::DeleteItem(args) => {
-            delete_item(&pool, args.id).await?;
-            info!("Deleted item with id {}", args.id);
+        Commands::DeleteEntry(args) => {
+            delete_entry(&pool, args.id).await?;
+            info!("Deleted entry with id {}", args.id);
         }
         Commands::Crawl(_) => {
            info!("Crawling...");
@@ -2,8 +2,8 @@ use axum::{extract::State, Json};
 use sqlx::PgPool;
 
 use crate::error::Error;
-use crate::models::item::{get_items, Item};
+use crate::models::entry::{get_entries, Entry};
 
-pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Item>>, Error> {
-    Ok(Json(get_items(&pool).await?))
+pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Entry>>, Error> {
+    Ok(Json(get_entries(&pool).await?))
 }
src/handlers/entry.rs (new file)
@@ -0,0 +1,19 @@
+use axum::{
+    extract::{Path, State},
+    Json,
+};
+use sqlx::PgPool;
+
+use crate::error::Error;
+use crate::models::entry::{create_entry, get_entry, CreateEntry, Entry};
+
+pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Entry>, Error> {
+    Ok(Json(get_entry(&pool, id).await?))
+}
+
+pub async fn post(
+    State(pool): State<PgPool>,
+    Json(payload): Json<CreateEntry>,
+) -> Result<Json<Entry>, Error> {
+    Ok(Json(create_entry(&pool, payload).await?))
+}
@@ -1,19 +0,0 @@
-use axum::{
-    extract::{Path, State},
-    Json,
-};
-use sqlx::PgPool;
-
-use crate::error::Error;
-use crate::models::item::{create_item, get_item, CreateItem, Item};
-
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Item>, Error> {
-    Ok(Json(get_item(&pool, id).await?))
-}
-
-pub async fn post(
-    State(pool): State<PgPool>,
-    Json(payload): Json<CreateItem>,
-) -> Result<Json<Item>, Error> {
-    Ok(Json(create_item(&pool, payload).await?))
-}
@@ -1,4 +1,4 @@
 pub mod feed;
 pub mod feeds;
-pub mod item;
-pub mod items;
+pub mod entry;
+pub mod entries;
@@ -4,9 +4,9 @@ use sqlx::PgPool;
 use tracing::info;
 
 use crate::models::feed::get_feeds;
-use crate::models::item::{upsert_items, CreateItem};
+use crate::models::entry::{upsert_entries, CreateEntry};
 
-/// For every feed in the database, fetches the feed, parses it, and saves new items to the
+/// For every feed in the database, fetches the feed, parses it, and saves new entries to the
 /// database.
 pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
     let client = Client::new();
@@ -16,7 +16,7 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
         let parsed_feed = parser::parse(&bytes[..])?;
         let mut payload = Vec::with_capacity(parsed_feed.entries.len());
         for entry in parsed_feed.entries {
-            let item = CreateItem {
+            let entry = CreateEntry {
                 title: entry
                     .title
                     .map_or_else(|| "No title".to_string(), |t| t.content),
@@ -27,10 +27,10 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
                 description: entry.summary.map(|s| s.content),
                 feed_id: feed.id,
             };
-            payload.push(item);
+            payload.push(entry);
         }
-        let items = upsert_items(pool, payload).await?;
-        info!("Created {} items for feed {}", items.len(), feed.id);
+        let entries = upsert_entries(pool, payload).await?;
+        info!("Created {} entries for feed {}", entries.len(), feed.id);
     }
     Ok(())
 }
@@ -28,9 +28,9 @@ async fn main() -> anyhow::Result<()> {
         .route("/v1/feeds", get(handlers::feeds::get))
         .route("/v1/feed", post(handlers::feed::post))
         .route("/v1/feed/:id", get(handlers::feed::get))
-        .route("/v1/items", get(handlers::items::get))
-        .route("/v1/item", post(handlers::item::post))
-        .route("/v1/item/:id", get(handlers::item::get))
+        .route("/v1/entries", get(handlers::entries::get))
+        .route("/v1/entry", post(handlers::entry::post))
+        .route("/v1/entry/:id", get(handlers::entry::get))
         .with_state(pool)
         .layer(ServiceBuilder::new().layer(TraceLayer::new_for_http()));
 
@@ -6,7 +6,7 @@ use validator::{Validate, ValidationErrors};
 use crate::error::{Error, Result};
 
 #[derive(Debug, Serialize, Deserialize)]
-pub struct Item {
+pub struct Entry {
     pub id: i32,
     pub title: String,
     pub url: String,
@@ -18,7 +18,7 @@ pub struct Item {
 }
 
 #[derive(Debug, Deserialize, Validate)]
-pub struct CreateItem {
+pub struct CreateEntry {
     #[validate(length(max = 255))]
     pub title: String,
     #[validate(url)]
@@ -29,29 +29,29 @@ pub struct CreateItem {
     pub feed_id: i32,
 }
 
-pub async fn get_item(pool: &PgPool, id: i32) -> Result<Item> {
-    sqlx::query_as!(Item, "SELECT * FROM items WHERE id = $1", id)
+pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
+    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE id = $1", id)
         .fetch_one(pool)
         .await
         .map_err(|error| {
             if let sqlx::error::Error::RowNotFound = error {
-                return Error::NotFound("item", id);
+                return Error::NotFound("entry", id);
             }
             Error::Sqlx(error)
         })
 }
 
-pub async fn get_items(pool: &PgPool) -> sqlx::Result<Vec<Item>> {
-    sqlx::query_as!(Item, "SELECT * FROM items WHERE deleted_at IS NULL")
+pub async fn get_entries(pool: &PgPool) -> sqlx::Result<Vec<Entry>> {
+    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE deleted_at IS NULL")
         .fetch_all(pool)
         .await
 }
 
-pub async fn create_item(pool: &PgPool, payload: CreateItem) -> Result<Item> {
+pub async fn create_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry> {
     payload.validate()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) VALUES (
             $1, $2, $3, $4, now(), now()
@@ -73,21 +73,21 @@ pub async fn create_item(pool: &PgPool, payload: CreateItem) -> Result<Item> {
         })
 }
 
-pub async fn create_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
+pub async fn create_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
     let mut titles = Vec::with_capacity(payload.len());
     let mut urls = Vec::with_capacity(payload.len());
     let mut descriptions: Vec<Option<String>> = Vec::with_capacity(payload.len());
     let mut feed_ids = Vec::with_capacity(payload.len());
-    payload.iter().map(|item| {
-        titles.push(item.title.clone());
-        urls.push(item.url.clone());
-        descriptions.push(item.description.clone());
-        feed_ids.push(item.feed_id);
-        item.validate()
+    payload.iter().map(|entry| {
+        titles.push(entry.title.clone());
+        urls.push(entry.url.clone());
+        descriptions.push(entry.description.clone());
+        feed_ids.push(entry.feed_id);
+        entry.validate()
     }).collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::int[])
         RETURNING *",
@@ -108,21 +108,21 @@ pub async fn create_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec
         })
 }
 
-pub async fn upsert_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec<Item>> {
+pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<Vec<Entry>> {
     let mut titles = Vec::with_capacity(payload.len());
     let mut urls = Vec::with_capacity(payload.len());
     let mut descriptions: Vec<Option<String>> = Vec::with_capacity(payload.len());
     let mut feed_ids = Vec::with_capacity(payload.len());
-    payload.iter().map(|item| {
-        titles.push(item.title.clone());
-        urls.push(item.url.clone());
-        descriptions.push(item.description.clone());
-        feed_ids.push(item.feed_id);
-        item.validate()
+    payload.iter().map(|entry| {
+        titles.push(entry.title.clone());
+        urls.push(entry.url.clone());
+        descriptions.push(entry.description.clone());
+        feed_ids.push(entry.feed_id);
+        entry.validate()
     }).collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
-        Item,
-        "INSERT INTO items (
+        Entry,
+        "INSERT INTO entries (
             title, url, description, feed_id, created_at, updated_at
         ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::int[])
         ON CONFLICT DO NOTHING
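
Note: both create_entries and upsert_entries bind parallel arrays ($1..$4) and let Postgres UNNEST expand them row-wise before the INSERT ... SELECT. A minimal standalone sketch of that expansion (values invented purely for illustration):

-- UNNEST over parallel arrays yields one row per array position
SELECT * FROM UNNEST(
    ARRAY['first title', 'second title']::text[],
    ARRAY[1, 2]::int[]
) AS t(title, feed_id);
-- yields two rows: ('first title', 1) and ('second title', 2)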
@@ -144,8 +144,8 @@ pub async fn upsert_items(pool: &PgPool, payload: Vec<CreateItem>) -> Result<Vec
         })
 }
 
-pub async fn delete_item(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE items SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_entry(pool: &PgPool, id: i32) -> Result<()> {
+    sqlx::query!("UPDATE entries SET deleted_at = now() WHERE id = $1", id)
         .execute(pool)
         .await?;
     Ok(())
@@ -1,2 +1,2 @@
-pub mod item;
+pub mod entry;
 pub mod feed;