diff --git a/migrations/20230507201612_initial.sql b/migrations/20230507201612_initial.sql index aee36d6..ec86200 100644 --- a/migrations/20230507201612_initial.sql +++ b/migrations/20230507201612_initial.sql @@ -1,9 +1,23 @@ -CREATE TABLE IF NOT EXISTS "items" ( +CREATE TYPE feed_type AS ENUM ('atom', 'rss'); + +CREATE TABLE IF NOT EXISTS "feeds" ( "id" SERIAL PRIMARY KEY NOT NULL, "title" VARCHAR(255) NOT NULL, "url" VARCHAR(2048) NOT NULL, + "type" feed_type NOT NULL, "description" TEXT, "created_at" timestamp(3) NOT NULL, "updated_at" timestamp(3) NOT NULL, "deleted_at" timestamp(3) ); + +CREATE TABLE IF NOT EXISTS "items" ( + "id" SERIAL PRIMARY KEY NOT NULL, + "title" VARCHAR(255) NOT NULL, + "url" VARCHAR(2048) NOT NULL, + "description" TEXT, + "feed_id" INTEGER REFERENCES "feeds"(id) NOT NULL, + "created_at" timestamp(3) NOT NULL, + "updated_at" timestamp(3) NOT NULL, + "deleted_at" timestamp(3) +); diff --git a/src/error.rs b/src/error.rs index 1a3bdba..4cfcc27 100644 --- a/src/error.rs +++ b/src/error.rs @@ -22,8 +22,11 @@ pub enum Error { #[error("validation error in request body")] InvalidEntity(#[from] ValidationErrors), - #[error("{0} not found")] - NotFound(&'static str), + #[error("{0}: {1} not found")] + NotFound(&'static str, i32), + + #[error("referenced {0}: {1} not found")] + RelationNotFound(&'static str, i32), } pub type Result<T, E = Error> = ::std::result::Result<T, E>; @@ -64,9 +67,9 @@ impl Error { use Error::*; match self { - NotFound(_) => StatusCode::NOT_FOUND, + NotFound(_, _) => StatusCode::NOT_FOUND, Sqlx(_) | Anyhow(_) => StatusCode::INTERNAL_SERVER_ERROR, - InvalidEntity(_) => StatusCode::UNPROCESSABLE_ENTITY, + InvalidEntity(_) | RelationNotFound(_, _) => StatusCode::UNPROCESSABLE_ENTITY, } } } diff --git a/src/handlers/feed.rs b/src/handlers/feed.rs new file mode 100644 index 0000000..33b1659 --- /dev/null +++ b/src/handlers/feed.rs @@ -0,0 +1,19 @@ +use axum::{ + extract::{Path, State}, + Json, +}; +use sqlx::PgPool; + +use crate::error::Error; 
+use crate::models::feed::{create_feed, get_feed, CreateFeed, Feed}; + +pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Feed>, Error> { + Ok(Json(get_feed(pool, id).await?)) +} + +pub async fn post( + State(pool): State<PgPool>, + Json(payload): Json<CreateFeed>, +) -> Result<Json<Feed>, Error> { + Ok(Json(create_feed(pool, payload).await?)) +} diff --git a/src/handlers/feeds.rs b/src/handlers/feeds.rs new file mode 100644 index 0000000..7473152 --- /dev/null +++ b/src/handlers/feeds.rs @@ -0,0 +1,9 @@ +use axum::{extract::State, Json}; +use sqlx::PgPool; + +use crate::error::Error; +use crate::models::feed::{get_feeds, Feed}; + +pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Feed>>, Error> { + Ok(Json(get_feeds(pool).await?)) +} diff --git a/src/handlers/item.rs b/src/handlers/item.rs index c451d18..2fb0319 100644 --- a/src/handlers/item.rs +++ b/src/handlers/item.rs @@ -1,13 +1,13 @@ -use axum::{extract::{State, Path}, Json}; +use axum::{ + extract::{Path, State}, + Json, +}; use sqlx::PgPool; use crate::error::Error; use crate::models::item::{create_item, get_item, CreateItem, Item}; -pub async fn get( - State(pool): State<PgPool>, - Path(id): Path<i32>, -) -> Result<Json<Item>, Error> { +pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Item>, Error> { Ok(Json(get_item(pool, id).await?)) } diff --git a/src/handlers/mod.rs b/src/handlers/mod.rs index acb1b13..f0d504c 100644 --- a/src/handlers/mod.rs +++ b/src/handlers/mod.rs @@ -1,2 +1,4 @@ +pub mod feed; +pub mod feeds; pub mod item; pub mod items; diff --git a/src/main.rs b/src/main.rs index b423581..93b9ce7 100644 --- a/src/main.rs +++ b/src/main.rs @@ -27,6 +27,9 @@ async fn main() -> anyhow::Result<()> { sqlx::migrate!().run(&pool).await?; let app = Router::new() + .route("/v1/feeds", get(handlers::feeds::get)) + .route("/v1/feed", post(handlers::feed::post)) + .route("/v1/feed/:id", get(handlers::feed::get)) .route("/v1/items", get(handlers::items::get)) .route("/v1/item", post(handlers::item::post)) .route("/v1/item/:id", get(handlers::item::get)) 
diff --git a/src/models/feed.rs b/src/models/feed.rs new file mode 100644 index 0000000..d2dd0f1 --- /dev/null +++ b/src/models/feed.rs @@ -0,0 +1,109 @@ +use chrono::NaiveDateTime; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use validator::Validate; + +use crate::error::{Error, Result}; + +#[derive(Debug, Serialize, Deserialize, sqlx::Type)] +#[sqlx(type_name = "feed_type", rename_all = "lowercase")] +#[serde(rename_all = "lowercase")] +pub enum FeedType { + Atom, + Rss, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Feed { + id: i32, + title: String, + url: String, + #[serde(rename = "type")] + feed_type: FeedType, + description: Option<String>, + created_at: NaiveDateTime, + updated_at: NaiveDateTime, + deleted_at: Option<NaiveDateTime>, +} + +#[derive(Debug, Deserialize, Validate)] +pub struct CreateFeed { + #[validate(length(max = 255))] + title: String, + #[validate(url)] + url: String, + #[serde(rename = "type")] + feed_type: FeedType, + #[validate(length(max = 524288))] + description: Option<String>, +} + +pub async fn get_feed(pool: PgPool, id: i32) -> Result<Feed> { + sqlx::query_as!( + Feed, + // Unable to SELECT * here due to https://github.com/launchbadge/sqlx/issues/1004 + r#"SELECT + id, + title, + url, + type as "feed_type: FeedType", + description, + created_at, + updated_at, + deleted_at + FROM feeds WHERE id = $1"#, + id + ) + .fetch_one(&pool) + .await + .map_err(|error| { + if let sqlx::error::Error::RowNotFound = error { + return Error::NotFound("feed", id); + } + Error::Sqlx(error) + }) +} + +pub async fn get_feeds(pool: PgPool) -> sqlx::Result<Vec<Feed>> { + sqlx::query_as!( + Feed, + r#"SELECT + id, + title, + url, + type as "feed_type: FeedType", + description, + created_at, + updated_at, + deleted_at + FROM feeds"#) + .fetch_all(&pool) + .await +} + +pub async fn create_feed(pool: PgPool, payload: CreateFeed) -> Result<Feed> { + payload.validate()?; + Ok(sqlx::query_as!( + Feed, + r#"INSERT INTO feeds ( + title, url, type, description, created_at, updated_at + ) 
+ VALUES ( + $1, $2, $3, $4, now(), now() + ) RETURNING + id, + title, + url, + type as "feed_type: FeedType", + description, + created_at, + updated_at, + deleted_at + "#, + payload.title, + payload.url, + payload.feed_type as FeedType, + payload.description + ) + .fetch_one(&pool) + .await?) +} diff --git a/src/models/item.rs b/src/models/item.rs index 848c3c2..ab5fa92 100644 --- a/src/models/item.rs +++ b/src/models/item.rs @@ -11,6 +11,7 @@ pub struct Item { title: String, url: String, description: Option<String>, + feed_id: i32, created_at: NaiveDateTime, updated_at: NaiveDateTime, deleted_at: Option<NaiveDateTime>, @@ -24,6 +25,8 @@ pub struct CreateItem { url: String, #[validate(length(max = 524288))] description: Option<String>, + #[validate(range(min = 1))] + feed_id: i32, } pub async fn get_item(pool: PgPool, id: i32) -> Result<Item> { @@ -32,7 +35,7 @@ .await .map_err(|error| { if let sqlx::error::Error::RowNotFound = error { - return Error::NotFound("item"); + return Error::NotFound("item", id); } Error::Sqlx(error) }) @@ -46,17 +49,26 @@ pub async fn get_items(pool: PgPool) -> sqlx::Result<Vec<Item>> { pub async fn create_item(pool: PgPool, payload: CreateItem) -> Result<Item> { payload.validate()?; - Ok(sqlx::query_as!( + sqlx::query_as!( Item, "INSERT INTO items ( - title, url, description, created_at, updated_at + title, url, description, feed_id, created_at, updated_at ) VALUES ( - $1, $2, $3, now(), now() + $1, $2, $3, $4, now(), now() ) RETURNING *", payload.title, payload.url, - payload.description + payload.description, + payload.feed_id, ) .fetch_one(&pool) - .await?) 
+ .await + .map_err(|error| { + if let sqlx::error::Error::Database(ref psql_error) = error { + if psql_error.code().as_deref() == Some("23503") { + return Error::RelationNotFound("feed", payload.feed_id); + } + } + Error::Sqlx(error) + }) } diff --git a/src/models/mod.rs b/src/models/mod.rs index a35e98d..e755316 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -1 +1,2 @@ pub mod item; +pub mod feed;