From abd540d2ff6e56ec73bf6f807f9b50602436a513 Mon Sep 17 00:00:00 2001 From: Tyler Hallada Date: Tue, 27 Jun 2023 14:03:52 -0400 Subject: [PATCH] Better database layout with uuid primary keys Serialize and deserialize the uuid ids as base62 strings in the URLs. --- Cargo.lock | 3 + Cargo.toml | 4 +- drop_all.psql | 8 -- drop_all.sql | 9 +++ migrations/20230507201612_initial.sql | 87 ++++++++++++++------- src/bin/cli.rs | 18 +++-- src/error.rs | 3 +- src/handlers/api/entry.rs | 8 +- src/handlers/api/feed.rs | 11 +-- src/handlers/entry.rs | 11 ++- src/handlers/home.rs | 3 +- src/jobs/crawl.rs | 10 ++- src/lib.rs | 1 + src/models/entry.rs | 78 +++++++++---------- src/models/feed.rs | 48 ++++++------ src/utils.rs | 2 + src/uuid.rs | 107 ++++++++++++++++++++++++++ 17 files changed, 290 insertions(+), 121 deletions(-) delete mode 100644 drop_all.psql create mode 100644 drop_all.sql create mode 100644 src/uuid.rs diff --git a/Cargo.lock b/Cargo.lock index 2c18ba8..2d72de3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -385,6 +385,7 @@ dependencies = [ "tracing-appender", "tracing-subscriber", "url", + "uuid", "validator", ] @@ -2286,6 +2287,7 @@ dependencies = [ "thiserror", "tokio-stream", "url", + "uuid", "whoami", ] @@ -2771,6 +2773,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2" dependencies = [ "getrandom", + "serde", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d7ee4c8..cd91d3a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ sqlx = { version = "0.6", features = [ "macros", "migrate", "chrono", + "uuid", ] } thiserror = "1" tokio = { version = "1", features = ["full"] } @@ -39,8 +40,9 @@ tokio-stream = { version = "0.1", features = ["sync"] } tower = "0.4" tower-livereload = "0.8" tower-http = { version = "0.4", features = ["trace", "fs"] } -tracing = "0.1" +tracing = { version = "0.1", features = ["valuable"] } tracing-appender = "0.2" tracing-subscriber = { version = "0.3", features = ["env-filter"] } +uuid = { version = "1.3", features = ["serde"] } url = "2.4" validator = { version = "0.16", features = ["derive"] } diff --git a/drop_all.psql b/drop_all.psql deleted file mode 100644 index e8a3cc3..0000000 --- a/drop_all.psql +++ /dev/null @@ -1,8 +0,0 @@ -/* !!! THIS DROPS ALL TABLES IN THE DATABASE WHICH DELETES ALL DATA IN THE DATABASE !!! - * - * ONLY RUN IN DEVELOPMENT! - */ -DROP TABLE _sqlx_migrations CASCADE; -DROP TABLE entries CASCADE; -DROP TABLE feeds CASCADE; -DROP TYPE feed_type; diff --git a/drop_all.sql b/drop_all.sql new file mode 100644 index 0000000..69cd591 --- /dev/null +++ b/drop_all.sql @@ -0,0 +1,9 @@ +/* !!! THIS DROPS ALL TABLES IN THE DATABASE WHICH DELETES ALL DATA IN THE DATABASE !!! + * + * ONLY RUN IN DEVELOPMENT! + */ +drop table _sqlx_migrations cascade; +drop collation case_insensitive; +drop table entry cascade; +drop table feed cascade; +drop type feed_type; diff --git a/migrations/20230507201612_initial.sql b/migrations/20230507201612_initial.sql index 14f2d7f..c3a93dc 100644 --- a/migrations/20230507201612_initial.sql +++ b/migrations/20230507201612_initial.sql @@ -1,29 +1,64 @@ -CREATE TYPE feed_type AS ENUM ('atom', 'rss'); +-- This extension gives us `uuid_generate_v1mc()` which generates UUIDs that cluster better than `gen_random_uuid()` +-- while still being difficult to predict and enumerate. 
+-- Also, while unlikely, `gen_random_uuid()` can in theory produce collisions which can trigger spurious errors on
+-- insertion, whereas it's much less likely with `uuid_generate_v1mc()`.
+create extension if not exists "uuid-ossp";
 
-CREATE TABLE IF NOT EXISTS "feeds" (
-    "id" SERIAL PRIMARY KEY NOT NULL,
-    "title" VARCHAR(255),
-    "url" VARCHAR(2048) NOT NULL,
-    "type" feed_type NOT NULL,
-    "description" TEXT,
-    "created_at" timestamp(3) NOT NULL,
-    "updated_at" timestamp(3) NOT NULL,
-    "deleted_at" timestamp(3)
-);
-CREATE INDEX "feeds_deleted_at" ON "feeds" ("deleted_at");
-CREATE UNIQUE INDEX "feeds_url" ON "feeds" ("url");
+-- Set up trigger to auto-set `updated_at` columns when rows are modified
+create or replace function set_updated_at()
+    returns trigger as
+$$
+begin
+    NEW.updated_at = now();
+    return NEW;
+end;
+$$ language plpgsql;
 
-CREATE TABLE IF NOT EXISTS "entries" (
-    "id" SERIAL PRIMARY KEY NOT NULL,
-    "title" VARCHAR(255),
-    "url" VARCHAR(2048) NOT NULL,
-    "description" TEXT,
-    "html_content" TEXT,
-    "feed_id" INTEGER REFERENCES "feeds"(id) NOT NULL,
-    "published_at" timestamp(3) NOT NULL,
-    "created_at" timestamp(3) NOT NULL,
-    "updated_at" timestamp(3) NOT NULL,
-    "deleted_at" timestamp(3)
+create or replace function trigger_updated_at(tablename regclass)
+    returns void as
+$$
+begin
+    execute format('CREATE TRIGGER set_updated_at
+        BEFORE UPDATE
+        ON %s
+        FOR EACH ROW
+        WHEN (OLD is distinct from NEW)
+        EXECUTE FUNCTION set_updated_at();', tablename);
+end;
+$$ language plpgsql;
+
+-- This is a text collation that sorts text case-insensitively, useful for `UNIQUE` indexes
+-- over things like usernames and emails, without needing to remember to do case-conversion.
+create collation case_insensitive (provider = icu, locale = 'und-u-ks-level2', deterministic = false);
+
+create type feed_type as enum ('atom', 'rss');
+
+create table if not exists "feed" (
+    feed_id uuid primary key default uuid_generate_v1mc(),
+    title text,
+    url varchar(2048) not null,
+    type feed_type not null,
+    description text,
+    created_at timestamptz not null default now(),
+    updated_at timestamptz,
+    deleted_at timestamptz
 );
-CREATE INDEX "entries_published_at_where_deleted_at_is_null" ON "entries" ("published_at" DESC) WHERE "deleted_at" IS NULL;
-CREATE UNIQUE INDEX "entries_url_and_feed_id" ON "entries" ("url", "feed_id");
+create index on "feed" (deleted_at);
+create unique index on "feed" (url);
+select trigger_updated_at('"feed"');
+
+create table if not exists "entry" (
+    entry_id uuid primary key default uuid_generate_v1mc(),
+    title text,
+    url varchar(2048) not null,
+    description text,
+    html_content text,
+    feed_id uuid not null references "feed" (feed_id) on delete cascade,
+    published_at timestamptz not null,
+    created_at timestamptz not null default now(),
+    updated_at timestamptz,
+    deleted_at timestamptz
+);
+create index on "entry" (published_at desc) where deleted_at is null;
+create unique index on "entry" (url, feed_id);
+select trigger_updated_at('"entry"');
diff --git a/src/bin/cli.rs b/src/bin/cli.rs
index 15912de..175ec0a 100644
--- a/src/bin/cli.rs
+++ b/src/bin/cli.rs
@@ -5,10 +5,12 @@ use dotenvy::dotenv;
 use sqlx::postgres::PgPoolOptions;
 use std::env;
 use tracing::info;
+use uuid::Uuid;
 
 use lib::jobs::crawl::crawl;
 use lib::models::feed::{create_feed, delete_feed, CreateFeed, FeedType};
 use lib::models::entry::{create_entry, delete_entry, CreateEntry};
+use lib::uuid::Base62Uuid;
 
 #[derive(FromArgs)]
 /// CLI for crawlnicle
@@ -51,7 +53,7 @@ struct AddFeed {
 struct DeleteFeed {
     #[argh(positional)]
     /// id of the feed to delete
-    id: i32,
+    id: Uuid,
 }
 
 #[derive(FromArgs)]
@@ -69,7 +71,7 @@ struct AddEntry {
     description: Option<String>,
     #[argh(option)]
     /// source feed for the entry
-    feed_id: i32,
+    feed_id: Uuid,
 }
 
 #[derive(FromArgs)]
@@ -78,7 +80,7 @@ struct AddEntry {
 struct DeleteEntry {
     #[argh(positional)]
     /// id of the entry to delete
-    id: i32,
+    id: Uuid,
 }
 
 #[derive(FromArgs)]
@@ -111,11 +113,11 @@ pub async fn main() -> Result<()> {
                 },
             )
             .await?;
-            info!("Created feed with id {}", feed.id);
+            info!("Created feed with id {}", Base62Uuid::from(feed.feed_id));
         }
         Commands::DeleteFeed(args) => {
             delete_feed(&pool, args.id).await?;
-            info!("Deleted feed with id {}", args.id);
+            info!("Deleted feed with id {}", Base62Uuid::from(args.id));
         }
         Commands::AddEntry(args) => {
             let entry = create_entry(
@@ -126,15 +128,15 @@ pub async fn main() -> Result<()> {
                     description: args.description,
                     html_content: None,
                     feed_id: args.feed_id,
-                    published_at: Utc::now().naive_utc(),
+                    published_at: Utc::now(),
                 },
             )
             .await?;
-            info!("Created entry with id {}", entry.id);
+            info!("Created entry with id {}", Base62Uuid::from(entry.entry_id));
         }
         Commands::DeleteEntry(args) => {
             delete_entry(&pool, args.id).await?;
-            info!("Deleted entry with id {}", args.id);
+            info!("Deleted entry with id {}", Base62Uuid::from(args.id));
         }
         Commands::Crawl(_) => {
             info!("Crawling...");
diff --git a/src/error.rs b/src/error.rs
index 7b532bc..1806142 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -3,6 +3,7 @@ use axum::response::{IntoResponse, Response};
 use axum::Json;
 use tracing::error;
 use serde_with::DisplayFromStr;
+use uuid::Uuid;
 use validator::ValidationErrors;
 
 /// An API-friendly error type.
@@ -23,7 +24,7 @@ pub enum Error {
     InvalidEntity(#[from] ValidationErrors),
 
     #[error("{0}: {1} not found")]
-    NotFound(&'static str, i32),
+    NotFound(&'static str, Uuid),
 
     #[error("referenced {0} not found")]
     RelationNotFound(&'static str),
diff --git a/src/handlers/api/entry.rs b/src/handlers/api/entry.rs
index 816fa6a..bcb810f 100644
--- a/src/handlers/api/entry.rs
+++ b/src/handlers/api/entry.rs
@@ -6,9 +6,13 @@ use sqlx::PgPool;
 
 use crate::error::Error;
 use crate::models::entry::{create_entry, get_entry, CreateEntry, Entry};
+use crate::uuid::Base62Uuid;
 
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Entry>, Error> {
-    Ok(Json(get_entry(&pool, id).await?))
+pub async fn get(
+    State(pool): State<PgPool>,
+    Path(id): Path<Base62Uuid>,
+) -> Result<Json<Entry>, Error> {
+    Ok(Json(get_entry(&pool, id.as_uuid()).await?))
 }
 
 pub async fn post(
diff --git a/src/handlers/api/feed.rs b/src/handlers/api/feed.rs
index daecb16..db2b924 100644
--- a/src/handlers/api/feed.rs
+++ b/src/handlers/api/feed.rs
@@ -5,10 +5,11 @@ use axum::{
 use sqlx::PgPool;
 
 use crate::error::{Error, Result};
-use crate::models::feed::{create_feed, get_feed, delete_feed, CreateFeed, Feed};
+use crate::models::feed::{create_feed, delete_feed, get_feed, CreateFeed, Feed};
+use crate::uuid::Base62Uuid;
 
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Feed>> {
-    Ok(Json(get_feed(&pool, id).await?))
+pub async fn get(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
+    Ok(Json(get_feed(&pool, id.as_uuid()).await?))
 }
 
 pub async fn post(
@@ -18,6 +19,6 @@ pub async fn post(
     Ok(Json(create_feed(&pool, payload).await?))
 }
 
-pub async fn delete(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<()> {
-    delete_feed(&pool, id).await
+pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
+    delete_feed(&pool, id.as_uuid()).await
 }
diff --git 
a/src/handlers/entry.rs b/src/handlers/entry.rs
index b995b46..be48989 100644
--- a/src/handlers/entry.rs
+++ b/src/handlers/entry.rs
@@ -1,4 +1,4 @@
-use axum::extract::{State, Path};
+use axum::extract::{Path, State};
 use axum::response::Response;
 use maud::{html, PreEscaped};
 use sqlx::PgPool;
@@ -6,9 +6,14 @@ use sqlx::PgPool;
 use crate::error::Result;
 use crate::models::entry::get_entry;
 use crate::partials::layout::Layout;
+use crate::uuid::Base62Uuid;
 
-pub async fn get(Path(id): Path<i32>, State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
-    let entry = get_entry(&pool, id).await?;
+pub async fn get(
+    Path(id): Path<Base62Uuid>,
+    State(pool): State<PgPool>,
+    layout: Layout,
+) -> Result<Response> {
+    let entry = get_entry(&pool, id.as_uuid()).await?;
     Ok(layout.render(html! {
         @let title = entry.title.unwrap_or_else(|| "Untitled".to_string());
         h1 { a href=(entry.url) { (title) } }
diff --git a/src/handlers/home.rs b/src/handlers/home.rs
index b023682..5181d8c 100644
--- a/src/handlers/home.rs
+++ b/src/handlers/home.rs
@@ -7,6 +7,7 @@ use crate::error::Result;
 use crate::models::entry::{get_entries, GetEntriesOptions};
 use crate::partials::layout::Layout;
 use crate::utils::get_domain;
+use crate::uuid::Base62Uuid;
 
 pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
     let entries = get_entries(&pool, GetEntriesOptions::default()).await?;
@@ -14,7 +15,7 @@ pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response>
         ul class="entries" {
             @for entry in entries {
                 @let title = entry.title.unwrap_or_else(|| "Untitled".to_string());
-                @let url = format!("/entry/{}", entry.id);
+                @let url = format!("/entry/{}", Base62Uuid::from(entry.entry_id));
                 @let domain = get_domain(&entry.url).unwrap_or_default();
                 li { a href=(url) { (title) } em class="domain" { (domain) }}
             }
diff --git a/src/jobs/crawl.rs b/src/jobs/crawl.rs
index 6a73b95..1c3d36b 100644
--- a/src/jobs/crawl.rs
+++ b/src/jobs/crawl.rs
@@ -7,6 +7,7 @@ use tracing::{info, info_span, warn};
 
 use crate::models::feed::get_feeds;
 use crate::models::entry::{upsert_entries, CreateEntry};
+use crate::uuid::Base62Uuid;
 
 /// For every feed in the database, fetches the feed, parses it, and saves new entries to the
 /// database.
@@ -15,7 +16,8 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
     let client = Client::new();
     let feeds = get_feeds(pool).await?;
     for feed in feeds {
-        let feed_span = info_span!("feed", id = feed.id, url = feed.url.as_str());
+        let feed_id_str: String = Base62Uuid::from(feed.feed_id).into();
+        let feed_span = info_span!("feed", id = feed_id_str, url = feed.url.as_str());
         let _feed_span_guard = feed_span.enter();
         info!("Fetching feed");
         // TODO: handle these results
@@ -28,20 +30,20 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
             let _entry_span_guard = entry_span.enter();
             if let Some(link) = entry.links.get(0) {
                 // if no scraped or feed date is available, fallback to the current time
-                let published_at = entry.published.unwrap_or_else(Utc::now).naive_utc();
+                let published_at = entry.published.unwrap_or_else(Utc::now);
                 let mut entry = CreateEntry {
                     title: entry.title.map(|t| t.content),
                     url: link.href.clone(),
                     description: entry.summary.map(|s| s.content),
                     html_content: None,
-                    feed_id: feed.id,
+                    feed_id: feed.feed_id,
                     published_at,
                 };
                 info!("Fetching and parsing entry link: {}", link.href);
                 if let Ok(article) = scraper.parse(&Url::parse(&link.href)?, true, &client, None).await {
                     if let Some(date) = article.date {
                         // prefer scraped date over rss feed date
-                        entry.published_at = date.naive_utc()
+                        entry.published_at = date;
                     };
                     entry.html_content = article.get_content();
                 } else {
diff --git a/src/lib.rs b/src/lib.rs
index 96217d9..1fe4a63 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,3 +7,4 @@ pub mod models;
 pub mod partials;
 pub mod state;
 pub mod utils;
+pub mod uuid;
diff --git a/src/models/entry.rs b/src/models/entry.rs
index dcdab6f..44425fd 100644
--- a/src/models/entry.rs
+++ b/src/models/entry.rs
@@ -1,6 +1,7 @@
-use chrono::NaiveDateTime;
+use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
+use uuid::Uuid;
 use validator::{Validate, ValidationErrors};
 
 use crate::error::{Error, Result};
@@ -9,16 +10,16 @@ const DEFAULT_ENTRIES_PAGE_SIZE: i64 = 50;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct Entry {
-    pub id: i32,
+    pub entry_id: Uuid,
     pub title: Option<String>,
     pub url: String,
     pub description: Option<String>,
     pub html_content: Option<String>,
-    pub feed_id: i32,
-    pub published_at: NaiveDateTime,
-    pub created_at: NaiveDateTime,
-    pub updated_at: NaiveDateTime,
-    pub deleted_at: Option<NaiveDateTime>,
+    pub feed_id: Uuid,
+    pub published_at: DateTime<Utc>,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: Option<DateTime<Utc>>,
+    pub deleted_at: Option<DateTime<Utc>>,
 }
 
 #[derive(Debug, Deserialize, Validate)]
@@ -30,18 +31,17 @@ pub struct CreateEntry {
     #[validate(length(max = 524288))]
     pub description: Option<String>,
     pub html_content: Option<String>,
-    #[validate(range(min = 1))]
-    pub feed_id: i32,
-    pub published_at: NaiveDateTime,
+    pub feed_id: Uuid,
+    pub published_at: DateTime<Utc>,
 }
 
-pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
-    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE id = $1", id)
+pub async fn get_entry(pool: &PgPool, entry_id: Uuid) -> Result<Entry> {
+    sqlx::query_as!(Entry, "select * from entry where entry_id = $1", entry_id)
         .fetch_one(pool)
         .await
         .map_err(|error| {
             if let sqlx::error::Error::RowNotFound = error {
-                return Error::NotFound("entry", id);
+                return Error::NotFound("entry", entry_id);
             }
             Error::Sqlx(error)
         })
@@ -49,7 +49,7 @@ pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
 
 #[derive(Default)]
 pub struct GetEntriesOptions {
-    pub published_before: Option<NaiveDateTime>,
+    pub published_before: Option<DateTime<Utc>>,
     pub limit: Option<i64>,
 }
 
@@ -60,11 +60,11 @@ pub async fn get_entries(
     if let Some(published_before) = options.published_before {
         sqlx::query_as!(
             Entry,
-            "SELECT * FROM entries
-            WHERE deleted_at IS NULL
-            AND published_at < $1
-            ORDER BY published_at DESC
-            LIMIT $2
+            "select * from entry
+            where deleted_at is null
+            and published_at < $1
+            order by published_at desc
+            limit $2
             ",
             published_before,
             options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
@@ -74,10 +74,10 @@ pub async fn get_entries(
     } else {
         sqlx::query_as!(
             Entry,
-            "SELECT * FROM entries
-            WHERE deleted_at IS NULL
-            ORDER BY published_at DESC
-            LIMIT $1
+            "select * from entry
+            where deleted_at is null
+            order by published_at desc
+            limit $1
             ",
             options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
         )
@@ -91,11 +91,11 @@ pub async fn create_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry> {
     payload.validate()?;
     sqlx::query_as!(
         Entry,
-        "INSERT INTO entries (
-            title, url, description, html_content, feed_id, published_at, created_at, updated_at
-        ) VALUES (
-            $1, $2, $3, $4, $5, $6, now(), now()
-        ) RETURNING *",
+        "insert into entry (
+            title, url, description, html_content, feed_id, published_at
+        ) values (
+            $1, $2, $3, $4, $5, $6
+        ) returning *",
        payload.title,
        payload.url,
        payload.description,
@@ -136,10 +136,10 @@ pub async fn create_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
         .collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
         Entry,
-        "INSERT INTO entries (
-            title, url, description, html_content, feed_id, published_at, created_at, updated_at
-        ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[], $5::int[], $6::timestamp(3)[])
-        RETURNING *",
+        "insert into entry (
+            title, url, description, html_content, feed_id, published_at
+        ) select * from unnest($1::text[], $2::text[], $3::text[], $4::text[], $5::uuid[], $6::timestamptz[])
+        returning *",
         titles.as_slice() as &[Option<String>],
         urls.as_slice(),
         descriptions.as_slice() as &[Option<String>],
@@ -180,11 +180,11 @@ pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
         .collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
         Entry,
-        "INSERT INTO entries (
-            title, url, description, html_content, feed_id, published_at, created_at, updated_at
-        ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[], $5::int[], $6::timestamp(3)[])
-        ON CONFLICT DO NOTHING
-        RETURNING *",
+        "insert into entry (
+            title, url, description, html_content, feed_id, published_at
+        ) select * from unnest($1::text[], $2::text[], $3::text[], $4::text[], $5::uuid[], $6::timestamptz[])
+        on conflict do nothing
+        returning *",
         titles.as_slice() as &[Option<String>],
         urls.as_slice(),
         descriptions.as_slice() as &[Option<String>],
@@ -204,8 +204,8 @@ pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
     })
 }
 
-pub async fn delete_entry(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE entries SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_entry(pool: &PgPool, entry_id: Uuid) -> Result<()> {
+    sqlx::query!("update entry set deleted_at = now() where entry_id = $1", entry_id)
         .execute(pool)
         .await?;
     Ok(())
diff --git a/src/models/feed.rs b/src/models/feed.rs
index 1b32ee2..11e12df 100644
--- a/src/models/feed.rs
+++ b/src/models/feed.rs
@@ -1,8 +1,9 @@
 use std::str::FromStr;
 
-use chrono::NaiveDateTime;
+use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
+use uuid::Uuid;
 use validator::Validate;
 
 use crate::error::{Error, Result};
@@ -28,15 +29,15 @@ impl FromStr for FeedType {
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct Feed {
-    pub id: i32,
+    pub 
feed_id: Uuid,
     pub title: Option<String>,
     pub url: String,
     #[serde(rename = "type")]
     pub feed_type: FeedType,
     pub description: Option<String>,
-    pub created_at: NaiveDateTime,
-    pub updated_at: NaiveDateTime,
-    pub deleted_at: Option<NaiveDateTime>,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: Option<DateTime<Utc>>,
+    pub deleted_at: Option<DateTime<Utc>>,
 }
 
 #[derive(Debug, Deserialize, Validate)]
@@ -51,12 +52,13 @@ pub struct CreateFeed {
     pub description: Option<String>,
 }
 
-pub async fn get_feed(pool: &PgPool, id: i32) -> Result<Feed> {
+pub async fn get_feed(pool: &PgPool, feed_id: Uuid) -> Result<Feed> {
     sqlx::query_as!(
         Feed,
         // Unable to SELECT * here due to https://github.com/launchbadge/sqlx/issues/1004
-        r#"SELECT
-            id,
+        // language=PostgreSQL
+        r#"select
+            feed_id,
             title,
             url,
             type as "feed_type: FeedType",
@@ -64,14 +66,14 @@ pub async fn get_feed(pool: &PgPool, id: i32) -> Result<Feed> {
             created_at,
             updated_at,
             deleted_at
-        FROM feeds WHERE id = $1"#,
-        id
+        from feed where feed_id = $1"#,
+        feed_id
     )
     .fetch_one(pool)
     .await
     .map_err(|error| {
         if let sqlx::error::Error::RowNotFound = error {
-            return Error::NotFound("feed", id);
+            return Error::NotFound("feed", feed_id);
         }
         Error::Sqlx(error)
     })
@@ -80,8 +82,8 @@ pub async fn get_feed(pool: &PgPool, id: i32) -> Result<Feed> {
 pub async fn get_feeds(pool: &PgPool) -> sqlx::Result<Vec<Feed>> {
     sqlx::query_as!(
         Feed,
-        r#"SELECT
-            id,
+        r#"select
+            feed_id,
             title,
             url,
             type as "feed_type: FeedType",
@@ -89,8 +91,8 @@ pub async fn get_feeds(pool: &PgPool) -> sqlx::Result<Vec<Feed>> {
             created_at,
             updated_at,
             deleted_at
-        FROM feeds
-        WHERE deleted_at IS NULL"#
+        from feed
+        where deleted_at is null"#
     )
     .fetch_all(pool)
     .await
@@ -100,12 +102,12 @@ pub async fn create_feed(pool: &PgPool, payload: CreateFeed) -> Result<Feed> {
     payload.validate()?;
     Ok(sqlx::query_as!(
         Feed,
-        r#"INSERT INTO feeds (
-            title, url, type, description, created_at, updated_at
-        ) VALUES (
-            $1, $2, $3, $4, now(), now()
-        ) RETURNING
-            id,
+        r#"insert into feed (
+            title, url, type, description
+        ) values (
+            $1, $2, $3, $4
+        ) returning
+            feed_id,
             title,
             url,
             type as "feed_type: FeedType",
@@ -123,8 +125,8 @@ pub async fn create_feed(pool: &PgPool, payload: CreateFeed) -> Result<Feed> {
     .await?)
 }
 
-pub async fn delete_feed(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE feeds SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_feed(pool: &PgPool, feed_id: Uuid) -> Result<()> {
+    sqlx::query!("update feed set deleted_at = now() where feed_id = $1", feed_id)
         .execute(pool)
         .await?;
     Ok(())
diff --git a/src/utils.rs b/src/utils.rs
index dca8b1e..b5f03b4 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,5 +1,7 @@
 use url::Url;
 
+const BASE62_CHARS: &[u8] = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
+
 pub fn get_domain(url: &str) -> Option<String> {
     Url::parse(url)
         .ok()
diff --git a/src/uuid.rs b/src/uuid.rs
new file mode 100644
index 0000000..91f1462
--- /dev/null
+++ b/src/uuid.rs
@@ -0,0 +1,107 @@
+use std::fmt::{Display, Formatter, self};
+
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+const BASE62_CHARS: &[u8] = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Base62Uuid(
+    #[serde(deserialize_with = "uuid_from_base62_str")]
+    #[serde(serialize_with = "uuid_to_base62_str")]
+    Uuid
+);
+
+impl Base62Uuid {
+    pub fn as_uuid(&self) -> Uuid {
+        self.0
+    }
+}
+
+impl From<Uuid> for Base62Uuid {
+    fn from(uuid: Uuid) -> Self {
+        Self(uuid)
+    }
+}
+
+impl Display for Base62Uuid {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", base62_encode(self.0.as_u128()))
+    }
+}
+
+impl From<&str> for Base62Uuid {
+    fn from(s: &str) -> Self {
+        Self(Uuid::from_u128(base62_decode(s)))
+    }
+}
+
+impl From<Base62Uuid> for String {
+    fn from(s: Base62Uuid) -> Self {
+        base62_encode(s.0.as_u128())
+    }
+}
+
+fn uuid_to_base62_str<S>(uuid: &Uuid, s: S) -> Result<S::Ok, S::Error>
+where
+    S: serde::Serializer,
+{
+    s.serialize_str(&base62_encode(uuid.as_u128()))
+}
+
+fn uuid_from_base62_str<'de, D>(deserializer: D) -> Result<Uuid, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let s = String::deserialize(deserializer)?;
+    Ok(Uuid::from_u128(base62_decode(&s)))
+}
+
+pub fn base62_encode(mut number: u128) -> String {
+    let base = BASE62_CHARS.len() as u128;
+    let mut encoded = Vec::new();
+
+    while number > 0 {
+        let remainder = (number % base) as usize;
+        number /= base;
+        encoded.push(BASE62_CHARS[remainder]);
+    }
+
+    encoded.reverse();
+    String::from_utf8(encoded).unwrap()
+}
+
+pub fn base62_decode(input: &str) -> u128 {
+    let base = BASE62_CHARS.len() as u128;
+    let mut number = 0u128;
+
+    for &byte in input.as_bytes() {
+        number = number * base + (BASE62_CHARS.iter().position(|&ch| ch == byte).unwrap() as u128);
+    }
+
+    number
+}
+
+#[cfg(test)]
+mod tests {
+    use uuid::Uuid;
+
+    use super::*;
+
+    #[test]
+    fn test_encode_decode() {
+        let original_uuids = [
+            Uuid::new_v4(),
+            Uuid::new_v4(),
+            Uuid::new_v4(),
+            Uuid::new_v4(),
+        ];
+
+        for original_uuid in original_uuids.iter() {
+            let encoded = base62_encode(original_uuid.as_u128());
+            let decoded_uuid = Uuid::from_u128(base62_decode(&encoded));
+
+            assert_eq!(*original_uuid, decoded_uuid);
+        }
+    }
+}
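
Note on the base62 encoding: the standalone sketch below is not part of the patch. It copies `base62_encode` and `base62_decode` from src/uuid.rs verbatim and round-trips a hard-coded 128-bit value standing in for `Uuid::as_u128()` (the sample value is arbitrary), printing the kind of short path segment the handlers in src/handlers/home.rs now emit. It deliberately avoids the `uuid` crate so it compiles with plain `rustc`.

// Standalone sketch (not part of the patch): mirrors Base62Uuid's Display and
// From<&str> impls from src/uuid.rs to show how an id round-trips through a URL.

const BASE62_CHARS: &[u8] = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

fn base62_encode(mut number: u128) -> String {
    let base = BASE62_CHARS.len() as u128;
    let mut encoded = Vec::new();
    while number > 0 {
        let remainder = (number % base) as usize;
        number /= base;
        encoded.push(BASE62_CHARS[remainder]);
    }
    encoded.reverse();
    String::from_utf8(encoded).unwrap()
}

fn base62_decode(input: &str) -> u128 {
    let base = BASE62_CHARS.len() as u128;
    let mut number = 0u128;
    for &byte in input.as_bytes() {
        number = number * base + (BASE62_CHARS.iter().position(|&ch| ch == byte).unwrap() as u128);
    }
    number
}

fn main() {
    // Arbitrary stand-in for a freshly generated UUID's integer value.
    let id: u128 = 0x0188_7b2d_3e4f_7c91_a5b6_c7d8_e9f0_1234;
    let short = base62_encode(id);
    // This is the shape of the links rendered in src/handlers/home.rs.
    println!("/entry/{}", short);
    // Base62Uuid::from(&str) performs the reverse when axum extracts the path param.
    assert_eq!(base62_decode(&short), id);
}

Because 62^22 exceeds 2^128 (and 62^21 does not), any UUID encodes to at most 22 characters, versus 36 for the hyphenated hex form, which is what keeps the entry and feed URLs short.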