Better database layout with uuid primary keys

Serialize and deserialize the uuid ids as base62 strings in the URLs.
Tyler Hallada 2023-06-27 14:03:52 -04:00
parent 4e41bbd6e1
commit abd540d2ff
17 changed files with 290 additions and 121 deletions
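As a rough illustration of what the base62 serialization in this commit buys (a hypothetical sketch, not part of the diff, assuming the `lib::uuid::Base62Uuid` type added in src/uuid.rs below and the `v4` generation already used by its tests):

use lib::uuid::Base62Uuid;
use uuid::Uuid;

fn main() {
    let id = Uuid::new_v4();

    // Display renders the 128-bit UUID as a short base62 string, suitable for URLs like /entry/{id}.
    let path_segment = Base62Uuid::from(id).to_string();
    assert!(path_segment.len() <= 22); // 62^22 > 2^128, so at most 22 characters

    // From<&str> decodes the path segment back into the original UUID for database lookups.
    let parsed = Base62Uuid::from(path_segment.as_str());
    assert_eq!(parsed.as_uuid(), id);
}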

Cargo.lock (generated)

@@ -385,6 +385,7 @@ dependencies = [
  "tracing-appender",
  "tracing-subscriber",
  "url",
+ "uuid",
  "validator",
 ]
@@ -2286,6 +2287,7 @@ dependencies = [
  "thiserror",
  "tokio-stream",
  "url",
+ "uuid",
  "whoami",
 ]
@@ -2771,6 +2773,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2"
 dependencies = [
  "getrandom",
+ "serde",
 ]
 [[package]]


@@ -32,6 +32,7 @@ sqlx = { version = "0.6", features = [
     "macros",
     "migrate",
     "chrono",
+    "uuid",
 ] }
 thiserror = "1"
 tokio = { version = "1", features = ["full"] }
@@ -39,8 +40,9 @@ tokio-stream = { version = "0.1", features = ["sync"] }
 tower = "0.4"
 tower-livereload = "0.8"
 tower-http = { version = "0.4", features = ["trace", "fs"] }
-tracing = "0.1"
+tracing = { version = "0.1", features = ["valuable"] }
 tracing-appender = "0.2"
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+uuid = { version = "1.3", features = ["serde"] }
 url = "2.4"
 validator = { version = "0.16", features = ["derive"] }


@@ -1,8 +0,0 @@
-/* !!! THIS DROPS ALL TABLES IN THE DATABASE WHICH DELETES ALL DATA IN THE DATABASE !!!
- *
- * ONLY RUN IN DEVELOPMENT!
- */
-DROP TABLE _sqlx_migrations CASCADE;
-DROP TABLE entries CASCADE;
-DROP TABLE feeds CASCADE;
-DROP TYPE feed_type;

drop_all.sql (new file)

@@ -0,0 +1,9 @@
+/* !!! THIS DROPS ALL TABLES IN THE DATABASE WHICH DELETES ALL DATA IN THE DATABASE !!!
+ *
+ * ONLY RUN IN DEVELOPMENT!
+ */
+drop table _sqlx_migrations cascade;
+drop collation case_insensitive;
+drop table entry cascade;
+drop table feed cascade;
+drop type feed_type;


@@ -1,29 +1,64 @@
-CREATE TYPE feed_type AS ENUM ('atom', 'rss');
-
-CREATE TABLE IF NOT EXISTS "feeds" (
-    "id" SERIAL PRIMARY KEY NOT NULL,
-    "title" VARCHAR(255),
-    "url" VARCHAR(2048) NOT NULL,
-    "type" feed_type NOT NULL,
-    "description" TEXT,
-    "created_at" timestamp(3) NOT NULL,
-    "updated_at" timestamp(3) NOT NULL,
-    "deleted_at" timestamp(3)
-);
-CREATE INDEX "feeds_deleted_at" ON "feeds" ("deleted_at");
-CREATE UNIQUE INDEX "feeds_url" ON "feeds" ("url");
-
-CREATE TABLE IF NOT EXISTS "entries" (
-    "id" SERIAL PRIMARY KEY NOT NULL,
-    "title" VARCHAR(255),
-    "url" VARCHAR(2048) NOT NULL,
-    "description" TEXT,
-    "html_content" TEXT,
-    "feed_id" INTEGER REFERENCES "feeds"(id) NOT NULL,
-    "published_at" timestamp(3) NOT NULL,
-    "created_at" timestamp(3) NOT NULL,
-    "updated_at" timestamp(3) NOT NULL,
-    "deleted_at" timestamp(3)
-);
-CREATE INDEX "entries_published_at_where_deleted_at_is_null" ON "entries" ("published_at" DESC) WHERE "deleted_at" IS NULL;
-CREATE UNIQUE INDEX "entries_url_and_feed_id" ON "entries" ("url", "feed_id");
+-- This extension gives us `uuid_generate_v1mc()` which generates UUIDs that cluster better than `gen_random_uuid()`
+-- while still being difficult to predict and enumerate.
+-- Also, while unlikely, `gen_random_uuid()` can in theory produce collisions which can trigger spurious errors on
+-- insertion, whereas it's much less likely with `uuid_generate_v1mc()`.
+create extension if not exists "uuid-ossp";
+
+-- Set up trigger to auto-set `updated_at` columns when rows are modified
+create or replace function set_updated_at()
+    returns trigger as
+$$
+begin
+    NEW.updated_at = now();
+    return NEW;
+end;
+$$ language plpgsql;
+
+create or replace function trigger_updated_at(tablename regclass)
+    returns void as
+$$
+begin
+    execute format('CREATE TRIGGER set_updated_at
+        BEFORE UPDATE
+        ON %s
+        FOR EACH ROW
+        WHEN (OLD is distinct from NEW)
+        EXECUTE FUNCTION set_updated_at();', tablename);
+end;
+$$ language plpgsql;
+
+-- This is a text collation that sorts text case-insensitively, useful for `UNIQUE` indexes
+-- over things like usernames and emails, without needing to remember to do case-conversion.
+create collation case_insensitive (provider = icu, locale = 'und-u-ks-level2', deterministic = false);
+
+create type feed_type as enum ('atom', 'rss');
+
+create table if not exists "feed" (
+    feed_id uuid primary key default uuid_generate_v1mc(),
+    title text,
+    url varchar(2048) not null,
+    type feed_type not null,
+    description text,
+    created_at timestamptz not null default now(),
+    updated_at timestamptz,
+    deleted_at timestamptz
+);
+create index on "feed" (deleted_at);
+create unique index on "feed" (url);
+
+select trigger_updated_at('"feed"');
+
+create table if not exists "entry" (
+    entry_id uuid primary key default uuid_generate_v1mc(),
+    title text,
+    url varchar(2048) not null,
+    description text,
+    html_content text,
+    feed_id uuid not null references "feed" (feed_id) on delete cascade,
+    published_at timestamptz not null,
+    created_at timestamptz not null default now(),
+    updated_at timestamptz,
+    deleted_at timestamptz
+);
+create index on "entry" (published_at desc) where deleted_at is null;
+create unique index on "entry" (url, feed_id);
+
+select trigger_updated_at('"entry"');
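To make the defaults and trigger above concrete, here is a minimal, hypothetical sqlx sketch (not part of the commit): `feed_id` and `created_at` come from the column defaults, `updated_at` starts out null, and the first real UPDATE fires `set_updated_at()` via the trigger installed by `trigger_updated_at('"feed"')`.

use chrono::{DateTime, Utc};
use sqlx::PgPool;
use uuid::Uuid;

async fn demo(pool: &PgPool) -> sqlx::Result<()> {
    // Insert without feed_id/created_at/updated_at; the column defaults fill them in.
    let feed_id: Uuid = sqlx::query_scalar(
        "insert into feed (title, url, type) values ($1, $2, 'rss') returning feed_id",
    )
    .bind("Example feed")
    .bind("https://example.com/feed.xml")
    .fetch_one(pool)
    .await?;

    // A modification fires the BEFORE UPDATE trigger, which stamps updated_at.
    sqlx::query("update feed set title = $1 where feed_id = $2")
        .bind("Renamed feed")
        .bind(feed_id)
        .execute(pool)
        .await?;

    let updated_at: Option<DateTime<Utc>> =
        sqlx::query_scalar("select updated_at from feed where feed_id = $1")
            .bind(feed_id)
            .fetch_one(pool)
            .await?;
    assert!(updated_at.is_some());
    Ok(())
}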


@@ -5,10 +5,12 @@ use dotenvy::dotenv;
 use sqlx::postgres::PgPoolOptions;
 use std::env;
 use tracing::info;
+use uuid::Uuid;
 
 use lib::jobs::crawl::crawl;
 use lib::models::feed::{create_feed, delete_feed, CreateFeed, FeedType};
 use lib::models::entry::{create_entry, delete_entry, CreateEntry};
+use lib::uuid::Base62Uuid;
 
 #[derive(FromArgs)]
 /// CLI for crawlnicle
@@ -51,7 +53,7 @@ struct AddFeed {
 struct DeleteFeed {
     #[argh(positional)]
     /// id of the feed to delete
-    id: i32,
+    id: Uuid,
 }
 
 #[derive(FromArgs)]
@@ -69,7 +71,7 @@ struct AddEntry {
     description: Option<String>,
     #[argh(option)]
     /// source feed for the entry
-    feed_id: i32,
+    feed_id: Uuid,
 }
 
 #[derive(FromArgs)]
@@ -78,7 +80,7 @@ struct AddEntry {
 struct DeleteEntry {
     #[argh(positional)]
     /// id of the entry to delete
-    id: i32,
+    id: Uuid,
 }
 
 #[derive(FromArgs)]
@@ -111,11 +113,11 @@ pub async fn main() -> Result<()> {
                 },
             )
             .await?;
-            info!("Created feed with id {}", feed.id);
+            info!("Created feed with id {}", Base62Uuid::from(feed.feed_id));
         }
         Commands::DeleteFeed(args) => {
             delete_feed(&pool, args.id).await?;
-            info!("Deleted feed with id {}", args.id);
+            info!("Deleted feed with id {}", Base62Uuid::from(args.id));
         }
         Commands::AddEntry(args) => {
             let entry = create_entry(
@@ -126,15 +128,15 @@ pub async fn main() -> Result<()> {
                     description: args.description,
                     html_content: None,
                     feed_id: args.feed_id,
-                    published_at: Utc::now().naive_utc(),
+                    published_at: Utc::now(),
                 },
             )
            .await?;
-            info!("Created entry with id {}", entry.id);
+            info!("Created entry with id {}", Base62Uuid::from(entry.entry_id));
         }
         Commands::DeleteEntry(args) => {
             delete_entry(&pool, args.id).await?;
-            info!("Deleted entry with id {}", args.id);
+            info!("Deleted entry with id {}", Base62Uuid::from(args.id));
         }
         Commands::Crawl(_) => {
             info!("Crawling...");


@@ -3,6 +3,7 @@ use axum::response::{IntoResponse, Response};
 use axum::Json;
 use tracing::error;
 use serde_with::DisplayFromStr;
+use uuid::Uuid;
 use validator::ValidationErrors;
 
 /// An API-friendly error type.
@@ -23,7 +24,7 @@ pub enum Error {
     InvalidEntity(#[from] ValidationErrors),
 
     #[error("{0}: {1} not found")]
-    NotFound(&'static str, i32),
+    NotFound(&'static str, Uuid),
 
     #[error("referenced {0} not found")]
     RelationNotFound(&'static str),


@@ -6,9 +6,13 @@ use sqlx::PgPool;
 
 use crate::error::Error;
 use crate::models::entry::{create_entry, get_entry, CreateEntry, Entry};
+use crate::uuid::Base62Uuid;
 
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Entry>, Error> {
-    Ok(Json(get_entry(&pool, id).await?))
+pub async fn get(
+    State(pool): State<PgPool>,
+    Path(id): Path<Base62Uuid>,
+) -> Result<Json<Entry>, Error> {
+    Ok(Json(get_entry(&pool, id.as_uuid()).await?))
 }
 
 pub async fn post(


@@ -5,10 +5,11 @@ use axum::{
 use sqlx::PgPool;
 
 use crate::error::{Error, Result};
-use crate::models::feed::{create_feed, get_feed, delete_feed, CreateFeed, Feed};
+use crate::models::feed::{create_feed, delete_feed, get_feed, CreateFeed, Feed};
+use crate::uuid::Base62Uuid;
 
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Feed>> {
-    Ok(Json(get_feed(&pool, id).await?))
+pub async fn get(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
+    Ok(Json(get_feed(&pool, id.as_uuid()).await?))
 }
 
 pub async fn post(
@@ -18,6 +19,6 @@ pub async fn post(
     Ok(Json(create_feed(&pool, payload).await?))
 }
 
-pub async fn delete(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<()> {
-    delete_feed(&pool, id).await
+pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
+    delete_feed(&pool, id.as_uuid()).await
 }


@@ -1,4 +1,4 @@
-use axum::extract::{State, Path};
+use axum::extract::{Path, State};
 use axum::response::Response;
 use maud::{html, PreEscaped};
 use sqlx::PgPool;
@@ -6,9 +6,14 @@ use sqlx::PgPool;
 use crate::error::Result;
 use crate::models::entry::get_entry;
 use crate::partials::layout::Layout;
+use crate::uuid::Base62Uuid;
 
-pub async fn get(Path(id): Path<i32>, State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
-    let entry = get_entry(&pool, id).await?;
+pub async fn get(
+    Path(id): Path<Base62Uuid>,
+    State(pool): State<PgPool>,
+    layout: Layout,
+) -> Result<Response> {
+    let entry = get_entry(&pool, id.as_uuid()).await?;
     Ok(layout.render(html! {
         @let title = entry.title.unwrap_or_else(|| "Untitled".to_string());
         h1 { a href=(entry.url) { (title) } }


@@ -7,6 +7,7 @@ use crate::error::Result;
 use crate::models::entry::{get_entries, GetEntriesOptions};
 use crate::partials::layout::Layout;
 use crate::utils::get_domain;
+use crate::uuid::Base62Uuid;
 
 pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
     let entries = get_entries(&pool, GetEntriesOptions::default()).await?;
@@ -14,7 +15,7 @@ pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response>
         ul class="entries" {
             @for entry in entries {
                 @let title = entry.title.unwrap_or_else(|| "Untitled".to_string());
-                @let url = format!("/entry/{}", entry.id);
+                @let url = format!("/entry/{}", Base62Uuid::from(entry.entry_id));
                 @let domain = get_domain(&entry.url).unwrap_or_default();
                 li { a href=(url) { (title) } em class="domain" { (domain) }}
             }


@@ -7,6 +7,7 @@ use tracing::{info, info_span, warn};
 
 use crate::models::feed::get_feeds;
 use crate::models::entry::{upsert_entries, CreateEntry};
+use crate::uuid::Base62Uuid;
 
 /// For every feed in the database, fetches the feed, parses it, and saves new entries to the
 /// database.
@@ -15,7 +16,8 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
     let client = Client::new();
     let feeds = get_feeds(pool).await?;
     for feed in feeds {
-        let feed_span = info_span!("feed", id = feed.id, url = feed.url.as_str());
+        let feed_id_str: String = Base62Uuid::from(feed.feed_id).into();
+        let feed_span = info_span!("feed", id = feed_id_str, url = feed.url.as_str());
         let _feed_span_guard = feed_span.enter();
         info!("Fetching feed");
         // TODO: handle these results
@@ -28,20 +30,20 @@ pub async fn crawl(pool: &PgPool) -> anyhow::Result<()> {
             let _entry_span_guard = entry_span.enter();
             if let Some(link) = entry.links.get(0) {
                 // if no scraped or feed date is available, fallback to the current time
-                let published_at = entry.published.unwrap_or_else(Utc::now).naive_utc();
+                let published_at = entry.published.unwrap_or_else(Utc::now);
                 let mut entry = CreateEntry {
                     title: entry.title.map(|t| t.content),
                     url: link.href.clone(),
                     description: entry.summary.map(|s| s.content),
                     html_content: None,
-                    feed_id: feed.id,
+                    feed_id: feed.feed_id,
                     published_at,
                 };
                 info!("Fetching and parsing entry link: {}", link.href);
                 if let Ok(article) = scraper.parse(&Url::parse(&link.href)?, true, &client, None).await {
                     if let Some(date) = article.date {
                         // prefer scraped date over rss feed date
-                        entry.published_at = date.naive_utc()
+                        entry.published_at = date;
                     };
                     entry.html_content = article.get_content();
                 } else {


@@ -7,3 +7,4 @@ pub mod models;
 pub mod partials;
 pub mod state;
 pub mod utils;
+pub mod uuid;


@@ -1,6 +1,7 @@
-use chrono::NaiveDateTime;
+use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
+use uuid::Uuid;
 use validator::{Validate, ValidationErrors};
 
 use crate::error::{Error, Result};
@@ -9,16 +10,16 @@ const DEFAULT_ENTRIES_PAGE_SIZE: i64 = 50;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct Entry {
-    pub id: i32,
+    pub entry_id: Uuid,
     pub title: Option<String>,
     pub url: String,
     pub description: Option<String>,
     pub html_content: Option<String>,
-    pub feed_id: i32,
-    pub published_at: NaiveDateTime,
-    pub created_at: NaiveDateTime,
-    pub updated_at: NaiveDateTime,
-    pub deleted_at: Option<NaiveDateTime>,
+    pub feed_id: Uuid,
+    pub published_at: DateTime<Utc>,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: Option<DateTime<Utc>>,
+    pub deleted_at: Option<DateTime<Utc>>,
 }
 
 #[derive(Debug, Deserialize, Validate)]
@@ -30,18 +31,17 @@ pub struct CreateEntry {
     #[validate(length(max = 524288))]
     pub description: Option<String>,
     pub html_content: Option<String>,
-    #[validate(range(min = 1))]
-    pub feed_id: i32,
-    pub published_at: NaiveDateTime,
+    pub feed_id: Uuid,
+    pub published_at: DateTime<Utc>,
 }
 
-pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
-    sqlx::query_as!(Entry, "SELECT * FROM entries WHERE id = $1", id)
+pub async fn get_entry(pool: &PgPool, entry_id: Uuid) -> Result<Entry> {
+    sqlx::query_as!(Entry, "select * from entry where entry_id = $1", entry_id)
         .fetch_one(pool)
         .await
         .map_err(|error| {
             if let sqlx::error::Error::RowNotFound = error {
-                return Error::NotFound("entry", id);
+                return Error::NotFound("entry", entry_id);
             }
             Error::Sqlx(error)
         })
@@ -49,7 +49,7 @@ pub async fn get_entry(pool: &PgPool, id: i32) -> Result<Entry> {
 
 #[derive(Default)]
 pub struct GetEntriesOptions {
-    pub published_before: Option<NaiveDateTime>,
+    pub published_before: Option<DateTime<Utc>>,
     pub limit: Option<i64>,
 }
 
@@ -60,11 +60,11 @@ pub async fn get_entries(
     if let Some(published_before) = options.published_before {
         sqlx::query_as!(
             Entry,
-            "SELECT * FROM entries
-            WHERE deleted_at IS NULL
-            AND published_at < $1
-            ORDER BY published_at DESC
-            LIMIT $2
+            "select * from entry
+            where deleted_at is null
+            and published_at < $1
+            order by published_at desc
+            limit $2
             ",
             published_before,
             options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
@@ -74,10 +74,10 @@ pub async fn get_entries(
     } else {
         sqlx::query_as!(
             Entry,
-            "SELECT * FROM entries
-            WHERE deleted_at IS NULL
-            ORDER BY published_at DESC
-            LIMIT $1
+            "select * from entry
+            where deleted_at is null
+            order by published_at desc
+            limit $1
             ",
             options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
         )
@@ -91,11 +91,11 @@ pub async fn create_entry(pool: &PgPool, payload: CreateEntry) -> Result<Entry>
     payload.validate()?;
     sqlx::query_as!(
         Entry,
-        "INSERT INTO entries (
-            title, url, description, html_content, feed_id, published_at, created_at, updated_at
-        ) VALUES (
-            $1, $2, $3, $4, $5, $6, now(), now()
-        ) RETURNING *",
+        "insert into entry (
+            title, url, description, html_content, feed_id, published_at
+        ) values (
+            $1, $2, $3, $4, $5, $6
+        ) returning *",
         payload.title,
         payload.url,
         payload.description,
@@ -136,10 +136,10 @@ pub async fn create_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
         .collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
         Entry,
-        "INSERT INTO entries (
-            title, url, description, html_content, feed_id, published_at, created_at, updated_at
-        ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[], $5::int[], $6::timestamp(3)[])
-        RETURNING *",
+        "insert into entry (
+            title, url, description, html_content, feed_id, published_at
+        ) select * from unnest($1::text[], $2::text[], $3::text[], $4::text[], $5::uuid[], $6::timestamptz[])
+        returning *",
         titles.as_slice() as &[Option<String>],
         urls.as_slice(),
         descriptions.as_slice() as &[Option<String>],
@@ -180,11 +180,11 @@ pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
         .collect::<Result<Vec<()>, ValidationErrors>>()?;
     sqlx::query_as!(
         Entry,
-        "INSERT INTO entries (
-            title, url, description, html_content, feed_id, published_at, created_at, updated_at
-        ) SELECT *, now(), now() FROM UNNEST($1::text[], $2::text[], $3::text[], $4::text[], $5::int[], $6::timestamp(3)[])
-        ON CONFLICT DO NOTHING
-        RETURNING *",
+        "insert into entry (
+            title, url, description, html_content, feed_id, published_at
+        ) select * from unnest($1::text[], $2::text[], $3::text[], $4::text[], $5::uuid[], $6::timestamptz[])
+        on conflict do nothing
+        returning *",
         titles.as_slice() as &[Option<String>],
         urls.as_slice(),
         descriptions.as_slice() as &[Option<String>],
@@ -204,8 +204,8 @@ pub async fn upsert_entries(pool: &PgPool, payload: Vec<CreateEntry>) -> Result<
     })
 }
 
-pub async fn delete_entry(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE entries SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_entry(pool: &PgPool, entry_id: Uuid) -> Result<()> {
+    sqlx::query!("update entry set deleted_at = now() where entry_id = $1", entry_id)
         .execute(pool)
         .await?;
     Ok(())


@@ -1,8 +1,9 @@
 use std::str::FromStr;
 
-use chrono::NaiveDateTime;
+use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::PgPool;
+use uuid::Uuid;
 use validator::Validate;
 
 use crate::error::{Error, Result};
@@ -28,15 +29,15 @@ impl FromStr for FeedType {
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct Feed {
-    pub id: i32,
+    pub feed_id: Uuid,
     pub title: Option<String>,
     pub url: String,
     #[serde(rename = "type")]
     pub feed_type: FeedType,
     pub description: Option<String>,
-    pub created_at: NaiveDateTime,
-    pub updated_at: NaiveDateTime,
-    pub deleted_at: Option<NaiveDateTime>,
+    pub created_at: DateTime<Utc>,
+    pub updated_at: Option<DateTime<Utc>>,
+    pub deleted_at: Option<DateTime<Utc>>,
 }
 
 #[derive(Debug, Deserialize, Validate)]
@@ -51,12 +52,13 @@ pub struct CreateFeed {
     pub description: Option<String>,
 }
 
-pub async fn get_feed(pool: &PgPool, id: i32) -> Result<Feed> {
+pub async fn get_feed(pool: &PgPool, feed_id: Uuid) -> Result<Feed> {
     sqlx::query_as!(
         Feed,
         // Unable to SELECT * here due to https://github.com/launchbadge/sqlx/issues/1004
-        r#"SELECT
-            id,
+        // language=PostGreSQL
+        r#"select
+            feed_id,
             title,
             url,
             type as "feed_type: FeedType",
@@ -64,14 +66,14 @@ pub async fn get_feed(pool: &PgPool, id: i32) -> Result<Feed> {
             created_at,
             updated_at,
             deleted_at
-        FROM feeds WHERE id = $1"#,
-        id
+        from feed where feed_id = $1"#,
+        feed_id
     )
     .fetch_one(pool)
     .await
     .map_err(|error| {
         if let sqlx::error::Error::RowNotFound = error {
-            return Error::NotFound("feed", id);
+            return Error::NotFound("feed", feed_id);
         }
         Error::Sqlx(error)
     })
@@ -80,8 +82,8 @@ pub async fn get_feed(pool: &PgPool, id: i32) -> Result<Feed> {
 pub async fn get_feeds(pool: &PgPool) -> sqlx::Result<Vec<Feed>> {
     sqlx::query_as!(
         Feed,
-        r#"SELECT
-            id,
+        r#"select
+            feed_id,
             title,
             url,
             type as "feed_type: FeedType",
@@ -89,8 +91,8 @@ pub async fn get_feeds(pool: &PgPool) -> sqlx::Result<Vec<Feed>> {
             created_at,
             updated_at,
             deleted_at
-        FROM feeds
-        WHERE deleted_at IS NULL"#
+        from feed
+        where deleted_at is null"#
     )
     .fetch_all(pool)
     .await
@@ -100,12 +102,12 @@ pub async fn create_feed(pool: &PgPool, payload: CreateFeed) -> Result<Feed> {
     payload.validate()?;
     Ok(sqlx::query_as!(
         Feed,
-        r#"INSERT INTO feeds (
-            title, url, type, description, created_at, updated_at
-        ) VALUES (
-            $1, $2, $3, $4, now(), now()
-        ) RETURNING
-            id,
+        r#"insert into feed (
+            title, url, type, description
+        ) values (
+            $1, $2, $3, $4
+        ) returning
+            feed_id,
             title,
             url,
             type as "feed_type: FeedType",
@@ -123,8 +125,8 @@ pub async fn create_feed(pool: &PgPool, payload: CreateFeed) -> Result<Feed> {
     .await?)
 }
 
-pub async fn delete_feed(pool: &PgPool, id: i32) -> Result<()> {
-    sqlx::query!("UPDATE feeds SET deleted_at = now() WHERE id = $1", id)
+pub async fn delete_feed(pool: &PgPool, feed_id: Uuid) -> Result<()> {
+    sqlx::query!("update feed set deleted_at = now() where feed_id = $1", feed_id)
        .execute(pool)
        .await?;
     Ok(())


@@ -1,5 +1,7 @@
 use url::Url;
 
+const BASE62_CHARS: &[u8] = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
+
 pub fn get_domain(url: &str) -> Option<String> {
     Url::parse(url)
         .ok()

src/uuid.rs (new file)

@@ -0,0 +1,107 @@
+use std::fmt::{Display, Formatter, self};
+
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+const BASE62_CHARS: &[u8] = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Base62Uuid(
+    #[serde(deserialize_with = "uuid_from_base62_str")]
+    #[serde(serialize_with = "uuid_to_base62_str")]
+    Uuid
+);
+
+impl Base62Uuid {
+    pub fn as_uuid(&self) -> Uuid {
+        self.0
+    }
+}
+
+impl From<Uuid> for Base62Uuid {
+    fn from(uuid: Uuid) -> Self {
+        Self(uuid)
+    }
+}
+
+impl Display for Base62Uuid {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", base62_encode(self.0.as_u128()))
+    }
+}
+
+impl From<&str> for Base62Uuid {
+    fn from(s: &str) -> Self {
+        Self(Uuid::from_u128(base62_decode(s)))
+    }
+}
+
+impl From<Base62Uuid> for String {
+    fn from(s: Base62Uuid) -> Self {
+        base62_encode(s.0.as_u128())
+    }
+}
+
+fn uuid_to_base62_str<S>(uuid: &Uuid, s: S) -> Result<S::Ok, S::Error>
+where
+    S: serde::Serializer,
+{
+    s.serialize_str(&base62_encode(uuid.as_u128()))
+}
+
+fn uuid_from_base62_str<'de, D>(deserializer: D) -> Result<Uuid, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let s = String::deserialize(deserializer)?;
+    Ok(Uuid::from_u128(base62_decode(&s)))
+}
+
+pub fn base62_encode(mut number: u128) -> String {
+    let base = BASE62_CHARS.len() as u128;
+    let mut encoded = Vec::new();
+
+    while number > 0 {
+        let remainder = (number % base) as usize;
+        number /= base;
+        encoded.push(BASE62_CHARS[remainder]);
+    }
+
+    encoded.reverse();
+    String::from_utf8(encoded).unwrap()
+}
+
+pub fn base62_decode(input: &str) -> u128 {
+    let base = BASE62_CHARS.len() as u128;
+    let mut number = 0u128;
+
+    for &byte in input.as_bytes() {
+        number = number * base + (BASE62_CHARS.iter().position(|&ch| ch == byte).unwrap() as u128);
+    }
+
+    number
+}
+
+#[cfg(test)]
+mod tests {
+    use uuid::Uuid;
+
+    use super::*;
+
+    #[test]
+    fn test_encode_decode() {
+        let original_uuids = [
+            Uuid::new_v4(),
+            Uuid::new_v4(),
+            Uuid::new_v4(),
+            Uuid::new_v4(),
+        ];
+
+        for original_uuid in original_uuids.iter() {
+            let encoded = base62_encode(original_uuid.as_u128());
+            let decoded_uuid = Uuid::from_u128(base62_decode(&encoded));
+            assert_eq!(*original_uuid, decoded_uuid);
+        }
+    }
+}
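A few hypothetical spot checks of the div/mod arithmetic in base62_encode/base62_decode (not in the commit), written as if added inside the same tests module above so the functions are already in scope via `use super::*;`:

    #[test]
    fn base62_spot_checks() {
        // digit values: 0-9 -> '0'-'9', 10-35 -> 'A'-'Z', 36-61 -> 'a'-'z'
        assert_eq!(base62_encode(61), "z");
        assert_eq!(base62_encode(62), "10"); // one sixty-two and zero ones
        assert_eq!(base62_decode("z"), 61);
        assert_eq!(base62_decode("10"), 62);
        // zero encodes to the empty string because the loop emits no digits
        assert_eq!(base62_encode(0), "");
    }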