Implement entry and feed pagination

This commit is contained in:
Tyler Hallada 2023-09-02 14:01:18 -04:00
parent 0607b46283
commit ec394fc170
29 changed files with 520 additions and 158 deletions

1
.gitignore vendored
View File

@ -3,5 +3,6 @@
.env
/static/js/*
/static/css/*
/static/img/*
.frontend-built
/content

41
Cargo.lock generated
View File

@ -158,6 +158,7 @@ dependencies = [
"bitflags 1.3.2",
"bytes",
"futures-util",
"headers",
"http",
"http-body",
"hyper",
@ -197,6 +198,12 @@ dependencies = [
"tower-service",
]
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.0"
@ -382,6 +389,7 @@ dependencies = [
"dotenvy",
"feed-rs",
"futures",
"http",
"maud",
"notify",
"once_cell",
@ -929,6 +937,31 @@ dependencies = [
"hashbrown 0.12.3",
]
[[package]]
name = "headers"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584"
dependencies = [
"base64 0.13.1",
"bitflags 1.3.2",
"bytes",
"headers-core",
"http",
"httpdate",
"mime",
"sha1",
]
[[package]]
name = "headers-core"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
dependencies = [
"http",
]
[[package]]
name = "heck"
version = "0.4.1"
@ -2088,7 +2121,7 @@ version = "0.11.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13293b639a097af28fc8a90f22add145a9c954e49d77da06263d58cf44d5fb91"
dependencies = [
"base64",
"base64 0.21.0",
"bytes",
"encoding_rs",
"futures-core",
@ -2278,7 +2311,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f02d8aa6e3c385bf084924f660ce2a3a6bd333ba55b35e8590b321f35d88513"
dependencies = [
"base64",
"base64 0.21.0",
"chrono",
"hex",
"indexmap 1.9.3",
@ -2519,7 +2552,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8264c59b28b6858796acfcedc660aa4c9075cc6e4ec8eb03cdca2a3e725726db"
dependencies = [
"atoi",
"base64",
"base64 0.21.0",
"bitflags 2.3.3",
"byteorder",
"bytes",
@ -2563,7 +2596,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cab6147b81ca9213a7578f1b4c9d24c449a53953cd2222a7b5d7cd29a5c3139"
dependencies = [
"atoi",
"base64",
"base64 0.21.0",
"bitflags 2.3.3",
"byteorder",
"chrono",

View File

@ -14,7 +14,7 @@ path = "src/lib.rs"
[dependencies]
ansi-to-html = "0.1"
anyhow = "1"
axum = { version = "0.6", features = ["form", "multipart"] }
axum = { version = "0.6", features = ["form", "headers", "multipart"] }
bytes = "1.4"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.3", features = ["derive", "env"] }
@ -50,3 +50,4 @@ uuid = { version = "1.3", features = ["serde"] }
url = "2.4"
validator = { version = "0.16", features = ["derive"] }
ammonia = "3.3.0"
http = "0.2.9"

View File

@ -6,6 +6,27 @@ html {
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
}
.htmx-indicator {
display: none;
}
.htmx-request .list-loading {
display: block;
}
.htmx-request.list-loading {
display: block;
}
.list-loading {
margin: 24px auto;
}
img.loading {
filter: invert(100%);
max-width: 64px;
}
/* Header */
header.header nav {

33
frontend/img/three-dots.svg Executable file
View File

@ -0,0 +1,33 @@
<!-- By Sam Herbert (@sherb), for everyone. More @ http://goo.gl/7AJzbL -->
<svg width="120" height="30" viewBox="0 0 120 30" xmlns="http://www.w3.org/2000/svg" fill="#fff">
<circle cx="15" cy="15" r="15">
<animate attributeName="r" from="15" to="15"
begin="0s" dur="0.8s"
values="15;9;15" calcMode="linear"
repeatCount="indefinite" />
<animate attributeName="fill-opacity" from="1" to="1"
begin="0s" dur="0.8s"
values="1;.5;1" calcMode="linear"
repeatCount="indefinite" />
</circle>
<circle cx="60" cy="15" r="9" fill-opacity="0.3">
<animate attributeName="r" from="9" to="9"
begin="0s" dur="0.8s"
values="9;15;9" calcMode="linear"
repeatCount="indefinite" />
<animate attributeName="fill-opacity" from="0.5" to="0.5"
begin="0s" dur="0.8s"
values=".5;1;.5" calcMode="linear"
repeatCount="indefinite" />
</circle>
<circle cx="105" cy="15" r="15">
<animate attributeName="r" from="15" to="15"
begin="0s" dur="0.8s"
values="15;9;15" calcMode="linear"
repeatCount="indefinite" />
<animate attributeName="fill-opacity" from="1" to="1"
begin="0s" dur="0.8s"
values="1;.5;1" calcMode="linear"
repeatCount="indefinite" />
</circle>
</svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@ -1,7 +1,6 @@
import htmx from 'htmx.org';
import 'htmx.org/dist/ext/sse';
// import CSS so it gets named with a content hash that busts caches
// import assets so they get named with a content hash that busts caches
import '../css/styles.css';
import './localTimeController';
@ -13,3 +12,6 @@ declare global {
}
window.htmx = htmx;
// eslint-disable-next-line import/first
import 'htmx.org/dist/ext/sse';

View File

@ -7,15 +7,18 @@ install-frontend:
bun install --cwd frontend
clean-frontend:
rm -rf ./static/js/* ./static/css/*
rm -rf ./static/js/* ./static/css/* ./static/img/*
build-frontend: clean-frontend
bun build frontend/js/index.ts \
--outdir ./static \
--root ./frontend \
--entry-naming [dir]/[name]-[hash].[ext] \
--chunk-naming [dir]/[name]-[hash].[ext] \
--asset-naming [dir]/[name]-[hash].[ext] \
--minify
mkdir -p static/img
cp frontend/img/* static/img/
touch ./static/js/manifest.txt # create empty manifest to be overwritten by build.rs
touch ./static/css/manifest.txt # create empty manifest to be overwritten by build.rs
touch .frontend-built # trigger build.rs to run
@ -25,7 +28,10 @@ build-dev-frontend: clean-frontend
--outdir ./static \
--root ./frontend \
--entry-naming [dir]/[name]-[hash].[ext] \
--chunk-naming [dir]/[name]-[hash].[ext] \
--asset-naming [dir]/[name]-[hash].[ext]
mkdir -p static/img
cp frontend/img/* static/img/
touch ./static/js/manifest.txt # create empty manifest needed so binary compiles
touch ./static/css/manifest.txt # create empty manifest needed so binary compiles
# in development mode, frontend changes do not trigger a rebuild of the backend

View File

@ -84,7 +84,7 @@ impl CrawlScheduler {
let mut options = GetFeedsOptions::default();
loop {
info!("fetching feeds before: {:?}", options.before);
let feeds = match Feed::get_all(&self.pool, options.clone()).await {
let feeds = match Feed::get_all(&self.pool, &options).await {
Err(err) => {
return Err(CrawlSchedulerError::FetchFeedsError(err.to_string()));
}

25
src/api_response.rs Normal file
View File

@ -0,0 +1,25 @@
use axum::{
response::{Html, IntoResponse, Response},
Json,
};
use serde::Serialize;
/// Wrapper type for API responses that allows endpoints to return either JSON or HTML in the same
/// route.
///
/// Handlers pick the variant based on the request's `Accept` header: `Json`
/// for API clients, `Html` for server-rendered fragments (e.g. htmx swaps).
#[derive(Debug)]
pub enum ApiResponse<T> {
    /// A value to be serialized as a JSON response body.
    Json(T),
    /// A pre-rendered HTML string to be sent as the response body.
    Html(String),
}
impl<T> IntoResponse for ApiResponse<T>
where
    T: Serialize,
{
    /// Converts the wrapper into an axum [`Response`] by delegating to the
    /// matching axum responder (`Json` or `Html`), which sets the appropriate
    /// `Content-Type` header and body for each variant.
    fn into_response(self) -> Response {
        match self {
            ApiResponse::Json(json) => Json(json).into_response(),
            ApiResponse::Html(html) => Html(html).into_response(),
        }
    }
}

View File

@ -6,7 +6,10 @@ use lib::actors::feed_crawler::FeedCrawlerHandle;
use lib::domain_locks::DomainLocks;
use reqwest::Client;
use sqlx::postgres::PgPoolOptions;
use std::collections::HashMap;
use std::env;
use std::sync::Arc;
use tokio::sync::Mutex;
use tracing::info;
use uuid::Uuid;

View File

@ -1,9 +1,27 @@
use axum::{extract::State, Json};
use axum::extract::Query;
use axum::extract::State;
use axum::response::IntoResponse;
use axum::TypedHeader;
use sqlx::PgPool;
use crate::api_response::ApiResponse;
use crate::error::Error;
use crate::models::entry::Entry;
use crate::headers::Accept;
use crate::models::entry::{Entry, GetEntriesOptions};
use crate::partials::entry_list::entry_list;
pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Entry>>, Error> {
Ok(Json(Entry::get_all(&pool, Default::default()).await?))
pub async fn get(
Query(options): Query<GetEntriesOptions>,
accept: Option<TypedHeader<Accept>>,
State(pool): State<PgPool>,
) -> Result<impl IntoResponse, impl IntoResponse> {
let entries = Entry::get_all(&pool, &options).await.map_err(Error::from)?;
if let Some(TypedHeader(accept)) = accept {
if accept == Accept::ApplicationJson {
return Ok::<ApiResponse<Vec<Entry>>, Error>(ApiResponse::Json(entries));
}
}
Ok(ApiResponse::Html(
entry_list(entries, &options).into_string(),
))
}

View File

@ -1,10 +1,27 @@
use axum::{extract::State, Json};
use axum::TypedHeader;
use axum::extract::Query;
use axum::response::IntoResponse;
use axum::extract::State;
use sqlx::PgPool;
use crate::api_response::ApiResponse;
use crate::error::Error;
use crate::models::feed::Feed;
use crate::headers::Accept;
use crate::models::feed::{Feed, GetFeedsOptions};
use crate::partials::feed_list::feed_list;
pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Feed>>, Error> {
// TODO: pagination
Ok(Json(Feed::get_all(&pool, Default::default()).await?))
pub async fn get(
Query(options): Query<GetFeedsOptions>,
accept: Option<TypedHeader<Accept>>,
State(pool): State<PgPool>,
) -> Result<impl IntoResponse, impl IntoResponse> {
let feeds = Feed::get_all(&pool, &options).await.map_err(Error::from)?;
if let Some(TypedHeader(accept)) = accept {
if accept == Accept::ApplicationJson {
return Ok::<ApiResponse<Vec<Feed>>, Error>(ApiResponse::Json(feeds));
}
}
Ok(ApiResponse::Html(
feed_list(feeds, &options).into_string(),
))
}

15
src/handlers/entries.rs Normal file
View File

@ -0,0 +1,15 @@
use axum::extract::{Query, State};
use maud::Markup;
use sqlx::PgPool;
use crate::error::Result;
use crate::models::entry::{Entry, GetEntriesOptions};
use crate::partials::entry_list::entry_list;
/// Handler for `GET /entries`: renders a page of entries as an HTML fragment.
///
/// Pagination cursors arrive in the query string via `GetEntriesOptions`
/// (`feed_id`, `published_before`, `id_before`, `limit`) and are forwarded
/// both to the database query and to `entry_list`, which uses them to build
/// the next-page URL for infinite scrolling.
pub async fn get(
    Query(options): Query<GetEntriesOptions>,
    State(pool): State<PgPool>,
) -> Result<Markup> {
    let entries = Entry::get_all(&pool, &options).await?;
    Ok(entry_list(entries, &options))
}

View File

@ -20,10 +20,12 @@ pub async fn get(
let entry = Entry::get(&pool, id.as_uuid()).await?;
let content_dir = std::path::Path::new(&config.content_dir);
let content_path = content_dir.join(format!("{}.html", entry.entry_id));
let title = entry.title.unwrap_or_else(|| "Untitled".to_string());
let published_at = entry.published_at.to_rfc3339_opts(chrono::SecondsFormat::Millis, true);
let title = entry.title.unwrap_or_else(|| "Untitled Entry".to_string());
let published_at = entry
.published_at
.to_rfc3339_opts(chrono::SecondsFormat::Millis, true);
let content = fs::read_to_string(content_path).unwrap_or_else(|_| "No content".to_string());
Ok(layout.render(html! {
Ok(layout.with_subtitle(&title).render(html! {
article {
h2 class="title" { a href=(entry.url) { (title) } }
div {

View File

@ -16,7 +16,7 @@ use tokio_stream::StreamExt;
use crate::actors::crawl_scheduler::{CrawlSchedulerHandle, CrawlSchedulerHandleMessage};
use crate::actors::feed_crawler::FeedCrawlerHandleMessage;
use crate::error::{Error, Result};
use crate::models::entry::Entry;
use crate::models::entry::{Entry, GetEntriesOptions};
use crate::models::feed::{CreateFeed, Feed};
use crate::partials::add_feed_form::add_feed_form;
use crate::partials::entry_link::entry_link;
@ -30,11 +30,16 @@ pub async fn get(
layout: Layout,
) -> Result<Response> {
let feed = Feed::get(&pool, id.as_uuid()).await?;
let entries = Entry::get_all_for_feed(&pool, feed.feed_id, Default::default()).await?;
let options = GetEntriesOptions {
feed_id: Some(feed.feed_id),
..Default::default()
};
let title = feed.title.unwrap_or_else(|| "Untitled Feed".to_string());
let entries = Entry::get_all(&pool, &options).await?;
let delete_url = format!("/feed/{}/delete", id);
Ok(layout.render(html! {
Ok(layout.with_subtitle(&title).render(html! {
header class="feed-header" {
h2 { (feed.title.unwrap_or_else(|| "Untitled Feed".to_string())) }
h2 { (title) }
button class="edit-feed" { "✏️ Edit feed" }
form action=(delete_url) method="post" {
button type="submit" class="remove-feed" data-controller="remove-feed" { "❌ Remove feed" }
@ -43,7 +48,7 @@ pub async fn get(
@if let Some(description) = feed.description {
p { (description) }
}
(entry_list(entries))
(entry_list(entries, &options))
}))
}
@ -178,7 +183,7 @@ pub async fn stream(
entry,
)))) => Ok(Event::default().data(
html! {
li { "Crawled entry: " (entry_link(entry)) }
li { "Crawled entry: " (entry_link(&entry)) }
}
.into_string(),
)),

View File

@ -7,16 +7,18 @@ use crate::error::Result;
use crate::models::feed::{Feed, GetFeedsOptions};
use crate::partials::add_feed_form::add_feed_form;
use crate::partials::feed_list::feed_list;
use crate::partials::opml_import_form::opml_import_form;
use crate::partials::layout::Layout;
use crate::partials::opml_import_form::opml_import_form;
pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
let options = GetFeedsOptions::default();
let feeds = Feed::get_all(&pool, options.clone()).await?;
Ok(layout.render(html! {
let feeds = Feed::get_all(&pool, &options).await?;
Ok(layout.with_subtitle("feeds").render(html! {
h2 { "Feeds" }
div class="feeds" {
(feed_list(feeds, options))
ul id="feeds" {
(feed_list(feeds, &options))
}
div class="add-feed" {
h3 { "Add Feed" }
(add_feed_form())

View File

@ -1,5 +1,6 @@
use axum::extract::State;
use axum::response::Response;
use maud::html;
use sqlx::PgPool;
use crate::error::Result;
@ -7,6 +8,11 @@ use crate::models::entry::Entry;
use crate::partials::{layout::Layout, entry_list::entry_list};
pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
let entries = Entry::get_all(&pool, Default::default()).await?;
Ok(layout.render(entry_list(entries)))
let options = Default::default();
let entries = Entry::get_all(&pool, &options).await?;
Ok(layout.render(html! {
ul class="entries" {
(entry_list(entries, &options))
}
}))
}

View File

@ -76,7 +76,7 @@ pub async fn stream(
))) => Ok::<Event, String>(
Event::default().data(
html! {
li { "Crawled entry: " (entry_link(entry)) }
li { "Crawled entry: " (entry_link(&entry)) }
}
.into_string(),
),

View File

@ -22,7 +22,7 @@ use crate::partials::layout::Layout;
pub async fn get(layout: Layout) -> Result<Response> {
let mem_buf = MEM_LOG.lock().unwrap();
Ok(layout.render(html! {
Ok(layout.with_subtitle("log").render(html! {
pre id="log" hx-sse="connect:/log/stream swap:message" hx-swap="beforeend" {
(PreEscaped(convert_escaped(from_utf8(mem_buf.as_slices().0).unwrap()).unwrap()))
}

View File

@ -1,4 +1,5 @@
pub mod api;
pub mod entries;
pub mod entry;
pub mod home;
pub mod import;

58
src/headers.rs Normal file
View File

@ -0,0 +1,58 @@
use axum::{
headers::{self, Header},
http::{HeaderName, HeaderValue},
};
/// Typed header implementation for the `Accept` header.
///
/// Only the two media types this application serves are modeled; any other
/// `Accept` value fails to decode (see the `Header` impl below).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Accept {
    /// The `text/html` media type.
    TextHtml,
    /// The `application/json` media type.
    ApplicationJson,
}
impl std::fmt::Display for Accept {
    /// Renders the media type exactly as it appears on the wire.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let media_type = match self {
            Accept::TextHtml => "text/html",
            Accept::ApplicationJson => "application/json",
        };
        f.write_str(media_type)
    }
}
impl Header for Accept {
    /// The header this type parses and serializes: `Accept`.
    fn name() -> &'static HeaderName {
        &http::header::ACCEPT
    }

    /// Decodes the first `Accept` header value in the iterator.
    ///
    /// Matching is strict: only the exact strings `text/html` and
    /// `application/json` are accepted. Values with parameters
    /// (`application/json; charset=utf-8`), quality weights, or comma-separated
    /// lists (as browsers typically send) are rejected with
    /// `headers::Error::invalid()` — callers extracting this header as
    /// `Option<TypedHeader<Accept>>` then see `None` and fall back to their
    /// default (HTML) response.
    fn decode<'i, I>(values: &mut I) -> Result<Self, headers::Error>
    where
        I: Iterator<Item = &'i HeaderValue>,
    {
        // Missing header entirely is also an invalid-decode, per the trait contract.
        let value = values.next().ok_or_else(headers::Error::invalid)?;
        match value.to_str().map_err(|_| headers::Error::invalid())? {
            "text/html" => Ok(Accept::TextHtml),
            "application/json" => Ok(Accept::ApplicationJson),
            _ => Err(headers::Error::invalid()),
        }
    }

    /// Encodes this value by appending a single `HeaderValue` (via the
    /// `From<&Accept> for HeaderValue` conversion below).
    fn encode<E>(&self, values: &mut E)
    where
        E: Extend<HeaderValue>,
    {
        values.extend(std::iter::once(self.into()));
    }
}
/// Converts an owned `Accept` into a `HeaderValue` by delegating to the
/// borrowed conversion.
impl From<Accept> for HeaderValue {
    fn from(value: Accept) -> Self {
        HeaderValue::from(&value)
    }
}
impl From<&Accept> for HeaderValue {
fn from(value: &Accept) -> Self {
HeaderValue::from_str(value.to_string().as_str()).unwrap()
}
}

View File

@ -1,8 +1,10 @@
pub mod actors;
pub mod api_response;
pub mod config;
pub mod domain_locks;
pub mod error;
pub mod handlers;
pub mod headers;
pub mod log;
pub mod models;
pub mod partials;
@ -11,5 +13,5 @@ pub mod utils;
pub mod uuid;
pub const USER_AGENT: &str = "crawlnicle/0.1.0";
pub const JS_BUNDLES: &str = include_str!("../static/js/manifest.txt");
pub const CSS_BUNDLES: &str = include_str!("../static/css/manifest.txt");
pub const JS_MANIFEST: &str = include_str!("../static/js/manifest.txt");
pub const CSS_MANIFEST: &str = include_str!("../static/css/manifest.txt");

View File

@ -89,6 +89,7 @@ async fn main() -> Result<()> {
.route("/feed/:id", get(handlers::feed::get))
.route("/feed/:id/stream", get(handlers::feed::stream))
.route("/feed/:id/delete", post(handlers::feed::delete))
.route("/entries", get(handlers::entries::get))
.route("/entry/:id", get(handlers::entry::get))
.route("/log", get(handlers::log::get))
.route("/log/stream", get(handlers::log::stream))

View File

@ -6,7 +6,7 @@ use validator::{Validate, ValidationErrors};
use crate::error::{Error, Result};
const DEFAULT_ENTRIES_PAGE_SIZE: i64 = 50;
pub const DEFAULT_ENTRIES_PAGE_SIZE: i64 = 50;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Entry {
@ -35,9 +35,11 @@ pub struct CreateEntry {
pub published_at: DateTime<Utc>,
}
#[derive(Default)]
#[derive(Default, Deserialize)]
pub struct GetEntriesOptions {
pub feed_id: Option<Uuid>,
pub published_before: Option<DateTime<Utc>>,
pub id_before: Option<Uuid>,
pub limit: Option<i64>,
}
@ -54,71 +56,103 @@ impl Entry {
})
}
pub async fn get_all(pool: &PgPool, options: GetEntriesOptions) -> sqlx::Result<Vec<Entry>> {
if let Some(published_before) = options.published_before {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and published_at < $1
order by published_at desc
limit $2
",
published_before,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
pub async fn get_all(pool: &PgPool, options: &GetEntriesOptions) -> sqlx::Result<Vec<Entry>> {
if let Some(feed_id) = options.feed_id {
if let Some(published_before) = options.published_before {
if let Some(id_before) = options.id_before {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and feed_id = $1
and (published_at, entry_id) < ($2, $3)
order by published_at desc, entry_id desc
limit $4
",
feed_id,
published_before,
id_before,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
} else {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and feed_id = $1
and published_at < $2
order by published_at desc
limit $3
",
feed_id,
published_before,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
}
} else {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and feed_id = $1
order by published_at desc
limit $2
",
feed_id,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
}
} else {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
order by published_at desc
limit $1
",
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
}
}
pub async fn get_all_for_feed(
pool: &PgPool,
feed_id: Uuid,
options: GetEntriesOptions,
) -> sqlx::Result<Vec<Entry>> {
if let Some(published_before) = options.published_before {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and feed_id = $1
and published_at < $2
order by published_at desc
limit $3
",
feed_id,
published_before,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
} else {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and feed_id = $1
order by published_at desc
limit $2
",
feed_id,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
if let Some(published_before) = options.published_before {
if let Some(id_before) = options.id_before {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and (published_at, entry_id) < ($1, $2)
order by published_at desc, entry_id desc
limit $3
",
published_before,
id_before,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
} else {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
and published_at < $1
order by published_at desc
limit $2
",
published_before,
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
}
} else {
sqlx::query_as!(
Entry,
"select * from entry
where deleted_at is null
order by published_at desc
limit $1
",
options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE)
)
.fetch_all(pool)
.await
}
}
}

View File

@ -109,7 +109,7 @@ pub struct UpdateFeed {
pub last_entry_published_at: Option<Option<DateTime<Utc>>>,
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Deserialize)]
pub enum GetFeedsSort {
Title,
CreatedAt,
@ -117,11 +117,12 @@ pub enum GetFeedsSort {
LastEntryPublishedAt,
}
#[derive(Debug, Default, Clone)]
#[derive(Debug, Default, Clone, Deserialize)]
pub struct GetFeedsOptions {
pub sort: Option<GetFeedsSort>,
pub before: Option<DateTime<Utc>>,
pub after_title: Option<String>,
pub before_id: Option<Uuid>,
pub limit: Option<i64>,
}
@ -159,11 +160,11 @@ impl Feed {
})
}
pub async fn get_all(pool: &PgPool, options: GetFeedsOptions) -> sqlx::Result<Vec<Feed>> {
pub async fn get_all(pool: &PgPool, options: &GetFeedsOptions) -> sqlx::Result<Vec<Feed>> {
// TODO: make sure there are indices for all of these sort options
match options.sort.unwrap_or(GetFeedsSort::CreatedAt) {
match options.sort.as_ref().unwrap_or(&GetFeedsSort::CreatedAt) {
GetFeedsSort::Title => {
if let Some(after_title) = options.after_title {
if let Some(after_title) = &options.after_title {
sqlx::query_as!(
Feed,
r#"select
@ -183,11 +184,12 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
and title > $1
order by title asc
limit $2
and (title, feed_id) > ($1, $2)
order by title asc, feed_id asc
limit $3
"#,
after_title,
options.before_id.unwrap_or(Uuid::nil()),
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
)
.fetch_all(pool)
@ -212,7 +214,7 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
order by title asc
order by title asc, feed_id asc
limit $1
"#,
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
@ -243,11 +245,12 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
and created_at < $1
order by created_at desc
limit $2
and (created_at, feed_id) < ($1, $2)
order by created_at desc, feed_id desc
limit $3
"#,
created_before,
options.before_id.unwrap_or(Uuid::nil()),
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
)
.fetch_all(pool)
@ -272,7 +275,7 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
order by created_at desc
order by created_at desc, feed_id desc
limit $1
"#,
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
@ -303,11 +306,12 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
and last_crawled_at < $1
order by last_crawled_at desc
limit $2
and (last_crawled_at, feed_id) < ($1, $2)
order by last_crawled_at desc, feed_id desc
limit $3
"#,
crawled_before,
options.before_id.unwrap_or(Uuid::nil()),
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
)
.fetch_all(pool)
@ -332,7 +336,7 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
order by last_crawled_at desc
order by last_crawled_at desc, feed_id desc
limit $1
"#,
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
@ -363,11 +367,12 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
and last_entry_published_at < $1
order by last_entry_published_at desc
limit $2
and (last_entry_published_at, feed_id) < ($1, $2)
order by last_entry_published_at desc, feed_id desc
limit $3
"#,
published_before,
options.before_id.unwrap_or(Uuid::nil()),
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
)
.fetch_all(pool)
@ -392,7 +397,7 @@ impl Feed {
deleted_at
from feed
where deleted_at is null
order by last_entry_published_at desc
order by last_entry_published_at desc, feed_id desc
limit $1
"#,
options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),

View File

@ -4,8 +4,8 @@ use crate::models::entry::Entry;
use crate::utils::get_domain;
use crate::uuid::Base62Uuid;
pub fn entry_link(entry: Entry) -> Markup {
let title = entry.title.unwrap_or_else(|| "Untitled".to_string());
pub fn entry_link(entry: &Entry) -> Markup {
let title = entry.title.as_ref().map(|s| s.clone()).unwrap_or_else(|| "Untitled".to_string());
let url = format!("/entry/{}", Base62Uuid::from(entry.entry_id));
let domain = get_domain(&entry.url).unwrap_or_default();
html! {

View File

@ -1,12 +1,47 @@
use maud::{html, Markup};
use crate::models::entry::Entry;
use crate::models::entry::{Entry, GetEntriesOptions, DEFAULT_ENTRIES_PAGE_SIZE};
use crate::partials::entry_link::entry_link;
pub fn entry_list(entries: Vec<Entry>) -> Markup {
pub fn entry_list(entries: Vec<Entry>, options: &GetEntriesOptions) -> Markup {
let len = entries.len() as i64;
if len == 0 {
return html! { p { "No entries found." } };
}
let mut more_query = None;
if len == options.limit.unwrap_or(DEFAULT_ENTRIES_PAGE_SIZE) {
let last_entry = entries.last().unwrap();
if let Some(feed_id) = options.feed_id {
more_query = Some(format!(
"/api/v1/entries?feed_id={}&published_before={}&id_before={}",
feed_id,
last_entry.published_at,
last_entry.entry_id
));
} else {
more_query = Some(format!(
"/api/v1/entries?published_before={}&id_before={}",
last_entry.published_at,
last_entry.entry_id
));
}
}
html! {
ul class="entries" {
@for entry in entries {
@for (i, entry) in entries.iter().enumerate() {
@if i == entries.len() - 1 {
@if let Some(ref more_query) = more_query {
li class="entry" hx-get=(more_query) hx-trigger="revealed" hx-swap="afterend" {
(entry_link(entry))
div class="htmx-indicator list-loading" {
img class="loading" src="/static/img/three-dots.svg" alt="Loading...";
}
}
} @else {
li class="entry" { (entry_link(entry)) }
}
} @else {
li class="entry" { (entry_link(entry)) }
}
}

View File

@ -3,22 +3,37 @@ use maud::{html, Markup};
use crate::models::feed::{Feed, GetFeedsOptions, DEFAULT_FEEDS_PAGE_SIZE};
use crate::partials::feed_link::feed_link;
pub fn feed_list(feeds: Vec<Feed>, options: GetFeedsOptions) -> Markup {
pub fn feed_list(feeds: Vec<Feed>, options: &GetFeedsOptions) -> Markup {
let len = feeds.len() as i64;
if len == 0 {
return html! { p { "No feeds found." } };
}
let mut more_query = None;
if len == options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE) {
let last_feed = feeds.last().unwrap();
more_query = Some(format!(
"/api/v1/feeds?sort=CreatedAt&before={}&id_before={}",
last_feed.created_at,
last_feed.feed_id
));
}
html! {
div class="feeds-list" {
@if len == 0 {
p id="no-feeds" { "No feeds found." }
} else {
ul id="feeds" {
@for feed in feeds {
li { (feed_link(&feed, false)) }
@for (i, feed) in feeds.iter().enumerate() {
@if i == feeds.len() - 1 {
@if let Some(ref more_query) = more_query {
li class="feed" hx-get=(more_query) hx-trigger="revealed" hx-swap="afterend" {
(feed_link(feed, false))
div class="htmx-indicator list-loading" {
img class="loading" src="/static/img/three-dots.svg" alt="Loading...";
}
}
} @else {
li class="feed" { (feed_link(feed, false)) }
}
}
// TODO: pagination
@if len == options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE) {
button id="load-more-feeds" { "Load More" }
} @else {
li class="feed" { (feed_link(feed, false)) }
}
}
}

View File

@ -14,10 +14,12 @@ use maud::{html, Markup, DOCTYPE};
use crate::partials::header::header;
use crate::{config::Config, partials::footer::footer};
#[cfg(not(debug_assertions))]
use crate::{CSS_BUNDLES, JS_BUNDLES};
use crate::{CSS_MANIFEST, JS_MANIFEST};
#[derive(Debug, Default)]
pub struct Layout {
pub title: String,
pub subtitle: Option<String>,
}
#[async_trait]
@ -34,6 +36,7 @@ where
.map_err(|err| err.into_response())?;
Ok(Self {
title: config.title,
..Default::default()
})
}
}
@ -44,7 +47,7 @@ where
// In release mode, this work is done ahead of time in build.rs and saved to static/js/manifest.txt
// and static/css/manifest.txt. The contents of those files are then compiled into the server
// binary so that rendering the Layout does not need to do any filesystem operations.
fn get_bundles(asset_type: &str) -> Vec<String> {
fn get_manifest(asset_type: &str) -> Vec<String> {
let root_dir = Path::new("./");
let dir = root_dir.join(format!("static/{}", asset_type));
@ -68,38 +71,56 @@ fn get_bundles(asset_type: &str) -> Vec<String> {
}
#[cfg(debug_assertions)]
fn js_bundles() -> Vec<String> {
get_bundles("js")
fn js_manifest() -> Vec<String> {
get_manifest("js")
}
#[cfg(not(debug_assertions))]
fn js_bundles() -> Lines<'static> {
JS_BUNDLES.lines()
fn js_manifest() -> Lines<'static> {
JS_MANIFEST.lines()
}
#[cfg(debug_assertions)]
fn css_bundles() -> Vec<String> {
get_bundles("css")
fn css_manifest() -> Vec<String> {
get_manifest("css")
}
#[cfg(not(debug_assertions))]
fn css_bundles() -> Lines<'static> {
CSS_BUNDLES.lines()
fn css_manifest() -> Lines<'static> {
CSS_MANIFEST.lines()
}
impl Layout {
pub fn with_title(mut self, title: &str) -> Self {
self.title = title.to_string();
self
}
pub fn with_subtitle(mut self, subtitle: &str) -> Self {
self.subtitle = Some(subtitle.to_string());
self
}
fn full_title(&self) -> String {
if let Some(subtitle) = &self.subtitle {
format!("{} - {}", self.title, subtitle)
} else {
self.title.to_string()
}
}
pub fn render(self, template: Markup) -> Response {
let with_layout = html! {
(DOCTYPE)
html lang="en" {
head {
meta charset="utf-8";
title { (self.title) }
@for js_bundle in js_bundles() {
script type="module" src=(js_bundle) {}
title { (self.full_title()) }
@for js_file in js_manifest() {
script type="module" src=(js_file) {}
}
@for css_bundle in css_bundles() {
link rel="stylesheet" href=(css_bundle) {}
@for css_file in css_manifest() {
link rel="stylesheet" href=(css_file) {}
}
}
body hx-booster="true" {