diff --git a/frontend/css/styles.css b/frontend/css/styles.css
index bae5d86..ea3aacb 100644
--- a/frontend/css/styles.css
+++ b/frontend/css/styles.css
@@ -103,7 +103,7 @@ div.feeds {
     }
 }
 
-ul#feeds {
+.feeds-list {
     grid-area: 'feeds';
     list-style: none;
     padding: 0;
diff --git a/src/handlers/api/feeds.rs b/src/handlers/api/feeds.rs
index 89e1fe0..24fe421 100644
--- a/src/handlers/api/feeds.rs
+++ b/src/handlers/api/feeds.rs
@@ -6,5 +6,5 @@ use crate::models::feed::Feed;
 
 pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Feed>>, Error> {
     // TODO: pagination
-    Ok(Json(Feed::get_all(&pool).await?))
+    Ok(Json(Feed::get_all(&pool, Default::default()).await?))
 }
diff --git a/src/handlers/feed.rs b/src/handlers/feed.rs
index a8eb0e8..871c71a 100644
--- a/src/handlers/feed.rs
+++ b/src/handlers/feed.rs
@@ -14,7 +14,6 @@ use sqlx::PgPool;
 use tokio_stream::wrappers::errors::BroadcastStreamRecvError;
 use tokio_stream::wrappers::BroadcastStream;
 use tokio_stream::StreamExt;
-use url::Url;
 
 use crate::actors::feed_crawler::{FeedCrawlerHandle, FeedCrawlerHandleMessage};
 use crate::config::Config;
@@ -145,8 +144,6 @@ pub async fn post(
         AddFeedError::CreateFeedError(add_feed.url.clone(), err)
     })?;
 
-    let url: Url = Url::parse(&add_feed.url)
-        .map_err(|err| AddFeedError::InvalidUrl(add_feed.url.clone(), err))?;
     let receiver = feed_crawler.crawl(feed.feed_id).await;
     {
         let mut crawls = crawls.lock().map_err(|_| {
@@ -167,6 +164,7 @@ pub async fn post(
                     li id=(feed_id) { (feed_link(&feed, true)) }
                 }
             }
+            turbo-stream action="remove" target="no-feeds";
         }
         .into_string(),
     ),
diff --git a/src/handlers/feeds.rs b/src/handlers/feeds.rs
index cbf1453..fbd5738 100644
--- a/src/handlers/feeds.rs
+++ b/src/handlers/feeds.rs
@@ -4,18 +4,29 @@ use maud::html;
 use sqlx::PgPool;
 
 use crate::error::Result;
-use crate::models::feed::Feed;
+use crate::models::feed::{Feed, GetFeedsOptions, DEFAULT_FEEDS_PAGE_SIZE};
 use crate::partials::{feed_link::feed_link, layout::Layout};
 
 pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
-    // TODO: pagination
-    let feeds = Feed::get_all(&pool).await?;
+    let options = GetFeedsOptions::default();
+    let feeds = Feed::get_all(&pool, options.clone()).await?;
+    let len = feeds.len() as i64;
     Ok(layout.render(html! {
         h2 { "Feeds" }
         div class="feeds" {
-            ul id="feeds" {
-                @for feed in feeds {
-                    li { (feed_link(&feed, false)) }
+            div class="feeds-list" {
+                @if len == 0 {
+                    p id="no-feeds" { "No feeds found." }
+                } else {
+                    ul id="feeds" {
+                        @for feed in feeds {
+                            li { (feed_link(&feed, false)) }
+                        }
+                    }
+                }
+                // TODO: pagination
+                @if len == options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE) {
+                    button id="load-more-feeds" { "Load More" }
                 }
             }
             div class="add-feed" {
diff --git a/src/models/feed.rs b/src/models/feed.rs
index 909c974..b3eebf0 100644
--- a/src/models/feed.rs
+++ b/src/models/feed.rs
@@ -8,6 +8,8 @@ use validator::Validate;
 
 use crate::error::{Error, Result};
 
+pub const DEFAULT_FEEDS_PAGE_SIZE: i64 = 50;
+
 #[derive(Debug, Serialize, Deserialize, sqlx::Type, Clone, Copy)]
 #[sqlx(type_name = "feed_type", rename_all = "lowercase")]
 #[serde(rename_all = "lowercase")]
@@ -105,6 +107,22 @@ pub struct UpdateFeed {
     pub last_entry_published_at: Option<Option<DateTime<Utc>>>,
 }
 
+#[derive(Debug, Clone)]
+pub enum GetFeedsSort {
+    Title,
+    CreatedAt,
+    LastCrawledAt,
+    LastEntryPublishedAt,
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct GetFeedsOptions {
+    pub sort: Option<GetFeedsSort>,
+    pub before: Option<DateTime<Utc>>,
+    pub after_title: Option<String>,
+    pub limit: Option<i64>,
+}
+
 impl Feed {
     pub async fn get(pool: &PgPool, feed_id: Uuid) -> Result<Feed> {
         sqlx::query_as!(
@@ -137,27 +155,234 @@ impl Feed {
         })
     }
 
-    pub async fn get_all(pool: &PgPool) -> sqlx::Result<Vec<Feed>> {
-        sqlx::query_as!(
-            Feed,
-            r#"select
-                feed_id,
-                title,
-                url,
-                type as "feed_type: FeedType",
-                description,
-                crawl_interval_minutes,
-                last_crawl_error,
-                last_crawled_at,
-                last_entry_published_at,
-                created_at,
-                updated_at,
-                deleted_at
-            from feed
-            where deleted_at is null"#
-        )
-        .fetch_all(pool)
-        .await
+    pub async fn get_all(pool: &PgPool, options: GetFeedsOptions) -> sqlx::Result<Vec<Feed>> {
+        // TODO: make sure there are indices for all of these sort options
+        match options.sort.unwrap_or(GetFeedsSort::CreatedAt) {
+            GetFeedsSort::Title => {
+                if let Some(after_title) = options.after_title {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                            and title > $1
+                        order by title asc
+                        limit $2
+                        "#,
+                        after_title,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+                } else {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                        order by title asc
+                        limit $1
+                        "#,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+
+                }
+            }
+            GetFeedsSort::CreatedAt => {
+                if let Some(created_before) = options.before {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                            and created_at < $1
+                        order by created_at desc
+                        limit $2
+                        "#,
+                        created_before,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+                } else {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                        order by created_at desc
+                        limit $1
+                        "#,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+
+                }
+            }
+            GetFeedsSort::LastCrawledAt => {
+                if let Some(crawled_before) = options.before {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                            and last_crawled_at < $1
+                        order by last_crawled_at desc
+                        limit $2
+                        "#,
+                        crawled_before,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+                } else {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                        order by last_crawled_at desc
+                        limit $1
+                        "#,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+
+                }
+            }
+            GetFeedsSort::LastEntryPublishedAt => {
+                if let Some(published_before) = options.before {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                            and last_entry_published_at < $1
+                        order by last_entry_published_at desc
+                        limit $2
+                        "#,
+                        published_before,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+                } else {
+                    sqlx::query_as!(
+                        Feed,
+                        r#"select
+                            feed_id,
+                            title,
+                            url,
+                            type as "feed_type: FeedType",
+                            description,
+                            crawl_interval_minutes,
+                            last_crawl_error,
+                            last_crawled_at,
+                            last_entry_published_at,
+                            created_at,
+                            updated_at,
+                            deleted_at
+                        from feed
+                        where deleted_at is null
+                        order by last_entry_published_at desc
+                        limit $1
+                        "#,
+                        options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE),
+                    )
+                    .fetch_all(pool)
+                    .await
+
+                }
+            }
+        }
     }
 
     pub async fn create(pool: &PgPool, payload: CreateFeed) -> Result<Feed> {