Add crawl metadata to feed & improve model interface
@@ -2,8 +2,8 @@ use axum::{extract::State, Json};
 use sqlx::PgPool;
 
 use crate::error::Error;
-use crate::models::entry::{get_entries, Entry, GetEntriesOptions};
+use crate::models::entry::Entry;
 
 pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Entry>>, Error> {
-    Ok(Json(get_entries(&pool, GetEntriesOptions::default()).await?))
+    Ok(Json(Entry::get_all(&pool, Default::default()).await?))
 }

@@ -5,19 +5,19 @@ use axum::{
 use sqlx::PgPool;
 
 use crate::error::Error;
-use crate::models::entry::{create_entry, get_entry, CreateEntry, Entry};
+use crate::models::entry::{CreateEntry, Entry};
 use crate::uuid::Base62Uuid;
 
 pub async fn get(
     State(pool): State<PgPool>,
     Path(id): Path<Base62Uuid>,
 ) -> Result<Json<Entry>, Error> {
-    Ok(Json(get_entry(&pool, id.as_uuid()).await?))
+    Ok(Json(Entry::get(&pool, id.as_uuid()).await?))
 }
 
 pub async fn post(
     State(pool): State<PgPool>,
     Json(payload): Json<CreateEntry>,
 ) -> Result<Json<Entry>, Error> {
-    Ok(Json(create_entry(&pool, payload).await?))
+    Ok(Json(Entry::create(&pool, payload).await?))
 }

@@ -5,20 +5,20 @@ use axum::{
 use sqlx::PgPool;
 
 use crate::error::{Error, Result};
-use crate::models::feed::{create_feed, delete_feed, get_feed, CreateFeed, Feed};
+use crate::models::feed::{CreateFeed, Feed};
 use crate::uuid::Base62Uuid;
 
 pub async fn get(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Json<Feed>> {
-    Ok(Json(get_feed(&pool, id.as_uuid()).await?))
+    Ok(Json(Feed::get(&pool, id.as_uuid()).await?))
 }
 
 pub async fn post(
     State(pool): State<PgPool>,
     Json(payload): Json<CreateFeed>,
 ) -> Result<Json<Feed>, Error> {
-    Ok(Json(create_feed(&pool, payload).await?))
+    Ok(Json(Feed::create(&pool, payload).await?))
 }
 
 pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<()> {
-    delete_feed(&pool, id.as_uuid()).await
+    Feed::delete(&pool, id.as_uuid()).await
 }

@@ -2,8 +2,9 @@ use axum::{extract::State, Json};
 use sqlx::PgPool;
 
 use crate::error::Error;
-use crate::models::feed::{get_feeds, Feed};
+use crate::models::feed::Feed;
 
 pub async fn get(State(pool): State<PgPool>) -> Result<Json<Vec<Feed>>, Error> {
-    Ok(Json(get_feeds(&pool).await?))
+    // TODO: pagination
+    Ok(Json(Feed::get_all(&pool).await?))
 }

@@ -7,7 +7,7 @@ use sqlx::PgPool;
 
 use crate::config::Config;
 use crate::error::Result;
-use crate::models::entry::get_entry;
+use crate::models::entry::Entry;
 use crate::partials::layout::Layout;
 use crate::uuid::Base62Uuid;
 
@@ -17,7 +17,7 @@ pub async fn get(
     State(config): State<Config>,
     layout: Layout,
 ) -> Result<Response> {
-    let entry = get_entry(&pool, id.as_uuid()).await?;
+    let entry = Entry::get(&pool, id.as_uuid()).await?;
     let content_dir = std::path::Path::new(&config.content_dir);
     let content_path = content_dir.join(format!("{}.html", entry.entry_id));
     Ok(layout.render(html! {

@@ -20,8 +20,8 @@ use crate::actors::feed_crawler::{FeedCrawlerHandle, FeedCrawlerHandleMessage};
 use crate::config::Config;
 use crate::domain_locks::DomainLocks;
 use crate::error::{Error, Result};
-use crate::models::entry::get_entries_for_feed;
-use crate::models::feed::{create_feed, delete_feed, get_feed, CreateFeed, FeedType};
+use crate::models::entry::Entry;
+use crate::models::feed::{CreateFeed, Feed};
 use crate::partials::{entry_list::entry_list, feed_link::feed_link, layout::Layout};
 use crate::state::Crawls;
 use crate::turbo_stream::TurboStream;
@@ -32,8 +32,8 @@ pub async fn get(
     State(pool): State<PgPool>,
     layout: Layout,
 ) -> Result<Response> {
-    let feed = get_feed(&pool, id.as_uuid()).await?;
-    let entries = get_entries_for_feed(&pool, feed.feed_id, Default::default()).await?;
+    let feed = Feed::get(&pool, id.as_uuid()).await?;
+    let entries = Entry::get_all_for_feed(&pool, feed.feed_id, Default::default()).await?;
     let delete_url = format!("/feed/{}/delete", id);
     Ok(layout.render(html! {
         header class="feed-header" {
@@ -123,12 +123,11 @@ pub async fn post(
         config.content_dir.clone(),
     );
 
-    let feed = create_feed(
+    let feed = Feed::create(
         &pool,
         CreateFeed {
             title: add_feed.title,
             url: add_feed.url.clone(),
-            feed_type: FeedType::Rss, // eh, get rid of this
             description: add_feed.description,
         },
     )
@@ -148,7 +147,7 @@ pub async fn post(
 
     let url: Url = Url::parse(&add_feed.url)
         .map_err(|err| AddFeedError::InvalidUrl(add_feed.url.clone(), err))?;
-    let receiver = feed_crawler.crawl(url).await;
+    let receiver = feed_crawler.crawl(feed.feed_id).await;
     {
         let mut crawls = crawls.lock().map_err(|_| {
             AddFeedError::CreateFeedError(add_feed.url.clone(), Error::InternalServerError)
@@ -245,6 +244,6 @@ pub async fn stream(
 }
 
 pub async fn delete(State(pool): State<PgPool>, Path(id): Path<Base62Uuid>) -> Result<Redirect> {
-    delete_feed(&pool, id.as_uuid()).await?;
+    Feed::delete(&pool, id.as_uuid()).await?;
     Ok(Redirect::to("/feeds"))
 }

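Aside on the crawl change in the hunks above: the feed row now exists before the crawl starts (Feed::create runs first), so the crawler actor can be handed feed.feed_id instead of a raw URL, letting crawl metadata be recorded against that row. A rough sketch of what the handle side might look like, assuming a tokio actor behind FeedCrawlerHandle; the message types, variants, and channel shapes below are guesses for illustration, not this repository's code:

// Hypothetical sketch only -- not the crate's actual actors/feed_crawler.rs.
use tokio::sync::{broadcast, mpsc};
use uuid::Uuid;

// Message the handle sends to the crawler actor (guessed shape).
pub enum FeedCrawlerMessage {
    Crawl {
        feed_id: Uuid, // previously a Url; the feed row now exists up front
        respond_to: broadcast::Sender<FeedCrawlerHandleMessage>,
    },
}

// Progress events streamed back to the HTTP handler (guessed variants).
#[derive(Clone)]
pub enum FeedCrawlerHandleMessage {
    Finished(Uuid),
    Failed(String),
}

#[derive(Clone)]
pub struct FeedCrawlerHandle {
    sender: mpsc::Sender<FeedCrawlerMessage>,
}

impl FeedCrawlerHandle {
    /// Ask the actor to crawl an already-persisted feed, returning a
    /// receiver of progress messages for this crawl.
    pub async fn crawl(&self, feed_id: Uuid) -> broadcast::Receiver<FeedCrawlerHandleMessage> {
        let (tx, rx) = broadcast::channel(8);
        let _ = self
            .sender
            .send(FeedCrawlerMessage::Crawl { feed_id, respond_to: tx })
            .await;
        rx
    }
}
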
@@ -4,11 +4,12 @@ use maud::html;
 use sqlx::PgPool;
 
 use crate::error::Result;
-use crate::models::feed::get_feeds;
+use crate::models::feed::Feed;
 use crate::partials::{feed_link::feed_link, layout::Layout};
 
 pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
-    let feeds = get_feeds(&pool).await?;
+    // TODO: pagination
+    let feeds = Feed::get_all(&pool).await?;
     Ok(layout.render(html! {
         h2 { "Feeds" }
         div class="feeds" {

@@ -3,10 +3,10 @@ use axum::response::Response;
 use sqlx::PgPool;
 
 use crate::error::Result;
-use crate::models::entry::{get_entries, GetEntriesOptions};
+use crate::models::entry::Entry;
 use crate::partials::{layout::Layout, entry_list::entry_list};
 
 pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
-    let entries = get_entries(&pool, GetEntriesOptions::default()).await?;
+    let entries = Entry::get_all(&pool, Default::default()).await?;
     Ok(layout.render(entry_list(entries)))
 }

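The diff only shows call sites, but the shape of the new model interface is implied by them: free functions like get_entry, get_entries, and create_entry become associated functions on Entry and Feed, so handlers import one type instead of one function per query. A minimal sketch of the Entry side, assuming sqlx-backed queries; the table name, columns, and GetEntriesOptions fields are assumptions, not this repository's code:

// Hypothetical sketch only -- not the crate's actual models/entry.rs.
use sqlx::PgPool;
use uuid::Uuid;

#[derive(sqlx::FromRow)]
pub struct Entry {
    pub entry_id: Uuid,
    pub feed_id: Uuid,
    pub url: String,
}

#[derive(Default)]
pub struct GetEntriesOptions {
    pub limit: Option<i64>,
}

impl Entry {
    /// Fetch one entry by primary key.
    pub async fn get(pool: &PgPool, entry_id: Uuid) -> sqlx::Result<Entry> {
        sqlx::query_as::<_, Entry>("select * from entry where entry_id = $1")
            .bind(entry_id)
            .fetch_one(pool)
            .await
    }

    /// Fetch recent entries across all feeds.
    pub async fn get_all(pool: &PgPool, options: GetEntriesOptions) -> sqlx::Result<Vec<Entry>> {
        sqlx::query_as::<_, Entry>("select * from entry order by published_at desc limit $1")
            .bind(options.limit.unwrap_or(50))
            .fetch_all(pool)
            .await
    }

    /// Fetch recent entries for one feed, as used by the feed page handler.
    pub async fn get_all_for_feed(
        pool: &PgPool,
        feed_id: Uuid,
        options: GetEntriesOptions,
    ) -> sqlx::Result<Vec<Entry>> {
        sqlx::query_as::<_, Entry>(
            "select * from entry where feed_id = $1 order by published_at desc limit $2",
        )
        .bind(feed_id)
        .bind(options.limit.unwrap_or(50))
        .fetch_all(pool)
        .await
    }
}

With Default derived on the options struct, call sites stay as terse as Entry::get_all(&pool, Default::default()), which is exactly what the handlers above expect.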