Add very basic crawl job

Loops through feeds and adds items from each feed.
2023-05-09 23:46:42 -04:00
parent 89fdf8f95a
commit ae8f15f19b
18 changed files with 327 additions and 56 deletions
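The crawl job itself lives in one of the other changed files, so it is not visible in the hunk below. A minimal sketch of what "loops through feeds and adds items from each feed" might look like follows; get_feeds, create_item, CreateItem, and the Feed.url field are hypothetical names, and reqwest + feed-rs stand in for whatever fetching and parsing the commit actually uses.

// Hedged sketch of the crawl job; everything here except PgPool and the
// crate-level Result alias is an assumption, not code from this commit.
use sqlx::PgPool;

use crate::error::Result;
use crate::models::feed::get_feeds; // hypothetical "fetch all feeds" helper
use crate::models::item::{create_item, CreateItem}; // hypothetical item model

pub async fn crawl(pool: &PgPool) -> Result<()> {
    // Loop through every feed and add the items from each one.
    for feed in get_feeds(pool).await? {
        // Fetch and parse the feed; assumes crate::error::Error has From
        // impls for the reqwest and feed-rs error types.
        let body = reqwest::get(feed.url.as_str()).await?.bytes().await?;
        let parsed = feed_rs::parser::parse(&body[..])?;
        for entry in parsed.entries {
            create_item(
                pool,
                CreateItem {
                    feed_id: feed.id,
                    title: entry.title.map(|t| t.content).unwrap_or_default(),
                    url: entry
                        .links
                        .first()
                        .map(|l| l.href.clone())
                        .unwrap_or_default(),
                },
            )
            .await?;
        }
    }
    Ok(())
}

A job like this would typically be kicked off from main with tokio::spawn, or run on a timer with something like tokio::time::interval, rather than per-request.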

@@ -4,16 +4,20 @@ use axum::{
 };
 use sqlx::PgPool;
 
-use crate::error::Error;
-use crate::models::feed::{create_feed, get_feed, CreateFeed, Feed};
+use crate::error::{Error, Result};
+use crate::models::feed::{create_feed, get_feed, delete_feed, CreateFeed, Feed};
 
-pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Feed>, Error> {
-    Ok(Json(get_feed(pool, id).await?))
+pub async fn get(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<Json<Feed>> {
+    Ok(Json(get_feed(&pool, id).await?))
 }
 
 pub async fn post(
     State(pool): State<PgPool>,
     Json(payload): Json<CreateFeed>,
-) -> Result<Json<Feed>, Error> {
-    Ok(Json(create_feed(pool, payload).await?))
+) -> Result<Json<Feed>> {
+    Ok(Json(create_feed(&pool, payload).await?))
 }
+
+pub async fn delete(State(pool): State<PgPool>, Path(id): Path<i32>) -> Result<()> {
+    delete_feed(&pool, id).await
+}
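One change worth noting in this hunk: the handlers' return types drop the explicit error parameter (Result<Json<Feed>, Error> becomes Result<Json<Feed>>) because Result is now imported from crate::error. The alias definition is not shown in this commit view, but the import implies the conventional pattern:

// Presumed addition to the crate::error module (not shown in this hunk):
// a crate-wide Result alias with the error type defaulted to Error.
pub type Result<T, E = Error> = std::result::Result<T, E>;

The new delete handler then returns Result<()> directly; that only works as an axum handler if Error implements IntoResponse, which it presumably already does, since get and post were returning it before this change.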