Replace argh with clap

Mostly for clap's more concise derive-based Config parsing (reading fields from environment variables via `#[clap(long, env)]`) and its better error reporting.
This commit is contained in:
2023-06-27 14:03:52 -04:00
parent abd540d2ff
commit 7e06d23bba
6 changed files with 153 additions and 87 deletions

View File

@@ -1,5 +1,5 @@
use anyhow::Result;
use argh::FromArgs;
use clap::{Args, Parser, Subcommand};
use chrono::Utc;
use dotenvy::dotenv;
use sqlx::postgres::PgPoolOptions;
@@ -12,82 +12,73 @@ use lib::models::feed::{create_feed, delete_feed, CreateFeed, FeedType};
use lib::models::entry::{create_entry, delete_entry, CreateEntry};
use lib::uuid::Base62Uuid;
#[derive(FromArgs)]
/// CLI for crawlnicle
struct Args {
#[argh(subcommand)]
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
#[command(propagate_version = true)]
struct Cli {
#[command(subcommand)]
commands: Commands,
}
#[derive(FromArgs)]
#[argh(subcommand)]
#[derive(Subcommand)]
enum Commands {
/// Fetches new entries from all feeds in the database
Crawl,
AddFeed(AddFeed),
DeleteFeed(DeleteFeed),
AddEntry(AddEntry),
DeleteEntry(DeleteEntry),
Crawl(Crawl),
}
#[derive(FromArgs)]
/// Add a feed to the database
#[argh(subcommand, name = "add-feed")]
#[derive(Args)]
struct AddFeed {
#[argh(option)]
/// title of the feed (max 255 characters)
#[arg(short, long)]
title: Option<String>,
#[argh(option)]
/// URL of the feed (max 2048 characters)
#[arg(short, long)]
url: String,
#[argh(option, long = "type")]
/// type of the feed ('rss' or 'atom')
#[arg(short, long)]
feed_type: FeedType,
#[argh(option)]
/// description of the feed
#[arg(short, long)]
description: Option<String>,
}
#[derive(FromArgs)]
#[derive(Args)]
/// Delete a feed from the database
#[argh(subcommand, name = "delete-feed")]
struct DeleteFeed {
#[argh(positional)]
/// id of the feed to delete
id: Uuid,
}
#[derive(FromArgs)]
#[derive(Args)]
/// Add an entry to the database
#[argh(subcommand, name = "add-entry")]
struct AddEntry {
#[argh(option)]
/// title of the entry (max 255 characters)
#[arg(short, long)]
title: Option<String>,
#[argh(option)]
/// URL of the entry (max 2048 characters)
#[arg(short, long)]
url: String,
#[argh(option)]
/// description of the entry
#[arg(short, long)]
description: Option<String>,
#[argh(option)]
/// source feed for the entry
#[arg(short, long)]
feed_id: Uuid,
}
#[derive(FromArgs)]
#[derive(Args)]
/// Delete an entry from the database
#[argh(subcommand, name = "delete-entry")]
struct DeleteEntry {
#[argh(positional)]
/// id of the entry to delete
id: Uuid,
}
#[derive(FromArgs)]
/// Delete an entry from the database
#[argh(subcommand, name = "crawl")]
struct Crawl {}
#[tokio::main]
pub async fn main() -> Result<()> {
dotenv().ok();
@@ -99,9 +90,9 @@ pub async fn main() -> Result<()> {
.connect(&env::var("DATABASE_URL")?)
.await?;
let args: Args = argh::from_env();
let cli: Cli = Cli::parse();
match args.commands {
match cli.commands {
Commands::AddFeed(args) => {
let feed = create_feed(
&pool,
@@ -138,7 +129,7 @@ pub async fn main() -> Result<()> {
delete_entry(&pool, args.id).await?;
info!("Deleted entry with id {}", Base62Uuid::from(args.id));
}
Commands::Crawl(_) => {
Commands::Crawl => {
info!("Crawling...");
crawl(&pool).await?;
}

View File

@@ -1,31 +1,17 @@
use anyhow::{Context, Result};
use clap::Parser;
#[derive(Clone, Debug)]
#[derive(Parser, Clone, Debug)]
pub struct Config {
#[clap(long, env)]
pub database_url: String,
#[clap(long, env)]
pub database_max_connections: u32,
#[clap(long, env)]
pub host: String,
#[clap(long, env)]
pub port: u16,
#[clap(long, env)]
pub title: String,
#[clap(long, env)]
pub max_mem_log_size: usize,
}
impl Config {
pub fn new() -> Result<Config> {
let database_url = std::env::var("DATABASE_URL").context("DATABASE_URL not set")?;
let database_max_connections = std::env::var("DATABASE_MAX_CONNECTIONS").context("DATABASE_MAX_CONNECTIONS not set")?.parse()?;
let host = std::env::var("HOST").context("HOST not set")?;
let port = std::env::var("PORT").context("PORT not set")?.parse()?;
let title = std::env::var("TITLE").context("TITLE not set")?;
let max_mem_log_size = std::env::var("MAX_MEM_LOG_SIZE").context("MAX_MEM_LOG_SIZE not set")?.parse()?;
Ok(Config {
database_url,
database_max_connections,
host,
port,
title,
max_mem_log_size,
})
}
}

View File

@@ -6,6 +6,7 @@ use axum::{
Router,
};
use bytes::Bytes;
use clap::Parser;
use dotenvy::dotenv;
use notify::Watcher;
use sqlx::postgres::PgPoolOptions;
@@ -32,7 +33,7 @@ async fn serve(app: Router, addr: SocketAddr) -> Result<()> {
async fn main() -> Result<()> {
dotenv().ok();
let config = Config::new()?;
let config = Config::parse();
let (log_sender, log_receiver) = channel::<Bytes>(Bytes::new());
let _guards = init_tracing(&config, log_sender)?;

View File

@@ -8,7 +8,7 @@ use validator::Validate;
use crate::error::{Error, Result};
#[derive(Debug, Serialize, Deserialize, sqlx::Type)]
#[derive(Debug, Serialize, Deserialize, sqlx::Type, Clone)]
#[sqlx(type_name = "feed_type", rename_all = "lowercase")]
#[serde(rename_all = "lowercase")]
pub enum FeedType {