Add DomainLocks to serialize requests to each domain
Every async task must wait until at least one second has passed since the last request to a domain before it makes another request to that domain.
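Roughly, the approach looks like the sketch below (an illustration of the idea only, not the DomainLocks implementation this commit adds; the run_request name, the tokio types, and the Option<Instant> bookkeeping are assumptions): keep one async mutex per domain that guards the time of the last request to that domain, and have each task lock it, sleep out whatever remains of the one-second gap, then run its request.

    use std::collections::HashMap;
    use std::future::Future;
    use std::sync::Arc;
    use std::time::{Duration, Instant};

    use tokio::sync::Mutex;
    use tokio::time::sleep;

    /// Minimum spacing between two requests to the same domain.
    const MIN_DOMAIN_DELAY: Duration = Duration::from_secs(1);

    /// Hypothetical sketch: a map of per-domain async locks, cheap to clone and
    /// share between axum handlers and crawler tasks.
    #[derive(Clone, Default)]
    pub struct DomainLocks {
        // domain -> time of the last request made to that domain (None = never)
        domains: Arc<Mutex<HashMap<String, Arc<Mutex<Option<Instant>>>>>>,
    }

    impl DomainLocks {
        /// Run `request` only after at least MIN_DOMAIN_DELAY has passed since the
        /// previous request to `domain`; tasks targeting the same domain queue up
        /// on the per-domain mutex, so their requests are serialized.
        pub async fn run_request<F, T>(&self, domain: &str, request: F) -> T
        where
            F: Future<Output = T>,
        {
            // Look up (or create) this domain's lock, then drop the map lock so
            // requests to other domains are not blocked while this one runs.
            let domain_lock = {
                let mut map = self.domains.lock().await;
                map.entry(domain.to_owned()).or_default().clone()
            };
            let mut last_request = domain_lock.lock().await;
            if let Some(last) = *last_request {
                let since_last = last.elapsed();
                if since_last < MIN_DOMAIN_DELAY {
                    // A request to this domain finished less than a second ago:
                    // wait out the remainder before sending ours.
                    sleep(MIN_DOMAIN_DELAY - since_last).await;
                }
            }
            let result = request.await;
            *last_request = Some(Instant::now());
            result
        }
    }

Holding the per-domain mutex across the request is what serializes tasks hitting the same domain, while requests to different domains still proceed concurrently.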
@@ -18,6 +18,7 @@ use url::Url;
 
 use crate::actors::feed_crawler::{FeedCrawlerHandle, FeedCrawlerHandleMessage};
 use crate::config::Config;
+use crate::domain_locks::DomainLocks;
 use crate::error::{Error, Result};
 use crate::models::entry::get_entries_for_feed;
 use crate::models::feed::{create_feed, delete_feed, get_feed, CreateFeed, FeedType};
@@ -109,13 +110,18 @@ impl IntoResponse for AddFeedError {
 pub async fn post(
     State(pool): State<PgPool>,
     State(crawls): State<Crawls>,
+    State(domain_locks): State<DomainLocks>,
     State(config): State<Config>,
     Form(add_feed): Form<AddFeed>,
 ) -> AddFeedResult<Response> {
     // TODO: store the client in axum state (as long as it can be used concurrently?)
     let client = Client::new();
-    let feed_crawler =
-        FeedCrawlerHandle::new(pool.clone(), client.clone(), config.content_dir.clone());
+    let feed_crawler = FeedCrawlerHandle::new(
+        pool.clone(),
+        client.clone(),
+        domain_locks.clone(),
+        config.content_dir.clone(),
+    );
 
     let feed = create_feed(
         &pool,
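On the crawler side, the handle presumably threads domain_locks through to the task that performs the HTTP fetch. A hedged usage sketch, reusing the illustrative run_request from above (fetch_with_domain_lock is a made-up helper, not part of this commit):

    /// Hypothetical helper: every fetch goes through the shared DomainLocks, so
    /// requests to the same domain are spaced at least one second apart across tasks.
    async fn fetch_with_domain_lock(
        client: &reqwest::Client,
        domain_locks: &DomainLocks,
        url: &url::Url,
    ) -> Result<String, reqwest::Error> {
        let domain = url.domain().unwrap_or_default().to_owned();
        domain_locks
            .run_request(&domain, async {
                client.get(url.as_str()).send().await?.text().await
            })
            .await
    }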