crawlnicle/src/handlers/log.rs
Tyler Hallada f13c7e5e70 Add an async actor FeedCrawler for fetching feed details
Currently, this allows the browser to subscribe to the response of the
asynchronous crawl after adding a new feed.

Eventually I will also use this in the main scheduled crawls. Right now,
it only upserts feed metadata based on the parsed feed.
2023-07-09 21:30:23 -04:00
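
The commit message describes the actor pattern: a spawned task owns the crawl
loop, handlers send it a message, and each sender awaits its reply, which is
how the browser's request "subscribes" to the crawl result. Below is a minimal
sketch of that shape, assuming tokio mpsc/oneshot channels; the names
`FeedCrawlerHandle`, `Crawl`, and `FeedMetadata` are hypothetical, not this
repo's actual types.

use tokio::sync::{mpsc, oneshot};

// Hypothetical result type; the real crawler upserts parsed feed metadata.
struct FeedMetadata {
    title: String,
}

// One command per crawl request, carrying a reply channel.
struct Crawl {
    url: String,
    respond_to: oneshot::Sender<Result<FeedMetadata, String>>,
}

// The actor: a plain task that owns the receiver and handles commands.
async fn run_feed_crawler(mut rx: mpsc::Receiver<Crawl>) {
    while let Some(msg) = rx.recv().await {
        // Fetching and parsing would happen here; this stub echoes the URL.
        let _ = msg.respond_to.send(Ok(FeedMetadata { title: msg.url }));
    }
}

// Cloneable handle held by HTTP handlers. Awaiting `crawl` is what lets a
// request "subscribe" to the result of the asynchronous crawl.
#[derive(Clone)]
struct FeedCrawlerHandle {
    tx: mpsc::Sender<Crawl>,
}

impl FeedCrawlerHandle {
    async fn crawl(&self, url: String) -> Result<FeedMetadata, String> {
        let (respond_to, reply) = oneshot::channel();
        self.tx
            .send(Crawl { url, respond_to })
            .await
            .map_err(|_| "crawler task stopped".to_string())?;
        reply.await.map_err(|_| "crawler dropped the request".to_string())?
    }
}

Wiring them together: `let (tx, rx) = mpsc::channel(8);
tokio::spawn(run_feed_crawler(rx));` then clone `FeedCrawlerHandle { tx }`
into each handler that needs to trigger a crawl.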


use std::convert::Infallible;
use std::str::from_utf8;
use std::time::Duration;

use ansi_to_html::convert_escaped;
use axum::extract::State;
use axum::response::sse::KeepAlive;
use axum::response::{
    sse::{Event, Sse},
    Response,
};
use bytes::Bytes;
use maud::{html, PreEscaped};
use tokio::sync::watch::Receiver;
use tokio_stream::wrappers::WatchStream;
use tokio_stream::Stream;
use tokio_stream::StreamExt;

use crate::error::Result;
use crate::log::MEM_LOG;
use crate::partials::layout::Layout;
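
// GET /log: render the current contents of the in-memory log buffer, plus a
// <turbo-stream-source> element that makes Turbo connect to the SSE endpoint
// below for live appends. Judging by the `as_slices()` call, MEM_LOG is
// assumed to be a Mutex-guarded VecDeque<u8> of ANSI-colored log output;
// `convert_escaped` translates the ANSI color codes into HTML.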
pub async fn get(layout: Layout) -> Result<Response> {
    let mem_buf = MEM_LOG.lock().unwrap();
    // Render both halves of the ring buffer; `as_slices().0` alone would
    // silently drop the tail of the log once the VecDeque wraps around.
    let (front, back) = mem_buf.as_slices();
    Ok(layout.render(html! {
        turbo-stream-source src="/log/stream" {}
        pre id="log" {
            (PreEscaped(convert_escaped(from_utf8(front).unwrap()).unwrap()))
            (PreEscaped(convert_escaped(from_utf8(back).unwrap()).unwrap()))
        }
    }))
}
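
// GET /log/stream: a Server-Sent Events endpoint. Every value the watch
// channel publishes is one chunk of newly written log output, wrapped in a
// <turbo-stream action="append" target="log"> element so the browser appends
// it to the #log <pre> rendered by `get` above.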
pub async fn stream(
    State(log_receiver): State<Receiver<Bytes>>,
) -> Sse<impl Stream<Item = Result<Event, Infallible>>> {
    // WatchStream yields the channel's current value first and then every
    // subsequent change, so a new subscriber immediately sees the latest
    // line before live updates begin. Note that a watch channel holds only
    // one value: a slow consumer may skip intermediate lines.
    let log_stream = WatchStream::new(log_receiver);
    let log_stream = log_stream.map(|line| {
        Ok(Event::default().data(
            html! {
                turbo-stream action="append" target="log" {
                    template {
                        (PreEscaped(convert_escaped(from_utf8(&line).unwrap()).unwrap()))
                    }
                }
            }
            .into_string(),
        ))
    });
    // Periodic keep-alive comments stop idle proxies from closing the
    // long-lived SSE connection.
    Sse::new(log_stream).keep_alive(
        KeepAlive::new()
            .interval(Duration::from_secs(15))
            .text("keep-alive-text"),
    )
}
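
For context: this handler file only consumes the channel. At startup the
application needs to keep the `watch::Sender<Bytes>` alongside its logging
setup and hand the `Receiver<Bytes>` to the axum router as state (via
`.with_state(...)`) so the `State` extractor above resolves. Here is a
hypothetical publisher side, not this repo's actual logging code, sketched
under those assumptions.

use bytes::Bytes;
use tokio::sync::watch;

// Hypothetical: push each formatted log line both into the in-memory ring
// buffer (which `get` renders on first page load) and onto the watch
// channel (which `stream` relays to SSE subscribers).
fn publish_line(sender: &watch::Sender<Bytes>, line: String) {
    let bytes = Bytes::from(line);
    // Appending `bytes` to MEM_LOG is elided in this sketch.
    let _ = sender.send(bytes);
}

fn main() {
    let (tx, _rx) = watch::channel(Bytes::new());
    publish_line(&tx, "\u{1b}[32m INFO\u{1b}[0m crawl started\n".to_string());
}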