Replace hotwire with htmx
In the process, also improve the feedback from the import/add-feed forms. The frontend code that replaces UTC timestamps with local time strings now uses vanilla JS instead of @hotwired/stimulus.
parent ff0b218da1
commit 1d6f98c6bb

Binary file not shown.
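For context, the SSE wiring pattern used throughout the new handler markup in the diff below looks roughly like this. This is a minimal maud sketch; the `sse_status_list` helper, its route parameter, and the placeholder text are illustrative and not part of the commit. A container element opens the SSE connection via the htmx `sse` extension and a child list appends each `message` event:

use maud::{html, Markup};

// Hypothetical helper mirroring the pattern the handlers below return:
// the outer div connects to an event stream and each SSE "message" event
// is swapped into the inner list at the end (hx-swap="beforeend").
fn sse_status_list(stream_path: &str) -> Markup {
    let stream = format!("connect:{}", stream_path);
    html! {
        div hx-sse=(stream) {
            ul class="stream-messages" hx-sse="swap:message" hx-swap="beforeend" {
                li { "Waiting for updates..." }
            }
        }
    }
}

The feed, import, and log handlers in the diff below return variations of this markup from their stream endpoints.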
@@ -156,7 +156,7 @@ form.feed-form .form-grid button {
   grid-column: 3 / 4;
 }
 
-ul#add-feed-messages {
+ul.stream-messages {
   list-style: none;
   padding: 0;
   margin: 0;
@@ -164,7 +164,7 @@ ul#add-feed-messages {
   white-space: nowrap;
 }
 
-ul#add-feed-messages li {
+ul.stream-messages li {
   overflow: hidden;
   white-space: no-wrap;
   text-overflow: ellipsis;
@@ -1,9 +1,4 @@
-import { Application } from "@hotwired/stimulus";
-
-import LocalTimeController from "./local_time_controller";
-
 // import CSS so it gets named with a content hash that busts caches
 import "../css/styles.css";
 
-window.Stimulus = Application.start();
-window.Stimulus.register("local-time", LocalTimeController);
+import "./localTimeController";
frontend/js/localTimeController.ts (new file)
@@ -0,0 +1,24 @@
+function convertTimeElements() {
+  const timeElements = document.querySelectorAll('time.local-time');
+  timeElements.forEach((element) => {
+    const utcString = element.getAttribute("datetime");
+    if (utcString) {
+      const utcTime = new Date(utcString);
+      element.textContent = utcTime.toLocaleDateString(window.navigator.language, {
+        year: "numeric",
+        month: "long",
+        day: "numeric",
+      });
+    } else {
+      console.error("Missing datetime attribute on time.local-time element", element);
+    }
+  });
+}
+
+document.addEventListener("DOMContentLoaded", function() {
+  convertTimeElements();
+});
+
+document.body.addEventListener('htmx:afterSwap', function() {
+  convertTimeElements();
+});
@@ -1,28 +0,0 @@
-import { Controller } from "@hotwired/stimulus";
-
-// Replaces all UTC timestamps with time formated for the local timezone
-export default class extends Controller {
-  connect() {
-    this.renderLocalTime();
-  }
-
-  renderLocalTime() {
-    this.element.textContent = this.localTimeString;
-  }
-
-  get localTimeString(): string {
-    if (this.utcTime) {
-      return this.utcTime.toLocaleDateString(window.navigator.language, {
-        year: "numeric",
-        month: "long",
-        day: "numeric",
-      });
-    }
-    return "Unknown datetime"
-  }
-
-  get utcTime(): Date | null {
-    const utcString = this.element.getAttribute("datetime");
-    return utcString ? new Date(utcString) : null;
-  }
-}
@@ -1,14 +1,11 @@
 {
-  "dependencies": {
-    "@hotwired/stimulus": "^3.2.1"
-  },
   "name": "crawlnicle-frontend",
   "module": "js/index.ts",
-  "type": "module",
   "devDependencies": {
     "bun-types": "^0.6.0"
   },
   "peerDependencies": {
     "typescript": "^5.0.0"
-  }
+  },
+  "type": "module"
 }
@@ -5,11 +5,13 @@ use bytes::Bytes;
 use opml::OPML;
 use sqlx::PgPool;
 use tokio::sync::{broadcast, mpsc};
+use tokio::task::JoinSet;
 use tracing::{debug, error, instrument};
 use uuid::Uuid;
 
 use crate::actors::crawl_scheduler::{CrawlSchedulerHandle, CrawlSchedulerHandleMessage};
-use crate::models::feed::{Feed, UpsertFeed};
+use crate::error::Error;
+use crate::models::feed::{Feed, CreateFeed};
 use crate::state::Imports;
 use crate::uuid::Base62Uuid;
 
@@ -51,11 +53,12 @@ async fn listen_to_crawl(
     feed_id: Uuid,
     crawl_scheduler: CrawlSchedulerHandle,
     respond_to: broadcast::Sender<ImporterHandleMessage>,
-) {
+) -> Uuid {
     let mut receiver = crawl_scheduler.schedule(feed_id).await;
     while let Ok(msg) = receiver.recv().await {
         let _ = respond_to.send(ImporterHandleMessage::CrawlScheduler(msg));
     }
+    feed_id
 }
 
 /// An error type that enumerates possible failures during a crawl and is cloneable and can be sent
@@ -95,25 +98,38 @@ impl Importer {
         let document = OPML::from_reader(&mut Cursor::new(bytes)).map_err(|_| {
             ImporterError::InvalidOPML(file_name.unwrap_or(Base62Uuid::from(import_id).to_string()))
         })?;
+        let mut crawls = JoinSet::new();
         for url in Self::gather_feed_urls(document.body.outlines) {
-            let feed = Feed::upsert(
+            dbg!(&url);
+            let feed = Feed::create(
                 &self.pool,
-                UpsertFeed {
+                CreateFeed {
                     url: url.clone(),
                     ..Default::default()
                 },
             )
-            .await
-            .map_err(|_| ImporterError::CreateFeedError(url))?;
-            if feed.updated_at.is_none() {
-                tokio::spawn(listen_to_crawl(
+            .await;
+            if let Err(Error::Sqlx(sqlx::error::Error::Database(err))) = feed {
+                if err.is_unique_violation() {
+                    dbg!("already imported", &url);
+                    let _ = respond_to.send(ImporterHandleMessage::AlreadyImported(url));
+                }
+            } else if let Ok(feed) = feed {
+                crawls.spawn(listen_to_crawl(
                     feed.feed_id,
                     self.crawl_scheduler.clone(),
                    respond_to.clone(),
                 ));
+            } else {
+                let _ = respond_to.send(ImporterHandleMessage::CreateFeedError(url));
             }
         }
 
+        while let Some(feed_id) = crawls.join_next().await {
+            dbg!("done crawling feed", feed_id);
+        }
+        dbg!("done import_opml");
 
         Ok(())
     }
 
@@ -137,6 +153,7 @@ impl Importer {
                 bytes,
                 respond_to,
             } => {
+                dbg!("handle_message", import_id);
                 let result = self
                     .import_opml(import_id, file_name, bytes, respond_to.clone())
                     .await;
@@ -178,6 +195,8 @@ pub struct ImporterHandle {
 #[derive(Debug, Clone)]
 pub enum ImporterHandleMessage {
     Import(ImporterResult<()>),
+    CreateFeedError(String),
+    AlreadyImported(String),
     CrawlScheduler(CrawlSchedulerHandleMessage),
 }
 
@@ -200,6 +219,7 @@ impl ImporterHandle {
         file_name: Option<String>,
         bytes: Bytes,
     ) -> broadcast::Receiver<ImporterHandleMessage> {
+        dbg!(import_id, &file_name, bytes.len());
        let (sender, receiver) = broadcast::channel(8);
        let msg = ImporterMessage::Import {
            import_id,
@@ -29,7 +29,7 @@ pub async fn get(
             div {
                 span class="published" {
                     strong { "Published: " }
-                    time datetime=(published_at) data-controller="local-time" {
+                    time datetime=(published_at) class="local-time" {
                         (published_at)
                     }
                 }
@@ -18,9 +18,10 @@ use crate::actors::feed_crawler::FeedCrawlerHandleMessage;
 use crate::error::{Error, Result};
 use crate::models::entry::Entry;
 use crate::models::feed::{CreateFeed, Feed};
+use crate::partials::add_feed_form::add_feed_form;
+use crate::partials::entry_link::entry_link;
 use crate::partials::{entry_list::entry_list, feed_link::feed_link, layout::Layout};
 use crate::state::Crawls;
-use crate::turbo_stream::TurboStream;
 use crate::uuid::Base62Uuid;
 
 pub async fn get(
@@ -88,16 +89,13 @@ impl IntoResponse for AddFeedError {
     fn into_response(self) -> Response {
         (
             self.status_code(),
-            TurboStream(
-                html! {
-                    turbo-stream action="append" target="feeds" {
-                        template {
-                            li { span class="error" { (self) } }
-                        }
-                    }
-                }
-                .into_string(),
-            ),
+            html! {
+                (add_feed_form())
+                ul class="stream-messages" {
+                    li { span class="error" { (self) } }
+                }
+            }
+            .into_string(),
         )
         .into_response()
     }
@@ -137,22 +135,18 @@ pub async fn post(
         crawls.insert(feed.feed_id, receiver);
     }
 
-    let feed_id = format!("feed-{}", Base62Uuid::from(feed.feed_id));
-    let feed_stream = format!("/feed/{}/stream", Base62Uuid::from(feed.feed_id));
+    let feed_stream = format!("connect:/feed/{}/stream", Base62Uuid::from(feed.feed_id));
     Ok((
         StatusCode::CREATED,
-        TurboStream(
-            html! {
-                turbo-stream-source src=(feed_stream) id="feed-stream" {}
-                turbo-stream action="append" target="feeds" {
-                    template {
-                        li id=(feed_id) { (feed_link(&feed, true)) }
-                    }
-                }
-                turbo-stream action="remove" target="no-feeds";
-            }
-            .into_string(),
-        ),
+        html! {
+            (add_feed_form())
+            div hx-sse=(feed_stream) {
+                ul class="stream-messages" hx-sse="swap:message" hx-swap="beforeend" {
+                    li { "Fetching feed..." }
+                }
+            }
+        }
+        .into_string(),
     )
     .into_response())
 }
@@ -174,52 +168,39 @@ pub async fn stream(
             Ok::<Event, String>(
                 Event::default().data(
                     html! {
-                        turbo-stream action="remove" target="feed-stream" {}
-                        turbo-stream action="replace" target=(feed_id) {
-                            template {
-                                li id=(feed_id) { (feed_link(&feed, false)) }
-                            }
-                        }
+                        li { "Crawled feed: " (feed_link(&feed, false)) }
                     }
                     .into_string(),
                 ),
             )
         }
+        Ok(CrawlSchedulerHandleMessage::FeedCrawler(FeedCrawlerHandleMessage::Entry(Ok(
+            entry,
+        )))) => Ok(Event::default().data(
+            html! {
+                li { "Crawled entry: " (entry_link(entry)) }
+            }
+            .into_string(),
+        )),
         Ok(CrawlSchedulerHandleMessage::FeedCrawler(FeedCrawlerHandleMessage::Feed(Err(
             error,
         )))) => Ok(Event::default().data(
             html! {
-                turbo-stream action="remove" target="feed-stream" {}
-                turbo-stream action="replace" target=(feed_id) {
-                    template {
-                        li id=(feed_id) { span class="error" { (error) } }
-                    }
-                }
+                li id=(feed_id) { span class="error" { (error) } }
             }
             .into_string(),
         )),
-        // TODO: these Entry messages are not yet sent, need to handle them better
-        Ok(CrawlSchedulerHandleMessage::FeedCrawler(FeedCrawlerHandleMessage::Entry(Ok(_)))) => {
-            Ok(Event::default().data(
-                html! {
-                    turbo-stream action="replace" target=(feed_id) {
-                        template {
-                            li id=(feed_id) { "fetched entry" }
-                        }
-                    }
-                }
-                .into_string(),
-            ))
-        }
         Ok(CrawlSchedulerHandleMessage::FeedCrawler(FeedCrawlerHandleMessage::Entry(Err(
             error,
         )))) => Ok(Event::default().data(
             html! {
-                turbo-stream action="replace" target=(feed_id) {
-                    template {
-                        li id=(feed_id) { span class="error" { (error) } }
-                    }
-                }
+                li { span class="error" { (error) } }
+            }
+            .into_string(),
+        )),
+        Ok(CrawlSchedulerHandleMessage::Schedule(Err(error))) => Ok(Event::default().data(
+            html! {
+                li { span class="error" { (error) } }
             }
             .into_string(),
        )),
@@ -4,48 +4,23 @@ use maud::html;
 use sqlx::PgPool;
 
 use crate::error::Result;
-use crate::models::feed::{Feed, GetFeedsOptions, DEFAULT_FEEDS_PAGE_SIZE};
-use crate::partials::{feed_link::feed_link, layout::Layout};
+use crate::models::feed::{Feed, GetFeedsOptions};
+use crate::partials::add_feed_form::add_feed_form;
+use crate::partials::feed_list::feed_list;
+use crate::partials::opml_import_form::opml_import_form;
+use crate::partials::layout::Layout;
 
 pub async fn get(State(pool): State<PgPool>, layout: Layout) -> Result<Response> {
     let options = GetFeedsOptions::default();
     let feeds = Feed::get_all(&pool, options.clone()).await?;
-    let len = feeds.len() as i64;
     Ok(layout.render(html! {
         h2 { "Feeds" }
         div class="feeds" {
-            div class="feeds-list" {
-                @if len == 0 {
-                    p id="no-feeds" { "No feeds found." }
-                } else {
-                    ul id="feeds" {
-                        @for feed in feeds {
-                            li { (feed_link(&feed, false)) }
-                        }
-                    }
-                }
-                // TODO: pagination
-                @if len == options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE) {
-                    button id="load-more-feeds" { "Load More" }
-                }
-            }
+            (feed_list(feeds, options))
             div class="add-feed" {
                 h3 { "Add Feed" }
-                form action="/feed" method="post" class="feed-form" {
-                    div class="form-grid" {
-                        label for="url" { "URL: " }
-                        input type="text" id="url" name="url" placeholder="https://example.com/feed.xml" required="true";
-                        button type="submit" { "Add Feed" }
-                    }
-                }
-                form action="/import/opml" method="post" enctype="multipart/form-data" class="feed-form" {
-                    div class="form-grid" {
-                        label for="opml" { "OPML: " }
-                        input type="file" id="opml" name="opml" required="true" accept="text/x-opml,application/xml,text/xml";
-                        button type="submit" { "Import Feeds" }
-                    }
-                }
-                ul id="add-feed-messages" {}
+                (add_feed_form())
+                (opml_import_form())
             }
         }
     }))
@@ -14,8 +14,8 @@ use crate::actors::importer::{ImporterHandle, ImporterHandleMessage};
 use crate::error::{Error, Result};
 use crate::partials::entry_link::entry_link;
 use crate::partials::feed_link::feed_link;
+use crate::partials::opml_import_form::opml_import_form;
 use crate::state::Imports;
-use crate::turbo_stream::TurboStream;
 use crate::uuid::Base62Uuid;
 
 pub async fn opml(
@@ -26,28 +26,27 @@ pub async fn opml(
     if let Some(field) = multipart.next_field().await? {
         let import_id = Base62Uuid::new();
         let file_name = field.file_name().map(|s| s.to_string());
+        dbg!(&file_name);
         let bytes = field.bytes().await?;
+        dbg!(bytes.len());
         let receiver = importer.import(import_id.as_uuid(), file_name, bytes).await;
         {
             let mut imports = imports.lock().await;
             imports.insert(import_id.as_uuid(), receiver);
         }
 
-        let import_stream = format!("/import/{}/stream", import_id);
+        let import_stream = format!("connnect:/import/{}/stream", import_id);
         return Ok((
             StatusCode::CREATED,
-            TurboStream(
-                html! {
-                    turbo-stream-source src=(import_stream) id="import-stream" {}
-                    turbo-stream action="append" target="add-feed-messages" {
-                        template {
-                            li { "Uploading file..." }
-                        }
+            html! {
+                (opml_import_form())
+                div hx-sse=(import_stream) {
+                    ul class="stream-messages" hx-sse="swap:message" hx-swap="beforeend" {
+                        li { "Uploading..."}
                     }
-                    turbo-stream action="remove" target="no-feeds";
                 }
-                .into_string(),
-            ),
+            }
+            .into_string(),
         )
         .into_response());
     }
@@ -65,14 +64,11 @@ pub async fn stream(
         .ok_or_else(|| Error::NotFound("import stream", id.as_uuid()))?;
 
     let stream = BroadcastStream::new(receiver);
-    let import_html_id = format!("import-{}", id);
     let stream = stream.map(move |msg| match msg {
         Ok(ImporterHandleMessage::Import(Ok(_))) => Ok::<Event, String>(
             Event::default().data(
                 html! {
-                    turbo-stream action="append" target="add-feed-messages" {
-                        template { li { "Importing...." } }
-                    }
+                    li { "Finished importing" }
                 }
                 .into_string(),
             ),
@@ -82,11 +78,7 @@ pub async fn stream(
         ))) => Ok::<Event, String>(
             Event::default().data(
                 html! {
-                    turbo-stream action="append" target="add-feed-messages" {
-                        template {
-                            li { "Imported: " (entry_link(entry)) }
-                        }
-                    }
+                    li { "Crawled entry: " (entry_link(entry)) }
                 }
                 .into_string(),
             ),
@@ -96,17 +88,7 @@ pub async fn stream(
         ))) => Ok::<Event, String>(
             Event::default().data(
                 html! {
-                    turbo-stream action="remove" target="import-stream" {}
-                    turbo-stream action="append" target="add-feed-messages" {
-                        template {
-                            li { "Finished import." }
-                        }
-                    }
-                    turbo-stream action="prepend" target="feeds" {
-                        template {
-                            li id=(format!("feed-{}", feed.feed_id)) { (feed_link(&feed, false)) }
-                        }
-                    }
+                    li { "Crawled feed: " (feed_link(&feed, false)) }
                 }
                 .into_string(),
             ),
@@ -116,11 +98,7 @@ pub async fn stream(
         ))) => Ok::<Event, String>(
             Event::default().data(
                 html! {
-                    turbo-stream action="append" target="add-feed-messages" {
-                        template {
-                            li { span class="error" { (error) } }
-                        }
-                    }
+                    li { span class="error" { (error) } }
                 }
                 .into_string(),
             ),
@@ -130,9 +108,27 @@ pub async fn stream(
         ))) => Ok::<Event, String>(
             Event::default().data(
                 html! {
-                    turbo-stream action="append" target="add-feed-messages" {
-                        template {
-                            li { span class="error" { (error) } }
+                    li { span class="error" { (error) } }
+                }
+                .into_string(),
+            ),
+        ),
+        Ok(ImporterHandleMessage::CrawlScheduler(CrawlSchedulerHandleMessage::Schedule(Err(
+            error,
+        )))) => Ok::<Event, String>(
+            Event::default().data(
+                html! {
+                    li { span class="error" { (error) } }
+                }
+                .into_string(),
+            ),
+        ),
+        Ok(ImporterHandleMessage::CreateFeedError(url)) => Ok::<Event, String>(
+            Event::default().data(
+                html! {
+                    li {
+                        span class="error" {
+                            "Could not create feed for url: " a href=(url) { (url) }
                         }
                     }
                 }
@@ -141,17 +137,13 @@ pub async fn stream(
             ),
         Ok(ImporterHandleMessage::Import(Err(error))) => Ok(Event::default().data(
             html! {
-                turbo-stream action="remove" target="import-stream" {}
-                turbo-stream action="append" target="add-feed-messages" {
-                    template {
-                        li { span class="error" { (error) } }
-                    }
-                }
-                turbo-stream action="replace" target=(import_html_id) {
-                    template {
-                        li id=(import_html_id) { span class="error" { (error) } }
-                    }
-                }
+                li { span class="error" { (error) } }
+            }
+            .into_string(),
+        )),
+        Ok(ImporterHandleMessage::AlreadyImported(url)) => Ok(Event::default().data(
+            html! {
+                li { "Already imported feed: " a href=(url) { (url) } }
             }
             .into_string(),
         )),
@@ -23,8 +23,9 @@ use crate::partials::layout::Layout;
 pub async fn get(layout: Layout) -> Result<Response> {
     let mem_buf = MEM_LOG.lock().unwrap();
     Ok(layout.render(html! {
-        turbo-stream-source src="/log/stream" {}
-        pre id="log" { (PreEscaped(convert_escaped(from_utf8(mem_buf.as_slices().0).unwrap()).unwrap())) }
+        pre id="log" hx-sse="connect:/log/stream swap:message" hx-swap="beforeend" {
+            (PreEscaped(convert_escaped(from_utf8(mem_buf.as_slices().0).unwrap()).unwrap()))
+        }
     }))
 }
 
@@ -35,11 +36,7 @@ pub async fn stream(
     let log_stream = log_stream.map(|line| {
         Ok(Event::default().data(
             html! {
-                turbo-stream action="append" target="log" {
-                    template {
-                        (PreEscaped(convert_escaped(from_utf8(&line).unwrap()).unwrap()))
-                    }
-                }
+                (PreEscaped(convert_escaped(from_utf8(&line).unwrap()).unwrap()))
             }
             .into_string(),
        ))
@@ -7,7 +7,6 @@ pub mod log;
 pub mod models;
 pub mod partials;
 pub mod state;
-pub mod turbo_stream;
 pub mod utils;
 pub mod uuid;
 
@@ -71,7 +71,7 @@ pub struct Feed {
     pub deleted_at: Option<DateTime<Utc>>,
 }
 
-#[derive(Debug, Deserialize, Validate)]
+#[derive(Debug, Deserialize, Default, Validate)]
 pub struct CreateFeed {
     #[validate(length(max = 255))]
     pub title: Option<String>,
src/partials/add_feed_form.rs (new file)
@@ -0,0 +1,13 @@
+use maud::{html, Markup};
+
+pub fn add_feed_form() -> Markup {
+    html! {
+        form hx-post="/feed" class="feed-form" {
+            div class="form-grid" {
+                label for="url" { "URL: " }
+                input type="text" id="url" name="url" placeholder="https://example.com/feed.xml" required="true";
+                button type="submit" { "Add Feed" }
+            }
+        }
+    }
+}
src/partials/feed_list.rs (new file)
@@ -0,0 +1,25 @@
+use maud::{html, Markup};
+
+use crate::models::feed::{Feed, GetFeedsOptions, DEFAULT_FEEDS_PAGE_SIZE};
+use crate::partials::feed_link::feed_link;
+
+pub fn feed_list(feeds: Vec<Feed>, options: GetFeedsOptions) -> Markup {
+    let len = feeds.len() as i64;
+    html! {
+        div class="feeds-list" {
+            @if len == 0 {
+                p id="no-feeds" { "No feeds found." }
+            } else {
+                ul id="feeds" {
+                    @for feed in feeds {
+                        li { (feed_link(&feed, false)) }
+                    }
+                }
+            }
+            // TODO: pagination
+            @if len == options.limit.unwrap_or(DEFAULT_FEEDS_PAGE_SIZE) {
+                button id="load-more-feeds" { "Load More" }
+            }
+        }
+    }
+}
@@ -95,9 +95,8 @@ impl Layout {
                 meta charset="utf-8";
                 title { (self.title) }
                 // TODO: vendor this before going to prod
-                script type="module" {
-                    r#"import * as Turbo from 'https://cdn.skypack.dev/@hotwired/turbo';"#
-                }
+                script src="https://unpkg.com/htmx.org@1.9.5" integrity="sha384-xcuj3WpfgjlKF+FXhSQFQ0ZNr39ln+hwjN3npfM9VBnUskLolQAcN80McRIVOPuO" crossorigin="anonymous" {}
+                script src="https://unpkg.com/htmx.org/dist/ext/sse.js" {}
                 @for js_bundle in js_bundles() {
                     script type="module" src=(js_bundle) {}
                 }
@@ -105,7 +104,7 @@ impl Layout {
                     link rel="stylesheet" href=(css_bundle) {}
                 }
             }
-            body {
+            body hx-booster="true" {
                 (header(&self.title))
                 (template)
             }
@@ -1,5 +1,8 @@
+pub mod add_feed_form;
 pub mod entry_link;
 pub mod entry_list;
 pub mod feed_link;
+pub mod feed_list;
 pub mod header;
 pub mod layout;
+pub mod opml_import_form;
src/partials/opml_import_form.rs (new file)
@@ -0,0 +1,24 @@
+use maud::{html, Markup, PreEscaped};
+
+pub fn opml_import_form() -> Markup {
+    html! {
+        form id="opml-import-form" hx-post="/import/opml" hx-encoding="multipart/form-data" class="feed-form" {
+            div class="form-grid" {
+                label for="opml" { "OPML: " }
+                input type="file" id="opml" name="opml" required="true" accept="text/x-opml,application/xml,text/xml";
+                button type="submit" { "Import Feeds" }
+                progress id="opml-upload-progress" max="100" value="0" hidden="true" {}
+            }
+            script {
+                (PreEscaped(r#"
+                    htmx.on('#opml-import-form', 'htmx:xhr:progress', function (evt) {
+                        htmx.find('#opml-upload-progress').setAttribute(
+                            'value',
+                            evt.detail.loaded / evt.detail.total * 100,
+                        );
+                    });
+                "#))
+            }
+        }
+    }
+}
@@ -1,34 +0,0 @@
-use axum::response::{IntoResponse, Response};
-use axum::http::{header, HeaderValue};
-use axum::body::{Bytes, Full};
-
-/// A Turbo Stream HTML response.
-///
-/// See [the Turbo Streams specification](https://turbo.hotwire.dev/handbook/streams) for more
-/// details.
-///
-/// Will automatically get `Content-Type: text/vnd.turbo-stream.html`.
-#[derive(Clone, Copy, Debug)]
-pub struct TurboStream<T>(pub T);
-
-impl<T> IntoResponse for TurboStream<T>
-where
-    T: Into<Full<Bytes>>,
-{
-    fn into_response(self) -> Response {
-        (
-            [(
-                header::CONTENT_TYPE,
-                HeaderValue::from_static("text/vnd.turbo-stream.html"),
-            )],
-            self.0.into(),
-        )
-        .into_response()
-    }
-}
-
-impl<T> From<T> for TurboStream<T> {
-    fn from(inner: T) -> Self {
-        Self(inner)
-    }
-}