initial commit

commit 2a9f427bc7

21 changed files with 3692 additions and 0 deletions
src/feeds/mod.rs (new file, 74 lines)

@@ -0,0 +1,74 @@
use std::collections::HashMap;
use std::fmt;

use futures::future::join_all;
use rss::{GuidBuilder, Item, ItemBuilder};
use serde::Deserialize;
use time::format_description::well_known::iso8601::FormattedComponents;
use time::format_description::well_known::{iso8601, Iso8601};
use time::Date;

use crate::scrapers::page_url;
use crate::{hash, scrapers};

pub mod route;
pub mod template;

/// A subscribed feed, identified by an FNV hash of its URL.
#[derive(Clone, Debug, Deserialize)]
pub struct Feed {
    pub title: String,
    pub url: String,
    pub id: String,
}

impl fmt::Display for Feed {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} ({})", self.title, self.url)
    }
}
/// A single scraped entry, destined to become one RSS item.
#[derive(Clone, Debug, Deserialize)]
pub struct Footprint {
    pub title: String,
    pub text: String,
    pub url: String,
    pub date: Date,
    pub page: u8,
}

// ISO 8601 config restricted to the date component (e.g. "2024-01-15").
const ISO8601_DATE: u128 = iso8601::Config::DEFAULT
    .set_formatted_components(FormattedComponents::Date)
    .encode();

impl Footprint {
    /// Converts this footprint into an RSS item, appending a link back to
    /// the page it was scraped from.
    pub fn into_rss_item(self, root_url: &str) -> Item {
        let desc = format!(
            "{} <br /><br /> --- <br /><br /> from {}",
            self.text,
            page_url(root_url, self.page)
        );
        ItemBuilder::default()
            .title(Some(self.title))
            .pub_date(self.date.format(&Iso8601::<ISO8601_DATE>).ok())
            .link(Some(self.url.clone()))
            .description(Some(desc))
            .guid(Some(GuidBuilder::default().value(self.url).build()))
            .build()
    }
}
/// Fetches the title of every feed URL concurrently and returns the
/// resulting feeds keyed by their FNV id.
pub async fn enrich_feeds(feed_urls: Vec<String>) -> HashMap<String, Feed> {
    let feeds = join_all(feed_urls.iter().map(|feed_url| async {
        // Panics if a feed's title cannot be fetched.
        let title = scrapers::feed_title(feed_url).await.unwrap();
        let id = hash::fnv_str(feed_url);

        Feed {
            title,
            url: feed_url.clone(),
            id,
        }
    }))
    .await;

    feeds.iter().map(|x| (x.id.clone(), x.clone())).collect()
}
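For reference, a minimal sketch of driving Footprint::into_rss_item with placeholder data; the function name, values, and root URL below are illustrative and not part of this commit:

use time::{Date, Month};

fn demo_into_rss_item() {
    // Placeholder footprint; in the real flow these values come from the scrapers.
    let fp = Footprint {
        title: "A footprint".to_string(),
        text: "Some text".to_string(),
        url: "https://example.com/post/1".to_string(),
        date: Date::from_calendar_date(2024, Month::January, 15).unwrap(),
        page: 1,
    };

    let item = fp.into_rss_item("https://example.com");
    // The rss crate's getters return Option<&str>.
    assert_eq!(item.title(), Some("A footprint"));
    println!("{:?}", item.pub_date()); // date-only ISO 8601, e.g. Some("2024-01-15")
}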
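And a sketch of calling enrich_feeds; the Tokio runtime and the feed URL are assumptions (the commit does not show the binary entry point), and the call performs network fetches via scrapers::feed_title:

#[tokio::main] // assumed runtime; any async executor the crate uses would do
async fn main() {
    let urls = vec!["https://example.com/atom.xml".to_string()];

    // Titles are fetched concurrently; the map is keyed by each feed's FNV id.
    let feeds = enrich_feeds(urls).await;
    for (id, feed) in &feeds {
        println!("{id}: {feed}"); // Display prints "title (url)"
    }
}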