Move code around

Greg Shuflin 2025-02-05 15:46:15 -08:00
parent 5db16fc2ce
commit cf89d17035
4 changed files with 71 additions and 61 deletions

View File

@@ -61,17 +61,20 @@ pub async fn setup_demo_data(pool: &sqlx::SqlitePool) {
         .await
         .expect("Failed to create demo user");
 
-    let feeds: Vec<Feed> = DEMO_FEEDS.iter().map(|demo_feed| {
-        let mut feed = Feed::new(
-            demo_feed.name.to_string(),
-            demo_feed.url.parse().unwrap(),
-            demo.id,
-        );
-        if let Some(category) = demo_feed.category {
-            feed.categorization = vec![category.to_string()];
-        }
-        feed
-    }).collect();
+    let feeds: Vec<Feed> = DEMO_FEEDS
+        .iter()
+        .map(|demo_feed| {
+            let mut feed = Feed::new(
+                demo_feed.name.to_string(),
+                demo_feed.url.parse().unwrap(),
+                demo.id,
+            );
+            if let Some(category) = demo_feed.category {
+                feed.categorization = vec![category.to_string()];
+            }
+            feed
+        })
+        .collect();
 
     for feed in feeds {
         feed.write_to_database(pool)
@@ -90,6 +93,5 @@ pub async fn setup_demo_data(pool: &sqlx::SqlitePool) {
     }
     */
     info!("Successfully set up demo data");
 }

View File

@@ -9,6 +9,7 @@ mod demo;
 mod feed_utils;
 mod feeds;
 mod poll;
+mod poll_utils;
 mod session_store;
 mod user;

View File

@@ -1,14 +1,13 @@
+use crate::poll_utils::Entry;
 use crate::user::AuthenticatedUser;
-use crate::{feed_utils::fetch_feed, Db};
-use chrono::{DateTime, Duration, Utc};
-use feed_rs::model::Text;
+use crate::Db;
+use chrono::{Duration, Utc};
 use rocket::http::Status;
 use rocket::serde::uuid::Uuid;
 use rocket::serde::{self, json::Json, Deserialize, Serialize};
 use rocket_db_pools::Connection;
 use sqlx::{Acquire, SqliteConnection};
 use tracing::{error, info};
-use url::Url;
 
 const POLLING_INTERVAL: Duration = Duration::minutes(20);
 const MAX_ENTRIES_PER_FEED: i32 = 30;
@@ -21,23 +20,6 @@ pub struct FeedPollResponse {
     entries: Vec<Entry>,
 }
 
-#[derive(Debug, Serialize)]
-#[serde(crate = "rocket::serde")]
-struct Entry {
-    /// id is the id from the feed, and is the primary key of entries
-    id: String,
-    /// local_id is a UUID generated locally. it is used because we don't have control over the
-    /// exact format of the id from the feed entry
-    local_id: Uuid,
-    title: String,
-    published: Option<DateTime<Utc>>,
-    updated: Option<DateTime<Utc>>,
-    summary: String,
-    content: Option<feed_rs::model::Content>,
-    link: Option<String>,
-    marked_read: Option<DateTime<Utc>>,
-}
-
 #[derive(Debug, Deserialize)]
 #[serde(crate = "rocket::serde")]
 pub struct EntryStateUpdate {
@@ -182,33 +164,6 @@ async fn read_entries(feed_id: &str, db: &mut SqliteConnection) -> Result<Vec<En
     Ok(entries)
 }
 
-/// Perform the request to fetch from the remote feed url
-async fn fetch_new_entries(url: &Url) -> Result<Vec<Entry>, Status> {
-    let feed_data = fetch_feed(url).await.map_err(|_| Status::BadGateway)?;
-
-    fn get(item: Option<Text>, name: &'static str) -> String {
-        item.map(|t| t.content.to_string())
-            .unwrap_or(format!("<no {name}>"))
-    }
-
-    let entries: Vec<Entry> = feed_data
-        .entries
-        .into_iter()
-        .map(|feed_entry| Entry {
-            id: feed_entry.id,
-            local_id: Uuid::new_v4(),
-            title: get(feed_entry.title, "title"),
-            published: feed_entry.published,
-            updated: feed_entry.updated,
-            summary: get(feed_entry.summary, "summary"),
-            content: feed_entry.content,
-            link: feed_entry.links.first().map(|l| l.href.clone()),
-            marked_read: None,
-        })
-        .collect();
-
-    Ok(entries)
-}
-
 #[post("/poll/<feed_id>")]
 pub async fn poll_feed(
     mut db: Connection<Db>,
@@ -239,7 +194,7 @@ pub async fn poll_feed(
         read_entries(&feed_id, &mut db).await?
     } else {
         info!("Fetching new entries for feed {}", feed_id);
-        let entries = fetch_new_entries(&url).await?;
+        let entries = crate::poll_utils::fetch_new_entries(&url).await?;
         update_entry_db(&entries, &feed_id, &mut db).await?;
         entries
     };

src/poll_utils.rs (new file, 52 lines added)
View File

@@ -0,0 +1,52 @@
+use crate::feed_utils::fetch_feed;
+use chrono::{DateTime, Utc};
+use feed_rs::model::Text;
+use rocket::http::Status;
+use rocket::serde::uuid::Uuid;
+use rocket::serde::Serialize;
+use url::Url;
+
+#[derive(Debug, Serialize)]
+#[serde(crate = "rocket::serde")]
+pub struct Entry {
+    /// id is the id from the feed, and is the primary key of entries
+    pub id: String,
+    /// local_id is a UUID generated locally. it is used because we don't have control over the
+    /// exact format of the id from the feed entry
+    pub local_id: Uuid,
+    pub title: String,
+    pub published: Option<DateTime<Utc>>,
+    pub updated: Option<DateTime<Utc>>,
+    pub summary: String,
+    pub content: Option<feed_rs::model::Content>,
+    pub link: Option<String>,
+    pub marked_read: Option<DateTime<Utc>>,
+}
+
+///
+/// Perform the request to fetch from the remote feed url
+pub async fn fetch_new_entries(url: &Url) -> Result<Vec<Entry>, Status> {
+    let feed_data = fetch_feed(url).await.map_err(|_| Status::BadGateway)?;
+
+    fn get(item: Option<Text>, name: &'static str) -> String {
+        item.map(|t| t.content.to_string())
+            .unwrap_or(format!("<no {name}>"))
+    }
+
+    let entries: Vec<Entry> = feed_data
+        .entries
+        .into_iter()
+        .map(|feed_entry| Entry {
+            id: feed_entry.id,
+            local_id: Uuid::new_v4(),
+            title: get(feed_entry.title, "title"),
+            published: feed_entry.published,
+            updated: feed_entry.updated,
+            summary: get(feed_entry.summary, "summary"),
+            content: feed_entry.content,
+            link: feed_entry.links.first().map(|l| l.href.clone()),
+            marked_read: None,
+        })
+        .collect();
+
+    Ok(entries)
+}