diff --git a/README.md b/README.md
index 24bebb8..2c14e9a 100644
--- a/README.md
+++ b/README.md
@@ -50,4 +50,4 @@ news = [
 
 After running Packard with your configured settings, the parsed results can be opened in your default browser however your terminal allows for opening URLs. For example the keybind for this with [Foot](https://codeberg.org/dnkl/foot#urls) is `ctrl` + `shift` + `o`.
 
-Currently no keyboard interaction is implemented. To get around this you can pipe the output of Packard into a tool like `less` like so: `packard -c 12 -l news -s 3 | less`. Be aware, this will remove any text formatting that has been applied.
+Currently no keyboard interaction is implemented. To get around this you can pipe the output of Packard into a tool like `less` like so: `packard -c 12 -l news -s 3 | less`.
diff --git a/src/config.rs b/src/config.rs
new file mode 100644
index 0000000..5c13f05
--- /dev/null
+++ b/src/config.rs
@@ -0,0 +1,47 @@
+use clap::Parser;
+use serde::Deserialize;
+use std::collections::HashMap;
+use std::fs;
+use toml;
+use xdg::BaseDirectories;
+
+#[derive(Parser, Debug)]
+#[command(author, version, about, long_about = None)]
+pub struct Cli {
+    #[arg(short, long)]
+    pub verbose: bool,
+    #[arg(short, long)]
+    pub count: Option<u8>,
+    #[arg(short = 'l', long)]
+    pub selected_list: Option<String>,
+    #[arg(short, long)]
+    pub skip_amount: Option<u8>,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct Config {
+    pub count: Option<u8>,
+    pub skip_amount: Option<u8>,
+    pub selected_list: Option<String>,
+    pub lists: HashMap<String, Vec<String>>,
+}
+
+pub fn parse_cli() -> Cli {
+    let args = Cli::parse();
+    args
+}
+
+pub fn validate_config() -> Config {
+    let xdg_dirs = BaseDirectories::new().expect("Failed to get XDG directories");
+    let config_path = xdg_dirs
+        .place_config_file("packard/config.toml")
+        .expect("Failed to determine config file path");
+
+    if !config_path.exists() {
+        eprintln!("Configuration file not found at {:?}", config_path);
+    }
+
+    let config_content = fs::read_to_string(&config_path).expect("Failed to read config file");
+    let config: Config = toml::de::from_str(&config_content).expect("Failed to parse TOML");
+    config
+}
diff --git a/src/data.rs b/src/data.rs
new file mode 100644
index 0000000..aecaa0b
--- /dev/null
+++ b/src/data.rs
@@ -0,0 +1,63 @@
+use chrono::{DateTime, Utc};
+use futures::future::join_all;
+use indicatif::ProgressBar;
+use reqwest::get;
+use rss::Channel;
+use std::error::Error;
+
+#[derive(Debug)]
+pub struct FeedItem {
+    pub title: String,
+    pub description: String,
+    pub link: String,
+    pub pub_date: DateTime<Utc>,
+}
+
+async fn fetch_rss(url: &str, pb: &ProgressBar) -> Result<Channel, Box<dyn Error>> {
+    let response = get(url).await?.text().await?;
+    let channel = Channel::read_from(response.as_bytes())?;
+    pb.inc(1);
+    pb.set_message(format!("Processing: {}", channel.title));
+    Ok(channel)
+}
+
+fn parse_feed(channel: &Channel) -> Vec<FeedItem> {
+    channel
+        .items()
+        .iter()
+        .map(|item| FeedItem {
+            title: item.title().unwrap_or("No title").to_string(),
+            description: item.description().unwrap_or("No description").to_string(),
+            link: item.link().unwrap_or("No link").to_string(),
+            pub_date: item
+                .pub_date()
+                .and_then(|date_str| DateTime::parse_from_rfc2822(date_str).ok())
+                .map(|dt| dt.with_timezone(&Utc))
+                .unwrap_or_else(|| Utc::now()),
+        })
+        .collect()
+}
+
+pub async fn run_tasks(
+    sources: Vec<String>,
+    count: u8,
+    skip: u8,
+    pb: &ProgressBar,
+) -> Vec<FeedItem> {
+    let fetch_futures: Vec<_> = sources.iter().map(|url| fetch_rss(url, &pb)).collect();
+
+    let channels = join_all(fetch_futures).await;
+
+    let mut all_items = Vec::new();
+
+    for channel in channels.into_iter().filter_map(Result::ok) {
+        let feed_items = parse_feed(&channel);
+        all_items.extend(feed_items);
+    }
+
+    all_items.sort_by(|a, b| b.pub_date.cmp(&a.pub_date));
+    all_items.truncate((count + skip).into());
+    let removed_items = all_items.split_off(skip.into());
+
+    removed_items
+}
diff --git a/src/main.rs b/src/main.rs
index cb9eefb..d1140f5 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,86 +1,10 @@
-use chrono::{DateTime, Utc};
-use clap::Parser;
-use futures::future::join_all;
-use indicatif::{ProgressBar, ProgressStyle};
-use reqwest::get;
-use rss::Channel;
-use serde::Deserialize;
-use std::collections::HashMap;
+use indicatif::ProgressStyle;
 use std::error::Error;
-use std::fs;
 use terminal_link::Link;
 use tokio;
-use toml;
-use xdg::BaseDirectories;
 
-#[derive(Parser, Debug)]
-#[command(author, version, about, long_about = None)]
-struct Cli {
-    #[arg(short, long)]
-    verbose: bool,
-    #[arg(short, long)]
-    count: Option<u8>,
-    #[arg(short = 'l', long)]
-    selected_list: Option<String>,
-    #[arg(short, long)]
-    skip_amount: Option<u8>,
-}
-
-#[derive(Debug, Deserialize)]
-struct Config {
-    count: Option<u8>,
-    skip_amount: Option<u8>,
-    selected_list: Option<String>,
-    lists: HashMap<String, Vec<String>>,
-}
-
-#[derive(Debug)]
-struct FeedItem {
-    title: String,
-    description: String,
-    link: String,
-    pub_date: DateTime<Utc>,
-}
-
-async fn fetch_rss(url: &str, pb: &ProgressBar) -> Result<Channel, Box<dyn Error>> {
-    let response = get(url).await?.text().await?;
-    let channel = Channel::read_from(response.as_bytes())?;
-    pb.inc(1);
-    pb.set_message(format!("Processing: {}", channel.title));
-    Ok(channel)
-}
-
-fn parse_feed(channel: &Channel) -> Vec<FeedItem> {
-    channel
-        .items()
-        .iter()
-        .map(|item| FeedItem {
-            title: item.title().unwrap_or("No title").to_string(),
-            description: item.description().unwrap_or("No description").to_string(),
-            link: item.link().unwrap_or("No link").to_string(),
-            pub_date: item
-                .pub_date()
-                .and_then(|date_str| DateTime::parse_from_rfc2822(date_str).ok())
-                .map(|dt| dt.with_timezone(&Utc))
-                .unwrap_or_else(|| Utc::now()),
-        })
-        .collect()
-}
-
-fn validate_config() -> Config {
-    let xdg_dirs = BaseDirectories::new().expect("Failed to get XDG directories");
-    let config_path = xdg_dirs
-        .place_config_file("packard/config.toml")
-        .expect("Failed to determine config file path");
-
-    if !config_path.exists() {
-        eprintln!("Configuration file not found at {:?}", config_path);
-    }
-
-    let config_content = fs::read_to_string(&config_path).expect("Failed to read config file");
-    let config: Config = toml::de::from_str(&config_content).expect("Failed to parse TOML");
-    config
-}
+mod config;
+mod data;
 
 fn trim_chars(input: &str) -> String {
     let trimmed: String = input.chars().take(256).collect();
@@ -92,29 +16,10 @@ fn trim_chars(input: &str) -> String {
     }
 }
 
-async fn run_tasks(sources: Vec<String>, count: u8, skip: u8, pb: &ProgressBar) -> Vec<FeedItem> {
-    let fetch_futures: Vec<_> = sources.iter().map(|url| fetch_rss(url, &pb)).collect();
-
-    let channels = join_all(fetch_futures).await;
-
-    let mut all_items = Vec::new();
-
-    for channel in channels.into_iter().filter_map(Result::ok) {
-        let feed_items = parse_feed(&channel);
-        all_items.extend(feed_items);
-    }
-
-    all_items.sort_by(|a, b| b.pub_date.cmp(&a.pub_date));
-    all_items.truncate((count + skip).into());
-    let removed_items = all_items.split_off(skip.into());
-
-    removed_items
-}
-
 #[tokio::main]
 async fn main() -> Result<(), Box<dyn Error>> {
-    let config = validate_config();
-    let args = Cli::parse();
+    let config = config::validate_config();
+    let args = config::parse_cli();
 
     if args.verbose {
         println!("{:?}", args);
@@ -157,7 +62,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
         ProgressStyle::with_template("[{elapsed}] {bar:40.green/black} {msg}").unwrap(),
     );
 
-    let all_items = run_tasks(values.to_vec(), count, skip_amount, &pb).await;
+    let all_items = data::run_tasks(values.to_vec(), count, skip_amount, &pb).await;
     pb.finish_and_clear();
 
     for item in all_items {
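
The new `config::validate_config()` reads `packard/config.toml` from the XDG config directory and deserializes it into `Config`. As a rough sketch of what a matching file can look like, here is a minimal example parsed from an inline string; the values echo the README invocation (`-c 12 -l news -s 3`) and the feed URL is a placeholder, not a real source:

```rust
use serde::Deserialize;
use std::collections::HashMap;

// Mirrors the Config struct introduced in src/config.rs.
#[derive(Debug, Deserialize)]
struct Config {
    count: Option<u8>,
    skip_amount: Option<u8>,
    selected_list: Option<String>,
    lists: HashMap<String, Vec<String>>,
}

fn main() {
    // Hypothetical config.toml contents; only the shape matters here.
    let raw = r#"
        count = 12
        skip_amount = 3
        selected_list = "news"

        [lists]
        news = ["https://example.com/feed.xml"]
    "#;

    let config: Config = toml::de::from_str(raw).expect("Failed to parse TOML");
    println!("{:?}", config.lists.get("news"));
}
```

Note that `validate_config()` only warns when the file is missing and then still calls `fs::read_to_string`, so a missing config currently ends in a panic rather than a graceful exit.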
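In `src/data.rs`, `parse_feed` converts each item's `pubDate` with chrono's RFC 2822 parser, normalizes it to UTC, and falls back to `Utc::now()` when the field is missing or malformed, which places undated items at the top of the newest-first sort. A small standalone sketch of that chain, mirroring the `Option<&str>` returned by `item.pub_date()`:

```rust
use chrono::{DateTime, Utc};

// Same fallback chain as the pub_date handling in parse_feed.
fn parse_pub_date(raw: Option<&str>) -> DateTime<Utc> {
    raw.and_then(|s| DateTime::parse_from_rfc2822(s).ok())
        .map(|dt| dt.with_timezone(&Utc))
        .unwrap_or_else(Utc::now)
}

fn main() {
    // A well-formed RSS pubDate parses and converts to UTC...
    println!("{}", parse_pub_date(Some("Tue, 1 Jul 2003 10:52:37 +0200")));
    // ...while a missing or malformed one falls back to the current time.
    println!("{}", parse_pub_date(None));
    println!("{}", parse_pub_date(Some("not a date")));
}
```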
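`data::run_tasks` merges every fetched feed, sorts the items newest-first, and then applies the skip/count window: `truncate(count + skip)` bounds the list and `split_off(skip)` hands back the tail while dropping the skipped head. A standalone sketch of that arithmetic, with integers standing in for `FeedItem`s ordered by date:

```rust
// Same windowing as data::run_tasks, minus the fetching and parsing.
fn window(mut items: Vec<u32>, count: u8, skip: u8) -> Vec<u32> {
    // Newest first; run_tasks sorts by pub_date descending.
    items.sort_by(|a, b| b.cmp(a));
    // Keep only the first `count + skip` entries...
    items.truncate((count + skip).into());
    // ...then split off everything after the first `skip`; split_off returns the tail.
    items.split_off(skip.into())
}

fn main() {
    let items = vec![3, 9, 1, 7, 5, 8];
    // Sorted: [9, 8, 7, 5, 3, 1]; count = 2, skip = 1 keeps [8, 7].
    assert_eq!(window(items, 2, 1), vec![8, 7]);
}
```

Despite its name, `removed_items` in `run_tasks` holds the items that survive the window, because `split_off` returns the kept tail and leaves the skipped head behind in `all_items`.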