separate modules into different files

This commit is contained in:
Troy 2025-01-14 12:32:48 +00:00
parent 5a9338f64d
commit 28d59a4c7a
Signed by: troy
GPG key ID: DFC06C02ED3B4711
4 changed files with 117 additions and 102 deletions

View file

@ -50,4 +50,4 @@ news = [
After running Packard with your configured settings, the parsed results can be opened in your default browser however your terminal allows for opening URLs. For example the keybind for this with [Foot](https://codeberg.org/dnkl/foot#urls) is `ctrl` + `shift` + `o`.
Currently no keyboard interaction is implemented. To get around this you can pipe the output of Packard into a tool like `less` like so: `packard -c 12 -l news -s 3 | less`.

47
src/config.rs Normal file
View file

@ -0,0 +1,47 @@
use clap::Parser;
use serde::Deserialize;
use std::collections::HashMap;
use std::fs;
use toml;
use xdg::BaseDirectories;
/// Command-line arguments, parsed via clap's derive API.
///
/// All options are optional; `main` falls back to values from [`Config`]
/// when a flag is not supplied (TODO confirm against caller — the merge
/// logic lives in main.rs, outside this file).
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Cli {
    /// Print diagnostic output (the parsed args are echoed in main when set).
    #[arg(short, long)]
    pub verbose: bool,
    /// Maximum number of feed items to display.
    #[arg(short, long)]
    pub count: Option<u8>,
    /// Name of the feed list (a key of `Config::lists`) to fetch.
    #[arg(short = 'l', long)]
    pub selected_list: Option<String>,
    /// Number of newest items to skip before displaying.
    #[arg(short, long)]
    pub skip_amount: Option<u8>,
}
/// On-disk configuration, deserialized from `packard/config.toml`.
///
/// Mirrors the CLI options so either source can supply them.
#[derive(Debug, Deserialize)]
pub struct Config {
    /// Default item count when `--count` is not given.
    pub count: Option<u8>,
    /// Default skip amount when `--skip-amount` is not given.
    pub skip_amount: Option<u8>,
    /// Default list name when `--selected-list` is not given.
    pub selected_list: Option<String>,
    /// Named feed lists: list name -> URLs (presumably RSS feed URLs;
    /// they are passed to the fetcher in main — verify against caller).
    pub lists: HashMap<String, Vec<String>>,
}
/// Parse the process's command-line arguments into a [`Cli`].
///
/// Thin wrapper over [`clap::Parser::parse`]; on invalid input clap prints
/// a usage message and exits the process (its default behavior).
pub fn parse_cli() -> Cli {
    Cli::parse()
}
/// Load and parse `$XDG_CONFIG_HOME/packard/config.toml`.
///
/// Exits the process with an actionable message when the file does not
/// exist; panics if the file cannot be read or is not valid TOML.
pub fn validate_config() -> Config {
    let xdg_dirs = BaseDirectories::new().expect("Failed to get XDG directories");
    let config_path = xdg_dirs
        .place_config_file("packard/config.toml")
        .expect("Failed to determine config file path");
    if !config_path.exists() {
        // Bail out here: previously this only warned and then fell through
        // to `read_to_string`, which panicked with the unrelated message
        // "Failed to read config file".
        eprintln!("Configuration file not found at {:?}", config_path);
        std::process::exit(1);
    }
    let config_content = fs::read_to_string(&config_path).expect("Failed to read config file");
    toml::de::from_str(&config_content).expect("Failed to parse TOML")
}

63
src/data.rs Normal file
View file

@ -0,0 +1,63 @@
use chrono::{DateTime, Utc};
use futures::future::join_all;
use indicatif::ProgressBar;
use reqwest::get;
use rss::Channel;
use std::error::Error;
/// One entry parsed out of an RSS channel, normalized for display.
#[derive(Debug)]
pub struct FeedItem {
    /// Item title; "No title" when the feed omits it.
    pub title: String,
    /// Item description; "No description" when the feed omits it.
    pub description: String,
    /// Item link; "No link" when the feed omits it.
    pub link: String,
    /// Publication date in UTC; falls back to `Utc::now()` when the feed's
    /// date is missing or not valid RFC 2822.
    pub pub_date: DateTime<Utc>,
}
/// Download the RSS document at `url` and parse it into a [`Channel`].
///
/// Advances `pb` by one step and sets its message to the feed's title once
/// the document has been parsed. Network and parse errors are propagated.
async fn fetch_rss(url: &str, pb: &ProgressBar) -> Result<Channel, Box<dyn Error>> {
    let body = get(url).await?.text().await?;
    let parsed = Channel::read_from(body.as_bytes())?;
    pb.inc(1);
    pb.set_message(format!("Processing: {}", parsed.title));
    Ok(parsed)
}
/// Convert every item of `channel` into a [`FeedItem`], substituting
/// placeholder strings for missing fields and the current time for a
/// missing or unparseable publication date.
fn parse_feed(channel: &Channel) -> Vec<FeedItem> {
    let mut items = Vec::new();
    for entry in channel.items() {
        let pub_date = entry
            .pub_date()
            .and_then(|raw| DateTime::parse_from_rfc2822(raw).ok())
            .map(|dt| dt.with_timezone(&Utc))
            .unwrap_or_else(Utc::now);
        items.push(FeedItem {
            title: entry.title().unwrap_or("No title").to_string(),
            description: entry.description().unwrap_or("No description").to_string(),
            link: entry.link().unwrap_or("No link").to_string(),
            pub_date,
        });
    }
    items
}
/// Fetch all `sources` concurrently, merge their items, sort newest-first,
/// and return up to `count` items after skipping the first `skip`.
///
/// Feeds that fail to fetch or parse are silently dropped; `pb` is advanced
/// once per successfully fetched feed (inside `fetch_rss`).
pub async fn run_tasks(
    sources: Vec<String>,
    count: u8,
    skip: u8,
    pb: &ProgressBar,
) -> Vec<FeedItem> {
    let fetch_futures: Vec<_> = sources.iter().map(|url| fetch_rss(url, pb)).collect();
    let channels = join_all(fetch_futures).await;

    let mut all_items = Vec::new();
    for channel in channels.into_iter().filter_map(Result::ok) {
        all_items.extend(parse_feed(&channel));
    }

    // Newest first.
    all_items.sort_by(|a, b| b.pub_date.cmp(&a.pub_date));

    // Do the window arithmetic in usize: `count + skip` as u8 overflows for
    // large settings (panic in debug builds, silent wrap in release).
    let skip = usize::from(skip).min(all_items.len());
    let end = (skip + usize::from(count)).min(all_items.len());
    all_items.truncate(end);
    // `skip <= all_items.len()` after the clamp above, so `split_off` cannot
    // panic even when fewer items were fetched than the caller asked to skip.
    all_items.split_off(skip)
}

View file

@ -1,86 +1,10 @@
use chrono::{DateTime, Utc}; use indicatif::ProgressStyle;
use clap::Parser;
use futures::future::join_all;
use indicatif::{ProgressBar, ProgressStyle};
use reqwest::get;
use rss::Channel;
use serde::Deserialize;
use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::fs;
use terminal_link::Link; use terminal_link::Link;
use tokio; use tokio;
use toml;
use xdg::BaseDirectories;
#[derive(Parser, Debug)] mod config;
#[command(author, version, about, long_about = None)] mod data;
struct Cli {
#[arg(short, long)]
verbose: bool,
#[arg(short, long)]
count: Option<u8>,
#[arg(short = 'l', long)]
selected_list: Option<String>,
#[arg(short, long)]
skip_amount: Option<u8>,
}
#[derive(Debug, Deserialize)]
struct Config {
count: Option<u8>,
skip_amount: Option<u8>,
selected_list: Option<String>,
lists: HashMap<String, Vec<String>>,
}
#[derive(Debug)]
struct FeedItem {
title: String,
description: String,
link: String,
pub_date: DateTime<Utc>,
}
async fn fetch_rss(url: &str, pb: &ProgressBar) -> Result<Channel, Box<dyn Error>> {
let response = get(url).await?.text().await?;
let channel = Channel::read_from(response.as_bytes())?;
pb.inc(1);
pb.set_message(format!("Processing: {}", channel.title));
Ok(channel)
}
fn parse_feed(channel: &Channel) -> Vec<FeedItem> {
channel
.items()
.iter()
.map(|item| FeedItem {
title: item.title().unwrap_or("No title").to_string(),
description: item.description().unwrap_or("No description").to_string(),
link: item.link().unwrap_or("No link").to_string(),
pub_date: item
.pub_date()
.and_then(|date_str| DateTime::parse_from_rfc2822(date_str).ok())
.map(|dt| dt.with_timezone(&Utc))
.unwrap_or_else(|| Utc::now()),
})
.collect()
}
fn validate_config() -> Config {
let xdg_dirs = BaseDirectories::new().expect("Failed to get XDG directories");
let config_path = xdg_dirs
.place_config_file("packard/config.toml")
.expect("Failed to determine config file path");
if !config_path.exists() {
eprintln!("Configuration file not found at {:?}", config_path);
}
let config_content = fs::read_to_string(&config_path).expect("Failed to read config file");
let config: Config = toml::de::from_str(&config_content).expect("Failed to parse TOML");
config
}
fn trim_chars(input: &str) -> String { fn trim_chars(input: &str) -> String {
let trimmed: String = input.chars().take(256).collect(); let trimmed: String = input.chars().take(256).collect();
@ -92,29 +16,10 @@ fn trim_chars(input: &str) -> String {
} }
} }
async fn run_tasks(sources: Vec<String>, count: u8, skip: u8, pb: &ProgressBar) -> Vec<FeedItem> {
let fetch_futures: Vec<_> = sources.iter().map(|url| fetch_rss(url, &pb)).collect();
let channels = join_all(fetch_futures).await;
let mut all_items = Vec::new();
for channel in channels.into_iter().filter_map(Result::ok) {
let feed_items = parse_feed(&channel);
all_items.extend(feed_items);
}
all_items.sort_by(|a, b| b.pub_date.cmp(&a.pub_date));
all_items.truncate((count + skip).into());
let removed_items = all_items.split_off(skip.into());
removed_items
}
#[tokio::main] #[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> { async fn main() -> Result<(), Box<dyn Error>> {
let config = validate_config(); let config = config::validate_config();
let args = Cli::parse(); let args = config::parse_cli();
if args.verbose { if args.verbose {
println!("{:?}", args); println!("{:?}", args);
@ -157,7 +62,7 @@ async fn main() -> Result<(), Box<dyn Error>> {
ProgressStyle::with_template("[{elapsed}] {bar:40.green/black} {msg}").unwrap(), ProgressStyle::with_template("[{elapsed}] {bar:40.green/black} {msg}").unwrap(),
); );
let all_items = run_tasks(values.to_vec(), count, skip_amount, &pb).await; let all_items = data::run_tasks(values.to_vec(), count, skip_amount, &pb).await;
pb.finish_and_clear(); pb.finish_and_clear();
for item in all_items { for item in all_items {