use params::Args;
use prelude::*;

pub mod feed;
pub mod file;
pub mod history;
pub mod network;
pub mod params;
pub mod prelude;
#[cfg(test)]
mod test_utils;

use file::FileEnv;
use network::NetworkEnv;

/// Side-effecting dependencies (network and file access) injected into `run`,
/// so tests can substitute mocks.
pub struct Env {
    pub network: NetworkEnv,
    pub file: FileEnv,
}

/// Reads the channel subscriptions, resolves each channel's feed URL, and
/// downloads any feed entries not already recorded in the history file.
pub fn run(site: &str, a: &Args, e: Env) -> Result<()> {
    for channel_name in
        file::read::lines_from(&a.subscriptions, &e.file).context("Reading subscriptions")?
    {
        println!("Channel: {}", channel_name);
        let feed_url = feed::find(site, &channel_name, &e.network).context("Finding channel")?;
        for entry in feed::get(&feed_url, &e.network)
            .context("Fetching channel")?
            .entries()
        {
            if let Some(link) = entry.links().get(0).cloned() {
                if !history::find(&link, &a.history, &e.file).context("Finding history")? {
                    println!("Downloading {}: {}", &channel_name, entry.title().as_str());
                    (e.network.download_as_mp3)(&link.href).context("Downloading as MP3")?;
                    history::add(&link, &a.history, &e.file).context("Adding to history")?;
                }
            }
        }
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use std::{collections::HashMap, sync::mpsc};

    use atom_syndication::{Entry, EntryBuilder, Feed, FeedBuilder, LinkBuilder, Text};

    use crate::test_utils::{
        create_text_file, mock_fetch_as_text_with_rss_url, mock_file_append_line, mock_file_open,
        mock_network_download_as_mp3, mock_network_fetch_as_bytes_with_rss_entries,
    };
    use pretty_assertions::assert_eq;

    use super::*;

    #[test]
    fn downloads_two_items_from_two_feeds_ignoring_existing_items_in_history() -> Result<()> {
        //given
        let site = "http://example.com/";
        let (tx, rx) = mpsc::channel::<String>(); // channel to receive notice of downloaded urls

        // two channels in subscriptions.txt
        let subs_file_name = "subs";
        let subs_dir =
            create_text_file(subs_file_name, "@channel1\nignore me\n@channel2".as_bytes())?;
        let subs_file_name = format!("{}/{}", subs_dir.path().to_string_lossy(), subs_file_name);

        // one item from each channel is already listed in the downloads.txt file
        let history_file_name = "history";
        let history_dir = create_text_file(history_file_name, "c1-f2\nc2-f3".as_bytes())?;
        let history_file_name = format!(
            "{}/{}",
            history_dir.path().to_string_lossy(),
            history_file_name
        );

        let args = Args {
            downloads: subs_dir.path().to_string_lossy().to_string(),
            history: history_file_name.clone(),
            subscriptions: subs_file_name.clone(),
        };
        let env = Env {
            network: NetworkEnv {
                fetch_as_text: mock_fetch_as_text_with_rss_url(HashMap::from([
                    ("http://example.com/@channel1", "rss-feed-1"),
                    ("http://example.com/@channel2", "rss-feed-2"),
                ])),
                fetch_as_bytes: mock_network_fetch_as_bytes_with_rss_entries(HashMap::from([
                    (
                        "rss-feed-1".into(),
                        feed_with_three_links("c1-f1", "c1-f2", "c1-f3").to_string(),
                    ),
                    (
                        "rss-feed-2".into(),
                        feed_with_three_links("c2-f1", "c2-f2", "c2-f3").to_string(),
                    ),
                ])),
                download_as_mp3: mock_network_download_as_mp3(tx),
            },
            file: FileEnv {
                open: mock_file_open(HashMap::from([
                    (subs_file_name.to_string(), subs_file_name),
                    (history_file_name.to_string(), history_file_name),
                ])),
                append_line: mock_file_append_line(),
            },
        };

        //when
        run(site, &args, env)?;

        //then
        drop(subs_dir);
        drop(history_dir);
        let mut downloads: Vec<String> = vec![];
        for m in rx {
            downloads.push(m);
        }
        assert_eq!(downloads, vec!["c1-f1", "c1-f3", "c2-f1", "c2-f2"]);
        Ok(())
    }

    fn entry_with_link(link: &str, title: &str) -> Entry {
        EntryBuilder::default()
            .links(vec![LinkBuilder::default().href(link.to_string()).build()])
            .title(Text::from(title))
            .build()
    }

    fn feed_with_three_links(l1: &str, l2: &str, l3: &str) -> Feed {
        FeedBuilder::default()
            .entries(vec![
                entry_with_link(l1, "l1"),
                entry_with_link(l2, "l2"),
                entry_with_link(l3, "l3"),
            ])
            .build()
    }
}