diff --git a/src/feed/find.rs b/src/feed/find.rs
index 1ec3a2c..5817abd 100644
--- a/src/feed/find.rs
+++ b/src/feed/find.rs
@@ -14,8 +14,10 @@ pub fn find(site: &str, channel_name: &str, e: &NetworkEnv) -> Result<String> {
     let channel_url = format!("{}{}", site, channel_name);
     let response = (e.fetch_as_text)(&channel_url)
         .context(format!("Fetching channel to find RSS: {}", channel_url))?;
+    let rss_selector = scraper::Selector::parse("link[title='RSS']")
+        .map_err(|e| anyhow!("Invalid selector: {}", e))?;
     let rss_url = scraper::Html::parse_document(&response)
-        .select(&scraper::Selector::parse("link[title='RSS']").unwrap())
+        .select(&rss_selector)
         .next()
         .context("No RSS link found")?
         .value()
diff --git a/src/lib.rs b/src/lib.rs
index 0b1c117..4fc0976 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -29,11 +29,11 @@ pub fn run(site: &str, a: &Args, e: Env) -> Result<()> {
             .context("Fetching channel")?
            .entries()
         {
-            if let Some(link) = entry.links().get(0).cloned() {
-                if !history::find(&link, &a.history, &e.file).context("Finding history")? {
+            if let Some(link) = entry.links().first() {
+                if !history::find(link, &a.history, &e.file).context("Finding history")? {
                     println!("Downloading {}: {}", &channel_name, entry.title().as_str());
                     (e.network.download_as_mp3)(&link.href).context("Downloading as MP3")?;
-                    history::add(&link, &a.history, &e.file).context("Adding to history")?;
+                    history::add(link, &a.history, &e.file).context("Adding to history")?;
                 }
             }
         }
@@ -64,13 +64,12 @@ mod tests {
 
         // two channels in subscriptions.txt
         let subs_file_name = "subs";
-        let subs_dir =
-            create_text_file(subs_file_name, "@channel1\nignore me\n@channel2".as_bytes())?;
+        let subs_dir = create_text_file(subs_file_name, b"@channel1\nignore me\n@channel2")?;
         let subs_file_name = format!("{}/{}", subs_dir.path().to_string_lossy(), subs_file_name);
 
         // one item from each channel is already listed in the downloads.txt file
         let history_file_name = "history";
-        let history_dir = create_text_file(history_file_name, "c1-f2\nc2-f3".as_bytes())?;
+        let history_dir = create_text_file(history_file_name, b"c1-f2\nc2-f3")?;
 
         let history_file_name = format!(
             "{}/{}",
diff --git a/src/test_utils.rs b/src/test_utils.rs
index fac64f9..3ebae16 100644
--- a/src/test_utils.rs
+++ b/src/test_utils.rs
@@ -26,7 +26,10 @@ pub fn create_text_file(name: &str, data: &[u8]) -> Result<TempDir> {
 }
 
 pub fn read_text_file(path: &Path, file_name: &str) -> Result<Vec<String>> {
-    let file_name = format!("{}/{}", path.to_str().unwrap(), file_name);
+    let path = path
+        .to_str()
+        .ok_or_else(|| anyhow!("Path has non-utf8 character(s)"))?;
+    let file_name = format!("{}/{}", path, file_name);
     Ok(read_to_string(file_name)?
         .lines()
         .map(String::from)
@@ -35,27 +38,21 @@ pub fn read_text_file(path: &Path, file_name: &str) -> Result<Vec<String>> {
 pub fn mock_fetch_as_text_with_rss_url(
     map: HashMap<&'static str, &'static str>,
 ) -> NetworkFetchAsTextFn {
-    Box::new(move |url: &str| match map.get(url) {
-        Some(url) => Ok(format!(
-            r#"
-            <html>
-                <link title="RSS" href="{}">
-            </html>
-            "#,
-            url
-        )),
-        None => Err(anyhow!("Unexpected request for {}", url)),
+    Box::new(move |url: &str| {
+        map.get(url).map_or_else(
+            || Err(anyhow!("Unexpected request for {}", url)),
+            |url| Ok(format!(r#"<link title="RSS" href="{}">"#, url)),
+        )
     })
 }
 pub fn mock_network_fetch_as_bytes_with_rss_entries(
     feeds: HashMap<String, String>,
 ) -> NetworkFetchAsBytesFn {
     Box::new(move |url| {
-        if let Some(feed) = feeds.get(url).cloned() {
-            Ok(bytes::Bytes::from(feed))
-        } else {
-            Err(anyhow!("No mock feed: {}", url))
-        }
+        feeds.get(url).cloned().map_or_else(
+            || Err(anyhow!("No mock feed: {}", url)),
+            |feed| Ok(bytes::Bytes::from(feed)),
+        )
     })
 }
 pub fn mock_file_open(real_paths: HashMap<String, String>) -> FileOpenFn {
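
The common thread in these changes is swapping panicking or verbose branching (`.unwrap()`, `match`, `if let`/`else`) for explicit fallible combinators. A minimal, self-contained sketch of the `Option::map_or_else` pattern the two mocks now use; the names below are illustrative and not taken from this crate:

    use std::collections::HashMap;

    // Fallback-or-transform lookup: the first closure handles the None case
    // (the error fallback), the second handles Some (the success transform).
    fn lookup(map: &HashMap<&str, &str>, key: &str) -> Result<String, String> {
        map.get(key).map_or_else(
            || Err(format!("Unexpected request for {}", key)),
            |value| Ok(value.to_string()),
        )
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert("a", "1");
        assert_eq!(lookup(&map, "a"), Ok("1".to_string()));
        assert!(lookup(&map, "b").is_err());
    }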