diff --git a/Cargo.lock b/Cargo.lock
index 831201d..a82ccaa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1485,6 +1485,7 @@ name = "paperoni"
 version = "0.3.0-alpha1"
 dependencies = [
  "async-std",
+ "chrono",
  "clap",
  "colored",
  "comfy-table",
diff --git a/Cargo.toml b/Cargo.toml
index af15d18..655fbb7 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,6 +14,7 @@ readme = "README.md"
 [dependencies]
 async-std = "1.9.0"
 # atty = "0.2.14"
+chrono = "0.4.19"
 clap = "2.33.3"
 colored = "2.0.0"
 comfy-table = "2.1.0"
diff --git a/src/cli.rs b/src/cli.rs
index 9e4b62e..abf8226 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -1,6 +1,10 @@
-use std::{fs::File, io::Read};
+use std::{fs::File, io::Read, path::Path};
 
+use chrono::{DateTime, Local};
 use clap::{App, AppSettings, Arg};
+use flexi_logger::LevelFilter as LogLevel;
+
+use crate::logs::init_logger;
 
 pub fn cli_init() -> AppConfig {
     let app = App::new("paperoni")
@@ -12,7 +16,7 @@ pub fn cli_init() -> AppConfig {
         .about(
             "
 Paperoni is an article downloader.
-It takes a url and downloads the article content from it and saves it to an epub.
+It takes a url, downloads the article content from it and saves it to an epub.
 ",
         )
         .arg(
@@ -40,11 +44,27 @@ It takes a url and downloads the article content from it and saves it to an epub
                 .long_help("The maximum number of concurrent HTTP connections when downloading articles. Default is 8.\nNOTE: It is advised to use as few connections as needed i.e between 1 and 50. Using more connections can end up overloading your network card with too many concurrent requests.")
                 .takes_value(true))
         .arg(
-            Arg::with_name("debug")
-                .long("debug")
-                .help("Enable logging of events for debugging")
+            Arg::with_name("verbosity")
+                .short("v")
+                .multiple(true)
+                .help("Enables logging of events and sets the verbosity level. Use --help to read more on its usage")
+                .long_help(
+"This takes up to 4 levels of verbosity in the following order.
+    - Error (-v)
+    - Warn (-vv)
+    - Info (-vvv)
+    - Debug (-vvvv)
+When this flag is passed, it disables the progress bars and logs to stderr.
+If you would like to send the logs to a file (and enable progress bars), pass the log-to-file flag."
+                )
+                .takes_value(false))
+        .arg(
+            Arg::with_name("log-to-file")
+                .long("log-to-file")
+                .help("Enables logging of events to a file located in .paperoni/logs with a default log level of debug. Use -v to specify the logging level")
                 .takes_value(false));
     let arg_matches = app.get_matches();
+
     let mut urls: Vec<String> = match arg_matches.value_of("file") {
         Some(file_name) => {
             if let Ok(mut file) = File::open(file_name) {
@@ -81,17 +101,43 @@ It takes a url and downloads the article content from it and saves it to an epub
     let mut app_config = AppConfig::new(max_conn);
     app_config.set_urls(urls);
+
     if let Some(name) = arg_matches.value_of("output_name") {
+        let file_path = Path::new(name);
+        if !file_path.is_file() {
+            eprintln!("{:?} is not a valid file", name);
+            std::process::exit(1);
+        }
+
         let file_name = if name.ends_with(".epub") && name.len() > 5 {
             name.to_owned()
         } else {
             name.to_owned() + ".epub"
         };
-        app_config.set_merged(file_name);
+        app_config.merged = Some(file_name);
     }
-    if arg_matches.is_present("debug") {
-        app_config.toggle_debug(true);
+
+    if arg_matches.is_present("verbosity") {
+        if !arg_matches.is_present("log-to-file") {
+            app_config.can_disable_progress_bar = true;
+        }
+        let log_levels: [LogLevel; 5] = [
+            LogLevel::Off,
+            LogLevel::Error,
+            LogLevel::Warn,
+            LogLevel::Info,
+            LogLevel::Debug,
+        ];
+        let level = arg_matches.occurrences_of("verbosity").clamp(0, 4) as usize;
+        app_config.log_level = log_levels[level];
     }
+    if arg_matches.is_present("log-to-file") {
+        app_config.log_level = LogLevel::Debug;
+        app_config.is_logging_to_file = true;
+    }
+
+    init_logger(&app_config);
+
     app_config
 }
@@ -99,7 +145,10 @@ pub struct AppConfig {
     urls: Vec<String>,
     max_conn: usize,
     merged: Option<String>,
-    is_debug: bool,
+    log_level: LogLevel,
+    can_disable_progress_bar: bool,
+    start_time: DateTime<Local>,
+    is_logging_to_file: bool,
 }
 
 impl AppConfig {
@@ -108,22 +157,17 @@ impl AppConfig {
             urls: vec![],
             max_conn,
             merged: None,
-            is_debug: false,
+            log_level: LogLevel::Off,
+            can_disable_progress_bar: false,
+            start_time: Local::now(),
+            is_logging_to_file: false,
         }
     }
 
-    fn toggle_debug(&mut self, is_debug: bool) {
-        self.is_debug = is_debug;
-    }
-
     fn set_urls(&mut self, urls: Vec<String>) {
         self.urls.extend(urls);
     }
 
-    fn set_merged(&mut self, name: String) {
-        self.merged = Some(name);
-    }
-
     pub fn urls(&self) -> &Vec<String> {
         &self.urls
     }
@@ -135,7 +179,19 @@ impl AppConfig {
         self.merged.as_ref()
     }
 
-    pub fn is_debug(&self) -> bool {
-        self.is_debug
+    pub fn log_level(&self) -> LogLevel {
+        self.log_level
+    }
+
+    pub fn can_disable_progress_bar(&self) -> bool {
+        self.can_disable_progress_bar
+    }
+
+    pub fn start_time(&self) -> &DateTime<Local> {
+        &self.start_time
+    }
+
+    pub fn is_logging_to_file(&self) -> bool {
+        self.is_logging_to_file
     }
 }
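Aside, not part of the patch: a minimal sketch of the `-v` handling that `cli.rs` now performs, shown in isolation. It assumes the level order documented in the long help (`-v` = Error through `-vvvv` = Debug); the function name is illustrative, and `LevelFilter` is the re-export that `cli.rs` imports as `LogLevel`.

```rust
use flexi_logger::LevelFilter;

// Map the number of `-v` occurrences to a log level by clamping into a
// fixed lookup table, mirroring the clamp-and-index approach in cli_init.
fn verbosity_to_level(occurrences: u64) -> LevelFilter {
    const LEVELS: [LevelFilter; 5] = [
        LevelFilter::Off,   // flag absent
        LevelFilter::Error, // -v
        LevelFilter::Warn,  // -vv
        LevelFilter::Info,  // -vvv
        LevelFilter::Debug, // -vvvv
    ];
    LEVELS[occurrences.clamp(0, 4) as usize]
}
```

clap 2.x returns the flag count as a `u64` from `occurrences_of`, so this plugs directly into the `arg_matches.occurrences_of("verbosity")` call site.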
diff --git a/src/epub.rs b/src/epub.rs
index 87d6106..75f2b9e 100644
--- a/src/epub.rs
+++ b/src/epub.rs
@@ -6,27 +6,33 @@
 use indicatif::{ProgressBar, ProgressStyle};
 use log::{debug, info};
 
 use crate::{
+    cli::AppConfig,
     errors::PaperoniError,
     extractor::{self, Extractor},
 };
 
 pub fn generate_epubs(
     articles: Vec<Extractor>,
-    merged: Option<&String>,
+    app_config: &AppConfig,
     successful_articles_table: &mut Table,
 ) -> Result<(), Vec<PaperoniError>> {
-    let bar = ProgressBar::new(articles.len() as u64);
-    let style = ProgressStyle::default_bar().template(
+    let bar = if app_config.can_disable_progress_bar() {
+        ProgressBar::hidden()
+    } else {
+        let enabled_bar = ProgressBar::new(articles.len() as u64);
+        let style = ProgressStyle::default_bar().template(
         "{spinner:.cyan} [{elapsed_precise}] {bar:40.white} {:>8} epub {pos}/{len:7} {msg:.green}",
     );
-    bar.set_style(style);
-    if !articles.is_empty() {
-        bar.set_message("Generating epubs");
-    }
+        enabled_bar.set_style(style);
+        if !articles.is_empty() {
+            enabled_bar.set_message("Generating epubs");
+        }
+        enabled_bar
+    };
 
     let mut errors: Vec<PaperoniError> = Vec::new();
-    match merged {
+    match app_config.merged() {
         Some(name) => {
             successful_articles_table.set_header(vec![Cell::new("Table of Contents")
                 .add_attribute(Attribute::Bold)
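For illustration only, not in the diff: the hidden-versus-styled bar construction above is repeated in `main.rs` further down. A sketch of that pattern as a standalone helper, with an illustrative name and parameters; it assumes the indicatif version used here, where `template` returns the style directly.

```rust
use indicatif::{ProgressBar, ProgressStyle};

// Build either a hidden bar or a styled, visible one. A hidden bar accepts the
// same calls (set_style, set_message, inc, finish), so call sites stay identical.
fn new_progress_bar(len: u64, hidden: bool, template: &str) -> ProgressBar {
    if hidden {
        ProgressBar::hidden()
    } else {
        let bar = ProgressBar::new(len);
        bar.set_style(ProgressStyle::default_bar().template(template));
        bar
    }
}
```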
enabled_bar.set_message("Generating epubs"); + } + enabled_bar + }; let mut errors: Vec = Vec::new(); - match merged { + match app_config.merged() { Some(name) => { successful_articles_table.set_header(vec![Cell::new("Table of Contents") .add_attribute(Attribute::Bold) diff --git a/src/logs.rs b/src/logs.rs index 84e97e0..87b5d1b 100644 --- a/src/logs.rs +++ b/src/logs.rs @@ -2,24 +2,28 @@ use colored::*; use comfy_table::presets::UTF8_HORIZONTAL_BORDERS_ONLY; use comfy_table::{Cell, CellAlignment, ContentArrangement, Table}; use directories::UserDirs; +use flexi_logger::LogSpecBuilder; use log::error; -use crate::errors::PaperoniError; +use crate::{cli::AppConfig, errors::PaperoniError}; pub fn display_summary( initial_article_count: usize, succesful_articles_table: Table, + partial_downloads_count: usize, errors: Vec, ) { - let successfully_downloaded_count = initial_article_count - errors.len(); + let successfully_downloaded_count = + initial_article_count - partial_downloads_count - errors.len(); println!( "{}", - short_summary( + short_summary(DownloadCount::new( initial_article_count, successfully_downloaded_count, + partial_downloads_count, errors.len() - ) + )) .bold() ); @@ -50,49 +54,128 @@ pub fn display_summary( } /// Returns a string summary of the total number of failed and successful article downloads -fn short_summary(initial_count: usize, successful_count: usize, failed_count: usize) -> String { - if initial_count != successful_count + failed_count { +fn short_summary(download_count: DownloadCount) -> String { + // TODO: Refactor this + if download_count.total + != download_count.successful + download_count.failed + download_count.partial + { panic!("initial_count must be equal to the sum of failed and successful count") } let get_noun = |count: usize| if count == 1 { "article" } else { "articles" }; - if successful_count == initial_count && successful_count == 1 { + if download_count.successful == download_count.total && download_count.successful == 1 { "Article downloaded successfully".green().to_string() - } else if initial_count == failed_count && failed_count == 1 { + } else if download_count.total == download_count.failed && download_count.failed == 1 { "Article failed to download".red().to_string() - } else if successful_count == initial_count { + } else if download_count.total == download_count.partial && download_count.partial == 1 { + "Article partially failed to download".yellow().to_string() + } else if download_count.successful == download_count.total { "All articles downloaded successfully".green().to_string() - } else if successful_count == 0 { + } else if download_count.failed == download_count.total { "All articles failed to download".red().to_string() - } else { + } else if download_count.partial == download_count.total { + "All articles partially failed to download" + .yellow() + .to_string() + } else if download_count.partial == 0 { format!( "{} {} downloaded successfully, {} {} failed", - successful_count, - get_noun(successful_count), - failed_count, - get_noun(failed_count) + download_count.successful, + get_noun(download_count.successful), + download_count.failed, + get_noun(download_count.failed) + ) + .yellow() + .to_string() + } else if download_count.successful == 0 + && download_count.partial > 0 + && download_count.failed > 0 + { + format!( + "{} {} partially failed to download, {} {} failed", + download_count.partial, + get_noun(download_count.partial), + download_count.failed, + get_noun(download_count.failed) + ) + .yellow() + .to_string() 
+    } else if download_count.failed == 0
+        && download_count.successful > 0
+        && download_count.partial > 0
+    {
+        format!(
+            "{} {} downloaded successfully, {} {} partially failed to download",
+            download_count.successful,
+            get_noun(download_count.successful),
+            download_count.partial,
+            get_noun(download_count.partial)
+        )
+        .yellow()
+        .to_string()
+    } else {
+        format!(
+            "{} {} downloaded successfully, {} {} partially failed to download, {} {} failed",
+            download_count.successful,
+            get_noun(download_count.successful),
+            download_count.partial,
+            get_noun(download_count.partial),
+            download_count.failed,
+            get_noun(download_count.failed)
         )
         .yellow()
         .to_string()
     }
 }
 
-pub fn init_logger() {
+struct DownloadCount {
+    total: usize,
+    successful: usize,
+    partial: usize,
+    failed: usize,
+}
+impl DownloadCount {
+    fn new(total: usize, successful: usize, partial: usize, failed: usize) -> Self {
+        Self {
+            total,
+            successful,
+            partial,
+            failed,
+        }
+    }
+}
+
+pub fn init_logger(app_config: &AppConfig) {
     match UserDirs::new() {
         Some(user_dirs) => {
             let home_dir = user_dirs.home_dir();
             let paperoni_dir = home_dir.join(".paperoni");
             let log_dir = paperoni_dir.join("logs");
-            if !paperoni_dir.is_dir() || !log_dir.is_dir() {
-                std::fs::create_dir_all(&log_dir).expect(
-                    "Unable to create paperoni directories on home directory for logging purposes",
-                );
+
+            let log_spec = LogSpecBuilder::new()
+                .module("paperoni", app_config.log_level())
+                .build();
+            let formatted_timestamp = app_config.start_time().format("%Y-%m-%d_%H-%M-%S");
+            let mut logger = flexi_logger::Logger::with(log_spec);
+
+            if app_config.is_logging_to_file() && (!paperoni_dir.is_dir() || !log_dir.is_dir()) {
+                match std::fs::create_dir_all(&log_dir) {
+                    Ok(_) => (),
+                    Err(e) => {
+                        eprintln!("Unable to create paperoni directories on home directory for logging purposes\n{}", e);
+                        std::process::exit(1);
+                    }
+                };
             }
-            match flexi_logger::Logger::with_str("paperoni=debug")
-                .directory(log_dir)
-                .log_to_file()
-                .print_message()
-                .start()
-            {
+
+            if app_config.is_logging_to_file() {
+                logger = logger
+                    .directory(log_dir)
+                    .discriminant(formatted_timestamp.to_string())
+                    .suppress_timestamp()
+                    .log_to_file();
+            }
+
+            match logger.start() {
                 Ok(_) => (),
                 Err(e) => eprintln!("Unable to start logger!\n{}", e),
             }
@@ -103,44 +186,68 @@
 #[cfg(test)]
 mod tests {
-    use super::short_summary;
+    use super::{short_summary, DownloadCount};
     use colored::*;
     #[test]
     fn test_short_summary() {
         assert_eq!(
-            short_summary(1, 1, 0),
+            short_summary(DownloadCount::new(1, 1, 0, 0)),
             "Article downloaded successfully".green().to_string()
         );
         assert_eq!(
-            short_summary(1, 0, 1),
+            short_summary(DownloadCount::new(1, 0, 0, 1)),
             "Article failed to download".red().to_string()
         );
         assert_eq!(
-            short_summary(10, 10, 0),
+            short_summary(DownloadCount::new(10, 10, 0, 0)),
            "All articles downloaded successfully".green().to_string()
         );
         assert_eq!(
-            short_summary(10, 0, 10),
+            short_summary(DownloadCount::new(10, 0, 0, 10)),
             "All articles failed to download".red().to_string()
         );
         assert_eq!(
-            short_summary(10, 8, 2),
+            short_summary(DownloadCount::new(10, 8, 0, 2)),
             "8 articles downloaded successfully, 2 articles failed"
                 .yellow()
                 .to_string()
         );
         assert_eq!(
-            short_summary(10, 1, 9),
+            short_summary(DownloadCount::new(10, 1, 0, 9)),
             "1 article downloaded successfully, 9 articles failed"
                 .yellow()
                 .to_string()
         );
         assert_eq!(
-            short_summary(7, 6, 1),
+            short_summary(DownloadCount::new(7, 6, 0, 1)),
             "6 articles downloaded successfully, 1 article failed"
                 .yellow()
                 .to_string()
         );
+        assert_eq!(
+            short_summary(DownloadCount::new(7, 4, 2, 1)),
+            "4 articles downloaded successfully, 2 articles partially failed to download, 1 article failed"
+                .yellow()
+                .to_string()
+        );
+        assert_eq!(
+            short_summary(DownloadCount::new(12, 6, 6, 0)),
+            "6 articles downloaded successfully, 6 articles partially failed to download"
+                .yellow()
+                .to_string()
+        );
+        assert_eq!(
+            short_summary(DownloadCount::new(5, 0, 4, 1)),
+            "4 articles partially failed to download, 1 article failed"
+                .yellow()
+                .to_string()
+        );
+        assert_eq!(
+            short_summary(DownloadCount::new(4, 0, 4, 0)),
+            "All articles partially failed to download"
+                .yellow()
+                .to_string()
+        );
     }
 
     #[test]
@@ -148,6 +255,6 @@
         expected = "initial_count must be equal to the sum of failed and successful count"
     )]
     fn test_short_summary_panics_on_invalid_input() {
-        short_summary(0, 12, 43);
+        short_summary(DownloadCount::new(0, 12, 0, 43));
     }
 }
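Aside, not part of the patch: `short_summary` now spells out every combination of successful, partial, and failed counts, and the new `// TODO: Refactor this` marks it for cleanup. One possible direction, sketched only for the mixed-count branches; it reuses the `DownloadCount` introduced above and leaves out the colouring and the single-article and "All articles" special cases.

```rust
// Sketch: assemble the summary from whichever counts are non-zero instead of
// enumerating each combination. Callers would still colour the result and
// handle the "Article ..." / "All articles ..." wording separately.
fn mixed_summary(count: &DownloadCount) -> String {
    let noun = |n: usize| if n == 1 { "article" } else { "articles" };
    let mut parts = Vec::new();
    if count.successful > 0 {
        parts.push(format!(
            "{} {} downloaded successfully",
            count.successful,
            noun(count.successful)
        ));
    }
    if count.partial > 0 {
        parts.push(format!(
            "{} {} partially failed to download",
            count.partial,
            noun(count.partial)
        ));
    }
    if count.failed > 0 {
        parts.push(format!("{} {} failed", count.failed, noun(count.failed)));
    }
    parts.join(", ")
}
```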
failed" .yellow() .to_string() ); + assert_eq!( + short_summary(DownloadCount::new(7, 4, 2, 1)), + "4 articles downloaded successfully, 2 articles partially failed to download, 1 article failed" + .yellow() + .to_string() + ); + assert_eq!( + short_summary(DownloadCount::new(12, 6, 6, 0)), + "6 articles downloaded successfully, 6 articles partially failed to download" + .yellow() + .to_string() + ); + assert_eq!( + short_summary(DownloadCount::new(5, 0, 4, 1)), + "4 articles partially failed to download, 1 article failed" + .yellow() + .to_string() + ); + assert_eq!( + short_summary(DownloadCount::new(4, 0, 4, 0)), + "All articles partially failed to download" + .yellow() + .to_string() + ); } #[test] @@ -148,6 +255,6 @@ mod tests { expected = "initial_count must be equal to the sum of failed and successful count" )] fn test_short_summary_panics_on_invalid_input() { - short_summary(0, 12, 43); + short_summary(DownloadCount::new(0, 12, 0, 43)); } } diff --git a/src/main.rs b/src/main.rs index b5cd770..0f8b34a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -24,27 +24,30 @@ use cli::AppConfig; use epub::generate_epubs; use extractor::Extractor; use http::{download_images, fetch_html}; -use logs::{display_summary, init_logger}; +use logs::display_summary; fn main() { let app_config = cli::cli_init(); if !app_config.urls().is_empty() { - if app_config.is_debug() { - init_logger(); - } download(app_config); } } fn download(app_config: AppConfig) { - let bar = ProgressBar::new(app_config.urls().len() as u64); let mut errors = Vec::new(); - let style = ProgressStyle::default_bar().template( + let mut partial_download_count: usize = 0; + let bar = if app_config.can_disable_progress_bar() { + ProgressBar::hidden() + } else { + let enabled_bar = ProgressBar::new(app_config.urls().len() as u64); + let style = ProgressStyle::default_bar().template( "{spinner:.cyan} [{elapsed_precise}] {bar:40.white} {:>8} link {pos}/{len:7} {msg:.yellow/white}", ); - bar.set_style(style); - bar.enable_steady_tick(500); + enabled_bar.set_style(style); + enabled_bar.enable_steady_tick(500); + enabled_bar + }; let articles = task::block_on(async { let urls_iter = app_config.urls().iter().map(|url| fetch_html(url)); let mut responses = stream::from_iter(urls_iter).buffered(app_config.max_conn()); @@ -62,6 +65,7 @@ fn download(app_config: AppConfig) { download_images(&mut extractor, &Url::parse(&url).unwrap(), &bar) .await { + partial_download_count += 1; warn!( "{} image{} failed to download for {}", img_errors.len(), @@ -97,14 +101,25 @@ fn download(app_config: AppConfig) { .load_preset(UTF8_FULL) .load_preset(UTF8_HORIZONTAL_BORDERS_ONLY) .set_content_arrangement(ContentArrangement::Dynamic); - match generate_epubs(articles, app_config.merged(), &mut succesful_articles_table) { + match generate_epubs(articles, &app_config, &mut succesful_articles_table) { Ok(_) => (), Err(gen_epub_errors) => { errors.extend(gen_epub_errors); } }; let has_errors = !errors.is_empty(); - display_summary(app_config.urls().len(), succesful_articles_table, errors); + display_summary( + app_config.urls().len(), + succesful_articles_table, + partial_download_count, + errors, + ); + if app_config.is_logging_to_file() { + println!( + "Log written to paperoni_{}.log\n", + app_config.start_time().format("%Y-%m-%d_%H-%M-%S") + ); + } if has_errors { std::process::exit(1); }