Add features for logging and cli
- display of partial downloads in the summary
- custom file name that is displayed after the summary, ensuring it is visible
- a log-to-file flag which specifies that logs will be sent to the default directory
- a verbose flag (-v) used to configure the log levels
- disabling of the progress bars when logging to the terminal is active
commit ae52cc4e13
parent 00d704fdd6
6 changed files with 259 additions and 73 deletions
Cargo.lock | 1 (generated)

@@ -1485,6 +1485,7 @@ name = "paperoni"
 version = "0.3.0-alpha1"
 dependencies = [
  "async-std",
+ "chrono",
  "clap",
  "colored",
  "comfy-table",
Cargo.toml

@@ -14,6 +14,7 @@ readme = "README.md"
 [dependencies]
 async-std = "1.9.0"
 # atty = "0.2.14"
+chrono = "0.4.19"
 clap = "2.33.3"
 colored = "2.0.0"
 comfy-table = "2.1.0"
src/cli.rs | 96

@@ -1,6 +1,10 @@
-use std::{fs::File, io::Read};
+use std::{fs::File, io::Read, path::Path};
 
+use chrono::{DateTime, Local};
 use clap::{App, AppSettings, Arg};
+use flexi_logger::LevelFilter as LogLevel;
 
+use crate::logs::init_logger;
+
 pub fn cli_init() -> AppConfig {
     let app = App::new("paperoni")
@@ -12,7 +16,7 @@ pub fn cli_init() -> AppConfig {
         .about(
             "
 Paperoni is an article downloader.
-It takes a url and downloads the article content from it and saves it to an epub.
+It takes a url, downloads the article content from it and saves it to an epub.
 ",
         )
         .arg(
@@ -40,11 +44,27 @@ It takes a url and downloads the article content from it and saves it to an epub
             .long_help("The maximum number of concurrent HTTP connections when downloading articles. Default is 8.\nNOTE: It is advised to use as few connections as needed i.e between 1 and 50. Using more connections can end up overloading your network card with too many concurrent requests.")
             .takes_value(true))
         .arg(
-            Arg::with_name("debug")
-                .long("debug")
-                .help("Enable logging of events for debugging")
+            Arg::with_name("verbosity")
+                .short("v")
+                .multiple(true)
+                .help("Enables logging of events and set the verbosity level. Use -h to read on its usage")
+                .long_help(
+                    "This takes upto 4 levels of verbosity in the following order.
+ - Error (-v)
+ - Warn (-vv)
+ - Info (-vvv)
+ - Debug (-vvvv)
+When this flag is passed, it disables the progress bars and logs to stderr.
+If you would like to send the logs to a file (and enable progress bars), pass the log-to-file flag."
+                )
+                .takes_value(false))
+        .arg(
+            Arg::with_name("log-to-file")
+                .long("log-to-file")
+                .help("Enables logging of events to a file located in .paperoni/logs with a default log level of debug. Use -v to specify the logging level")
                 .takes_value(false));
     let arg_matches = app.get_matches();
 
     let mut urls: Vec<String> = match arg_matches.value_of("file") {
         Some(file_name) => {
             if let Ok(mut file) = File::open(file_name) {
@@ -81,17 +101,43 @@ It takes a url and downloads the article content from it and saves it to an epub
 
     let mut app_config = AppConfig::new(max_conn);
     app_config.set_urls(urls);
 
     if let Some(name) = arg_matches.value_of("output_name") {
+        let file_path = Path::new(name);
+        if !file_path.is_file() {
+            eprintln!("{:?} is not a vaild file", name);
+            std::process::exit(1);
+        }
+
         let file_name = if name.ends_with(".epub") && name.len() > 5 {
             name.to_owned()
         } else {
             name.to_owned() + ".epub"
         };
-        app_config.set_merged(file_name);
+        app_config.merged = Some(file_name);
     }
-    if arg_matches.is_present("debug") {
-        app_config.toggle_debug(true);
-    }
+    if arg_matches.is_present("verbosity") {
+        if !arg_matches.is_present("log-to-file") {
+            app_config.can_disable_progress_bar = true;
+        }
+        let log_levels: [LogLevel; 5] = [
+            LogLevel::Off,
+            LogLevel::Debug,
+            LogLevel::Info,
+            LogLevel::Warn,
+            LogLevel::Error,
+        ];
+        let level = arg_matches.occurrences_of("verbosity").clamp(0, 4) as usize;
+        app_config.log_level = log_levels[level];
+    }
+    if arg_matches.is_present("log-to-file") {
+        app_config.log_level = LogLevel::Debug;
+        app_config.is_logging_to_file = true;
+    }
+
+    init_logger(&app_config);
+
     app_config
 }
 
@@ -99,7 +145,10 @@ pub struct AppConfig {
     urls: Vec<String>,
     max_conn: usize,
     merged: Option<String>,
-    is_debug: bool,
+    log_level: LogLevel,
+    can_disable_progress_bar: bool,
+    start_time: DateTime<Local>,
+    is_logging_to_file: bool,
 }
 
 impl AppConfig {
@@ -108,22 +157,17 @@ impl AppConfig {
             urls: vec![],
             max_conn,
             merged: None,
-            is_debug: false,
+            log_level: LogLevel::Off,
+            can_disable_progress_bar: false,
+            start_time: Local::now(),
+            is_logging_to_file: false,
         }
     }
 
-    fn toggle_debug(&mut self, is_debug: bool) {
-        self.is_debug = is_debug;
-    }
-
     fn set_urls(&mut self, urls: Vec<String>) {
         self.urls.extend(urls);
     }
 
-    fn set_merged(&mut self, name: String) {
-        self.merged = Some(name);
-    }
-
     pub fn urls(&self) -> &Vec<String> {
         &self.urls
     }
@@ -135,7 +179,19 @@ impl AppConfig {
         self.merged.as_ref()
     }
 
-    pub fn is_debug(&self) -> bool {
-        self.is_debug
+    pub fn log_level(&self) -> LogLevel {
+        self.log_level
+    }
+
+    pub fn can_disable_progress_bar(&self) -> bool {
+        self.can_disable_progress_bar
+    }
+
+    pub fn start_time(&self) -> &DateTime<Local> {
+        &self.start_time
+    }
+
+    pub fn is_logging_to_file(&self) -> bool {
+        self.is_logging_to_file
     }
 }
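For reference, a minimal, self-contained sketch of the verbosity handling the hunks above introduce, using clap 2.x and the flexi_logger LevelFilter re-export just as the diff does. The argument wiring is reduced to the single -v flag, so this is an illustration of the pattern rather than the full cli.rs.

// Sketch only: the -v handling from cli_init, reduced to a single flag.
use clap::{App, Arg};
use flexi_logger::LevelFilter as LogLevel;

fn main() {
    let arg_matches = App::new("paperoni")
        .arg(
            Arg::with_name("verbosity")
                .short("v")
                .multiple(true)
                .takes_value(false),
        )
        .get_matches();

    if arg_matches.is_present("verbosity") {
        // Same lookup as in cli_init: count the -v occurrences and clamp
        // them into the bounds of the level table.
        let log_levels: [LogLevel; 5] = [
            LogLevel::Off,
            LogLevel::Debug,
            LogLevel::Info,
            LogLevel::Warn,
            LogLevel::Error,
        ];
        let level = arg_matches.occurrences_of("verbosity").clamp(0, 4) as usize;
        println!("selected log level: {:?}", log_levels[level]);
    }
}

When log-to-file is also passed, the chosen level is overridden to Debug, as the hunk above shows.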
src/epub.rs | 16

@@ -6,27 +6,33 @@ use indicatif::{ProgressBar, ProgressStyle};
 use log::{debug, info};
 
 use crate::{
+    cli::AppConfig,
     errors::PaperoniError,
     extractor::{self, Extractor},
 };
 
 pub fn generate_epubs(
     articles: Vec<Extractor>,
-    merged: Option<&String>,
+    app_config: &AppConfig,
     successful_articles_table: &mut Table,
 ) -> Result<(), Vec<PaperoniError>> {
-    let bar = ProgressBar::new(articles.len() as u64);
-    let style = ProgressStyle::default_bar().template(
-        "{spinner:.cyan} [{elapsed_precise}] {bar:40.white} {:>8} epub {pos}/{len:7} {msg:.green}",
-    );
-    bar.set_style(style);
-    if !articles.is_empty() {
-        bar.set_message("Generating epubs");
-    }
+    let bar = if app_config.can_disable_progress_bar() {
+        ProgressBar::hidden()
+    } else {
+        let enabled_bar = ProgressBar::new(articles.len() as u64);
+        let style = ProgressStyle::default_bar().template(
+            "{spinner:.cyan} [{elapsed_precise}] {bar:40.white} {:>8} epub {pos}/{len:7} {msg:.green}",
+        );
+        enabled_bar.set_style(style);
+        if !articles.is_empty() {
+            enabled_bar.set_message("Generating epubs");
+        }
+        enabled_bar
+    };
 
     let mut errors: Vec<PaperoniError> = Vec::new();
 
-    match merged {
+    match app_config.merged() {
         Some(name) => {
             successful_articles_table.set_header(vec![Cell::new("Table of Contents")
                 .add_attribute(Attribute::Bold)
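Both epub.rs (above) and main.rs (below) now build their progress bars through the same guard. A minimal sketch of that idiom follows; disable_bar and make_bar are illustrative names standing in for AppConfig::can_disable_progress_bar() and the inline expressions used in the diff.

// Sketch only: the conditional progress bar used in epub.rs and main.rs.
use indicatif::{ProgressBar, ProgressStyle};

fn make_bar(disable_bar: bool, len: u64) -> ProgressBar {
    if disable_bar {
        // Renders nothing, but still accepts the same method calls,
        // so callers do not need to special-case logging mode.
        ProgressBar::hidden()
    } else {
        let enabled_bar = ProgressBar::new(len);
        enabled_bar.set_style(ProgressStyle::default_bar());
        enabled_bar
    }
}

fn main() {
    let bar = make_bar(false, 3);
    for _ in 0..3 {
        bar.inc(1);
    }
    bar.finish();
}

A hidden bar behaves like a normal ProgressBar that never draws, which is why the download and epub code paths stay unchanged whether or not terminal logging is active.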
src/logs.rs | 175

@@ -2,24 +2,28 @@ use colored::*;
 use comfy_table::presets::UTF8_HORIZONTAL_BORDERS_ONLY;
 use comfy_table::{Cell, CellAlignment, ContentArrangement, Table};
 use directories::UserDirs;
+use flexi_logger::LogSpecBuilder;
 use log::error;
 
-use crate::errors::PaperoniError;
+use crate::{cli::AppConfig, errors::PaperoniError};
 
 pub fn display_summary(
     initial_article_count: usize,
     succesful_articles_table: Table,
+    partial_downloads_count: usize,
     errors: Vec<PaperoniError>,
 ) {
-    let successfully_downloaded_count = initial_article_count - errors.len();
+    let successfully_downloaded_count =
+        initial_article_count - partial_downloads_count - errors.len();
 
     println!(
         "{}",
-        short_summary(
+        short_summary(DownloadCount::new(
             initial_article_count,
             successfully_downloaded_count,
+            partial_downloads_count,
             errors.len()
-        )
+        ))
         .bold()
     );
 
@@ -50,49 +54,128 @@ pub fn display_summary
 }
 
 /// Returns a string summary of the total number of failed and successful article downloads
-fn short_summary(initial_count: usize, successful_count: usize, failed_count: usize) -> String {
-    if initial_count != successful_count + failed_count {
+fn short_summary(download_count: DownloadCount) -> String {
+    // TODO: Refactor this
+    if download_count.total
+        != download_count.successful + download_count.failed + download_count.partial
+    {
         panic!("initial_count must be equal to the sum of failed and successful count")
     }
     let get_noun = |count: usize| if count == 1 { "article" } else { "articles" };
-    if successful_count == initial_count && successful_count == 1 {
+    if download_count.successful == download_count.total && download_count.successful == 1 {
         "Article downloaded successfully".green().to_string()
-    } else if initial_count == failed_count && failed_count == 1 {
+    } else if download_count.total == download_count.failed && download_count.failed == 1 {
         "Article failed to download".red().to_string()
-    } else if successful_count == initial_count {
+    } else if download_count.total == download_count.partial && download_count.partial == 1 {
+        "Article partially failed to download".yellow().to_string()
+    } else if download_count.successful == download_count.total {
         "All articles downloaded successfully".green().to_string()
-    } else if successful_count == 0 {
+    } else if download_count.failed == download_count.total {
        "All articles failed to download".red().to_string()
-    } else {
+    } else if download_count.partial == download_count.total {
+        "All articles partially failed to download"
+            .yellow()
+            .to_string()
+    } else if download_count.partial == 0 {
         format!(
             "{} {} downloaded successfully, {} {} failed",
-            successful_count,
-            get_noun(successful_count),
-            failed_count,
-            get_noun(failed_count)
+            download_count.successful,
+            get_noun(download_count.successful),
+            download_count.failed,
+            get_noun(download_count.failed)
+        )
+        .yellow()
+        .to_string()
+    } else if download_count.successful == 0
+        && download_count.partial > 0
+        && download_count.failed > 0
+    {
+        format!(
+            "{} {} partially failed to download, {} {} failed",
+            download_count.partial,
+            get_noun(download_count.partial),
+            download_count.failed,
+            get_noun(download_count.failed)
+        )
+        .yellow()
+        .to_string()
+    } else if download_count.failed == 0
+        && download_count.successful > 0
+        && download_count.partial > 0
+    {
+        format!(
+            "{} {} downloaded successfully, {} {} partially failed to download",
+            download_count.successful,
+            get_noun(download_count.successful),
+            download_count.partial,
+            get_noun(download_count.partial)
+        )
+        .yellow()
+        .to_string()
+    } else {
+        format!(
+            "{} {} downloaded successfully, {} {} partially failed to download, {} {} failed",
+            download_count.successful,
+            get_noun(download_count.successful),
+            download_count.partial,
+            get_noun(download_count.partial),
+            download_count.failed,
+            get_noun(download_count.failed)
         )
         .yellow()
        .to_string()
     }
 }
 
-pub fn init_logger() {
+struct DownloadCount {
+    total: usize,
+    successful: usize,
+    partial: usize,
+    failed: usize,
+}
+impl DownloadCount {
+    fn new(total: usize, successful: usize, partial: usize, failed: usize) -> Self {
+        Self {
+            total,
+            successful,
+            partial,
+            failed,
+        }
+    }
+}
+
+pub fn init_logger(app_config: &AppConfig) {
     match UserDirs::new() {
         Some(user_dirs) => {
             let home_dir = user_dirs.home_dir();
             let paperoni_dir = home_dir.join(".paperoni");
             let log_dir = paperoni_dir.join("logs");
-            if !paperoni_dir.is_dir() || !log_dir.is_dir() {
-                std::fs::create_dir_all(&log_dir).expect(
-                    "Unable to create paperoni directories on home directory for logging purposes",
-                );
-            }
-            match flexi_logger::Logger::with_str("paperoni=debug")
-                .directory(log_dir)
-                .log_to_file()
-                .print_message()
-                .start()
-            {
+
+            let log_spec = LogSpecBuilder::new()
+                .module("paperoni", app_config.log_level())
+                .build();
+            let formatted_timestamp = app_config.start_time().format("%Y-%m-%d_%H-%M-%S");
+            let mut logger = flexi_logger::Logger::with(log_spec);
+
+            if app_config.is_logging_to_file() && (!paperoni_dir.is_dir() || !log_dir.is_dir()) {
+                match std::fs::create_dir_all(&log_dir) {
+                    Ok(_) => (),
+                    Err(e) => {
+                        eprintln!("Unable to create paperoni directories on home directory for logging purposes\n{}",e);
+                        std::process::exit(1);
+                    }
+                };
+            }
+
+            if app_config.is_logging_to_file() {
+                logger = logger
+                    .directory(log_dir)
+                    .discriminant(formatted_timestamp.to_string())
+                    .suppress_timestamp()
+                    .log_to_file();
+            }
+
+            match logger.start() {
                 Ok(_) => (),
                 Err(e) => eprintln!("Unable to start logger!\n{}", e),
             }
@@ -103,44 +186,68 @@ pub fn init_logger() {
 
 #[cfg(test)]
 mod tests {
-    use super::short_summary;
+    use super::{short_summary, DownloadCount};
     use colored::*;
     #[test]
     fn test_short_summary() {
         assert_eq!(
-            short_summary(1, 1, 0),
+            short_summary(DownloadCount::new(1, 1, 0, 0)),
             "Article downloaded successfully".green().to_string()
         );
         assert_eq!(
-            short_summary(1, 0, 1),
+            short_summary(DownloadCount::new(1, 0, 0, 1)),
             "Article failed to download".red().to_string()
         );
         assert_eq!(
-            short_summary(10, 10, 0),
+            short_summary(DownloadCount::new(10, 10, 0, 0)),
             "All articles downloaded successfully".green().to_string()
         );
         assert_eq!(
-            short_summary(10, 0, 10),
+            short_summary(DownloadCount::new(10, 0, 0, 10)),
             "All articles failed to download".red().to_string()
         );
         assert_eq!(
-            short_summary(10, 8, 2),
+            short_summary(DownloadCount::new(10, 8, 0, 2)),
             "8 articles downloaded successfully, 2 articles failed"
                 .yellow()
                 .to_string()
         );
         assert_eq!(
-            short_summary(10, 1, 9),
+            short_summary(DownloadCount::new(10, 1, 0, 9)),
             "1 article downloaded successfully, 9 articles failed"
                 .yellow()
                 .to_string()
         );
         assert_eq!(
-            short_summary(7, 6, 1),
+            short_summary(DownloadCount::new(7, 6, 0, 1)),
             "6 articles downloaded successfully, 1 article failed"
                 .yellow()
                 .to_string()
         );
+        assert_eq!(
+            short_summary(DownloadCount::new(7, 4, 2, 1)),
+            "4 articles downloaded successfully, 2 articles partially failed to download, 1 article failed"
+                .yellow()
+                .to_string()
+        );
+        assert_eq!(
+            short_summary(DownloadCount::new(12, 6, 6, 0)),
+            "6 articles downloaded successfully, 6 articles partially failed to download"
+                .yellow()
+                .to_string()
+        );
+        assert_eq!(
+            short_summary(DownloadCount::new(5, 0, 4, 1)),
+            "4 articles partially failed to download, 1 article failed"
+                .yellow()
+                .to_string()
+        );
+        assert_eq!(
+            short_summary(DownloadCount::new(4, 0, 4, 0)),
+            "All articles partially failed to download"
+                .yellow()
+                .to_string()
+        );
     }
 
     #[test]
@@ -148,6 +255,6 @@ mod tests {
         expected = "initial_count must be equal to the sum of failed and successful count"
     )]
     fn test_short_summary_panics_on_invalid_input() {
-        short_summary(0, 12, 43);
+        short_summary(DownloadCount::new(0, 12, 0, 43));
     }
 }
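The init_logger changes come down to building a log specification scoped to the paperoni module and only switching flexi_logger to file output when log-to-file was passed. Below is a compressed sketch that uses only the calls appearing in the hunk above (a flexi_logger 0.17-era API); the directory and discriminant values are placeholders, not taken from a real run.

// Sketch only: mirrors the logger setup above with hard-coded values.
use flexi_logger::{LevelFilter, LogSpecBuilder, Logger};

fn main() {
    // Limit logging to the paperoni module at the chosen level.
    let log_spec = LogSpecBuilder::new()
        .module("paperoni", LevelFilter::Debug)
        .build();

    // With log_to_file(), records end up in a file named roughly
    // logs/paperoni_<discriminant>.log instead of being printed to stderr.
    match Logger::with(log_spec)
        .directory("logs")
        .discriminant("2021-04-06_10-00-00")
        .suppress_timestamp()
        .log_to_file()
        .start()
    {
        Ok(_) => (),
        Err(e) => eprintln!("Unable to start logger!\n{}", e),
    }
}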
src/main.rs | 33

@@ -24,27 +24,30 @@ use cli::AppConfig;
 use epub::generate_epubs;
 use extractor::Extractor;
 use http::{download_images, fetch_html};
-use logs::{display_summary, init_logger};
+use logs::display_summary;
 
 fn main() {
     let app_config = cli::cli_init();
 
     if !app_config.urls().is_empty() {
-        if app_config.is_debug() {
-            init_logger();
-        }
         download(app_config);
     }
 }
 
 fn download(app_config: AppConfig) {
-    let bar = ProgressBar::new(app_config.urls().len() as u64);
     let mut errors = Vec::new();
-    let style = ProgressStyle::default_bar().template(
-        "{spinner:.cyan} [{elapsed_precise}] {bar:40.white} {:>8} link {pos}/{len:7} {msg:.yellow/white}",
-    );
-    bar.set_style(style);
-    bar.enable_steady_tick(500);
+    let mut partial_download_count: usize = 0;
+    let bar = if app_config.can_disable_progress_bar() {
+        ProgressBar::hidden()
+    } else {
+        let enabled_bar = ProgressBar::new(app_config.urls().len() as u64);
+        let style = ProgressStyle::default_bar().template(
+            "{spinner:.cyan} [{elapsed_precise}] {bar:40.white} {:>8} link {pos}/{len:7} {msg:.yellow/white}",
+        );
+        enabled_bar.set_style(style);
+        enabled_bar.enable_steady_tick(500);
+        enabled_bar
+    };
     let articles = task::block_on(async {
         let urls_iter = app_config.urls().iter().map(|url| fetch_html(url));
         let mut responses = stream::from_iter(urls_iter).buffered(app_config.max_conn());
@@ -62,6 +65,7 @@ fn download(app_config: AppConfig) {
                 download_images(&mut extractor, &Url::parse(&url).unwrap(), &bar)
                     .await
             {
+                partial_download_count += 1;
                 warn!(
                     "{} image{} failed to download for {}",
                     img_errors.len(),
@@ -97,14 +101,25 @@ fn download(app_config: AppConfig) {
         .load_preset(UTF8_FULL)
         .load_preset(UTF8_HORIZONTAL_BORDERS_ONLY)
         .set_content_arrangement(ContentArrangement::Dynamic);
-    match generate_epubs(articles, app_config.merged(), &mut succesful_articles_table) {
+    match generate_epubs(articles, &app_config, &mut succesful_articles_table) {
         Ok(_) => (),
         Err(gen_epub_errors) => {
             errors.extend(gen_epub_errors);
         }
     };
     let has_errors = !errors.is_empty();
-    display_summary(app_config.urls().len(), succesful_articles_table, errors);
+    display_summary(
+        app_config.urls().len(),
+        succesful_articles_table,
+        partial_download_count,
+        errors,
+    );
+    if app_config.is_logging_to_file() {
+        println!(
+            "Log written to paperoni_{}.log\n",
+            app_config.start_time().format("%Y-%m-%d_%H-%M-%S")
+        );
+    }
     if has_errors {
         std::process::exit(1);
     }