Fix from PR#15

- refactor comments
- move `cli::Error` to `errors::CliError`
- remove non-deterministic ordering of input URLs (use `BTreeSet` instead of `HashSet`)
- move the pure functionality of `init_logger` into a separate standalone function
This commit is contained in:
Mikhail Gorbachev 2021-06-06 13:20:08 +03:00
parent 13ad14e73d
commit aa9258e122
4 changed files with 133 additions and 93 deletions

View file

@ -48,23 +48,41 @@ USAGE:
paperoni [OPTIONS] [urls]... paperoni [OPTIONS] [urls]...
OPTIONS: OPTIONS:
-f, --file <file> Input file containing links -f, --file <file>
-h, --help Prints help information Input file containing links
-h, --help
Prints help information
--log-to-file --log-to-file
Enables logging of events to a file located in .paperoni/logs with a default log level of debug. Use -v to Enables logging of events to a file located in .paperoni/logs with a default log level of debug. Use -v to
specify the logging level specify the logging level
--max_conn <max_conn> --max_conn <max_conn>
The maximum number of concurrent HTTP connections when downloading articles. Default is 8 The maximum number of concurrent HTTP connections when downloading articles. Default is 8.
NOTE: It is advised to use as few connections as needed i.e between 1 and 50. Using more connections can end
up overloading your network card with too many concurrent requests.
-o, --output_directory <output_directory>
Directory for saving epub documents
--merge <output_name>
Merge multiple articles into a single epub that will be given the name provided
-V, --version
Prints version information
-o, --output_directory <output_directory> Directory for store output epub documents
--merge <output_name> Merge multiple articles into a single epub
-V, --version Prints version information
-v -v
Enables logging of events and set the verbosity level. Use --help to read on its usage This takes upto 4 levels of verbosity in the following order.
- Error (-v)
- Warn (-vv)
- Info (-vvv)
- Debug (-vvvv)
When this flag is passed, it disables the progress bars and logs to stderr.
If you would like to send the logs to a file (and enable progress bars), pass the log-to-file flag.
ARGS: ARGS:
<urls>... Urls of web articles <urls>...
Urls of web articles
``` ```
To download a single article pass in its URL To download a single article pass in its URL

View file

@ -1,14 +1,10 @@
use std::{ use std::{collections::BTreeSet, fs, num::NonZeroUsize, path::Path};
collections::HashSet,
num::{NonZeroUsize, ParseIntError},
path::Path,
};
use chrono::{DateTime, Local}; use chrono::{DateTime, Local};
use clap::{App, AppSettings, Arg, ArgMatches}; use clap::{App, AppSettings, Arg, ArgMatches};
use flexi_logger::{FlexiLoggerError, LevelFilter as LogLevel}; use flexi_logger::LevelFilter as LogLevel;
use std::fs;
use thiserror::Error; type Error = crate::errors::CliError<AppConfigBuilderError>;
const DEFAULT_MAX_CONN: usize = 8; const DEFAULT_MAX_CONN: usize = 8;
@ -26,28 +22,6 @@ pub struct AppConfig {
pub is_logging_to_file: bool, pub is_logging_to_file: bool,
} }
#[derive(Debug, Error)]
pub enum Error {
#[error("Failed to open file with urls: {0}")]
UrlFileError(#[from] std::io::Error),
#[error("Failed to parse max connection value: {0}")]
InvalidMaxConnectionCount(#[from] ParseIntError),
#[error("No urls for parse")]
NoUrls,
#[error("No urls for parse")]
AppBuildError(#[from] AppConfigBuilderError),
#[error("Invalid output path name for merged epubs: {0}")]
InvalidOutputPath(String),
#[error("Log error: {0}")]
LogDirectoryError(String),
#[error(transparent)]
LogError(#[from] FlexiLoggerError),
#[error("Wrong output directory")]
WrongOutputDirectory,
#[error("Output directory not exists")]
OutputDirectoryNotExists,
}
impl AppConfig { impl AppConfig {
pub fn init_with_cli() -> Result<AppConfig, Error> { pub fn init_with_cli() -> Result<AppConfig, Error> {
let app = App::new("paperoni") let app = App::new("paperoni")
@ -73,11 +47,10 @@ impl AppConfig {
) )
.arg( .arg(
Arg::with_name("output_directory") Arg::with_name("output_directory")
.long("output_directory") .long("output-directory")
.short("o") .short("o")
.help("Directory for store output epub documents") .help("Directory to store output epub documents")
.conflicts_with("output_name") .conflicts_with("output_name")
.long_help("Directory for saving epub documents")
.takes_value(true), .takes_value(true),
) )
.arg( .arg(
@ -128,40 +101,10 @@ impl AppConfig {
} }
fn init_logger(self) -> Result<Self, Error> { fn init_logger(self) -> Result<Self, Error> {
use directories::UserDirs; use crate::logs;
use flexi_logger::LogSpecBuilder; logs::init_logger(self.log_level, &self.start_time, self.is_logging_to_file)
.map(|_| self)
match UserDirs::new() { .map_err(Error::LogError)
Some(user_dirs) => {
let home_dir = user_dirs.home_dir();
let paperoni_dir = home_dir.join(".paperoni");
let log_dir = paperoni_dir.join("logs");
let log_spec = LogSpecBuilder::new()
.module("paperoni", self.log_level)
.build();
let formatted_timestamp = self.start_time.format("%Y-%m-%d_%H-%M-%S");
let mut logger = flexi_logger::Logger::with(log_spec);
if self.is_logging_to_file && (!paperoni_dir.is_dir() || !log_dir.is_dir()) {
if let Err(e) = fs::create_dir_all(&log_dir) {
return Err(Error::LogDirectoryError(format!("Unable to create paperoni directories on home directory for logging purposes\n{}",e)));
}
}
if self.is_logging_to_file {
logger = logger
.directory(log_dir)
.discriminant(formatted_timestamp.to_string())
.suppress_timestamp()
.log_to_file();
}
logger.start()?;
Ok(self)
}
None => Err(Error::LogDirectoryError(
"Unable to get user directories for logging purposes".to_string(),
)),
}
} }
} }
@ -181,21 +124,20 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
None None
} }
}; };
match ( let direct_urls = arg_matches
arg_matches .values_of("urls")
.values_of("urls") .and_then(|urls| urls.map(url_filter).collect::<Option<BTreeSet<_>>>());
.and_then(|urls| urls.map(url_filter).collect::<Option<HashSet<_>>>()), let file_urls = arg_matches
arg_matches .value_of("file")
.value_of("file") .map(fs::read_to_string)
.map(fs::read_to_string) .transpose()?
.transpose()? .and_then(|content| {
.and_then(|content| { content
content .lines()
.lines() .map(url_filter)
.map(url_filter) .collect::<Option<BTreeSet<_>>>()
.collect::<Option<HashSet<_>>>() });
}), match (direct_urls, file_urls) {
) {
(Some(direct_urls), Some(file_urls)) => Ok(direct_urls (Some(direct_urls), Some(file_urls)) => Ok(direct_urls
.union(&file_urls) .union(&file_urls)
.map(ToOwned::to_owned) .map(ToOwned::to_owned)
@ -219,7 +161,7 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
3 => LogLevel::Info, 3 => LogLevel::Info,
4..=u64::MAX => LogLevel::Debug, 4..=u64::MAX => LogLevel::Debug,
}) })
.is_logging_to_file(arg_matches.is_present("log-to_file")) .is_logging_to_file(arg_matches.is_present("log-to-file"))
.output_directory( .output_directory(
arg_matches arg_matches
.value_of("output_directory") .value_of("output_directory")
@ -242,6 +184,9 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
impl AppConfigBuilder { impl AppConfigBuilder {
pub fn try_init(&self) -> Result<AppConfig, Error> { pub fn try_init(&self) -> Result<AppConfig, Error> {
self.build()?.init_logger()?.init_merge_file() self.build()
.map_err(Error::AppBuildError)?
.init_logger()?
.init_merge_file()
} }
} }

View file

@ -1,3 +1,6 @@
use std::fmt::{Debug, Display};
use flexi_logger::FlexiLoggerError;
use thiserror::Error; use thiserror::Error;
#[derive(Error, Debug)] #[derive(Error, Debug)]
@ -124,3 +127,31 @@ impl From<std::str::Utf8Error> for PaperoniError {
PaperoniError::with_kind(ErrorKind::UTF8Error(err.to_string())) PaperoniError::with_kind(ErrorKind::UTF8Error(err.to_string()))
} }
} }
#[derive(Debug, Error)]
pub enum LogError {
#[error(transparent)]
FlexiError(#[from] FlexiLoggerError),
#[error("Wrong log directory: {0}")]
LogDirectoryError(String),
}
#[derive(Debug, Error)]
pub enum CliError<BuilderError: Debug + Display> {
#[error("Failed to open file with urls: {0}")]
UrlFileError(#[from] std::io::Error),
#[error("Failed to parse max connection value: {0}")]
InvalidMaxConnectionCount(#[from] std::num::ParseIntError),
#[error("No urls were provided")]
NoUrls,
#[error("Failed to build cli application: {0}")]
AppBuildError(BuilderError),
#[error("Invalid output path name for merged epubs: {0}")]
InvalidOutputPath(String),
#[error("Wrong output directory")]
WrongOutputDirectory,
#[error("Output directory not exists")]
OutputDirectoryNotExists,
#[error("Unable to start logger!\n{0}")]
LogError(#[from] LogError),
}

View file

@ -1,6 +1,10 @@
use std::fs;
use chrono::{DateTime, Local};
use colored::*; use colored::*;
use comfy_table::presets::UTF8_HORIZONTAL_BORDERS_ONLY; use comfy_table::presets::UTF8_HORIZONTAL_BORDERS_ONLY;
use comfy_table::{Cell, CellAlignment, ContentArrangement, Table}; use comfy_table::{Cell, CellAlignment, ContentArrangement, Table};
use flexi_logger::LevelFilter;
use log::error; use log::error;
use crate::errors::PaperoniError; use crate::errors::PaperoniError;
@ -141,6 +145,48 @@ impl DownloadCount {
} }
} }
} }
use crate::errors::LogError as Error;
pub fn init_logger(
log_level: LevelFilter,
start_time: &DateTime<Local>,
is_logging_to_file: bool,
) -> Result<(), Error> {
use directories::UserDirs;
use flexi_logger::LogSpecBuilder;
match UserDirs::new() {
Some(user_dirs) => {
let home_dir = user_dirs.home_dir();
let paperoni_dir = home_dir.join(".paperoni");
let log_dir = paperoni_dir.join("logs");
let log_spec = LogSpecBuilder::new().module("paperoni", log_level).build();
let formatted_timestamp = start_time.format("%Y-%m-%d_%H-%M-%S");
let mut logger = flexi_logger::Logger::with(log_spec);
if is_logging_to_file && (!paperoni_dir.is_dir() || !log_dir.is_dir()) {
if let Err(e) = fs::create_dir_all(&log_dir) {
return Err(Error::LogDirectoryError(format!("Unable to create paperoni directories on home directory for logging purposes\n{}",e)));
}
}
if is_logging_to_file {
logger = logger
.directory(log_dir)
.discriminant(formatted_timestamp.to_string())
.suppress_timestamp()
.log_to_file();
}
logger.start()?;
Ok(())
}
None => Err(Error::LogDirectoryError(
"Unable to get user directories for logging purposes".to_string(),
)),
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{short_summary, DownloadCount}; use super::{short_summary, DownloadCount};