Fixes from PR#15

- refactor comments
- move `cli::Error` to `errors::CliError`
- remove shuffling of the input URL order (switch from `HashSet` to `BTreeSet`)
- move the pure functionality of `init_logger` into a separate function (`logs::init_logger`)
Mikhail Gorbachev 2021-06-06 13:20:08 +03:00
parent 13ad14e73d
commit aa9258e122
4 changed files with 133 additions and 93 deletions

View file

@@ -48,23 +48,41 @@ USAGE:
paperoni [OPTIONS] [urls]...
OPTIONS:
-f, --file <file> Input file containing links
-h, --help Prints help information
-f, --file <file>
Input file containing links
-h, --help
Prints help information
--log-to-file
Enables logging of events to a file located in .paperoni/logs with a default log level of debug. Use -v to
specify the logging level
--max_conn <max_conn>
The maximum number of concurrent HTTP connections when downloading articles. Default is 8
The maximum number of concurrent HTTP connections when downloading articles. Default is 8.
NOTE: It is advised to use as few connections as needed i.e between 1 and 50. Using more connections can end
up overloading your network card with too many concurrent requests.
-o, --output_directory <output_directory>
Directory for saving epub documents
--merge <output_name>
Merge multiple articles into a single epub that will be given the name provided
-V, --version
Prints version information
-o, --output_directory <output_directory> Directory for store output epub documents
--merge <output_name> Merge multiple articles into a single epub
-V, --version Prints version information
-v
Enables logging of events and set the verbosity level. Use --help to read on its usage
This takes upto 4 levels of verbosity in the following order.
- Error (-v)
- Warn (-vv)
- Info (-vvv)
- Debug (-vvvv)
When this flag is passed, it disables the progress bars and logs to stderr.
If you would like to send the logs to a file (and enable progress bars), pass the log-to-file flag.
ARGS:
<urls>... Urls of web articles
<urls>...
Urls of web articles
```
To download a single article pass in its URL

View file

@@ -1,14 +1,10 @@
use std::{
collections::HashSet,
num::{NonZeroUsize, ParseIntError},
path::Path,
};
use std::{collections::BTreeSet, fs, num::NonZeroUsize, path::Path};
use chrono::{DateTime, Local};
use clap::{App, AppSettings, Arg, ArgMatches};
use flexi_logger::{FlexiLoggerError, LevelFilter as LogLevel};
use std::fs;
use thiserror::Error;
use flexi_logger::LevelFilter as LogLevel;
type Error = crate::errors::CliError<AppConfigBuilderError>;
const DEFAULT_MAX_CONN: usize = 8;
@@ -26,28 +22,6 @@ pub struct AppConfig {
pub is_logging_to_file: bool,
}
#[derive(Debug, Error)]
pub enum Error {
#[error("Failed to open file with urls: {0}")]
UrlFileError(#[from] std::io::Error),
#[error("Failed to parse max connection value: {0}")]
InvalidMaxConnectionCount(#[from] ParseIntError),
#[error("No urls for parse")]
NoUrls,
#[error("No urls for parse")]
AppBuildError(#[from] AppConfigBuilderError),
#[error("Invalid output path name for merged epubs: {0}")]
InvalidOutputPath(String),
#[error("Log error: {0}")]
LogDirectoryError(String),
#[error(transparent)]
LogError(#[from] FlexiLoggerError),
#[error("Wrong output directory")]
WrongOutputDirectory,
#[error("Output directory not exists")]
OutputDirectoryNotExists,
}
impl AppConfig {
pub fn init_with_cli() -> Result<AppConfig, Error> {
let app = App::new("paperoni")
@@ -73,11 +47,10 @@ impl AppConfig {
)
.arg(
Arg::with_name("output_directory")
.long("output_directory")
.long("output-directory")
.short("o")
.help("Directory for store output epub documents")
.help("Directory to store output epub documents")
.conflicts_with("output_name")
.long_help("Directory for saving epub documents")
.takes_value(true),
)
.arg(
@@ -128,40 +101,10 @@ impl AppConfig {
}
fn init_logger(self) -> Result<Self, Error> {
use directories::UserDirs;
use flexi_logger::LogSpecBuilder;
match UserDirs::new() {
Some(user_dirs) => {
let home_dir = user_dirs.home_dir();
let paperoni_dir = home_dir.join(".paperoni");
let log_dir = paperoni_dir.join("logs");
let log_spec = LogSpecBuilder::new()
.module("paperoni", self.log_level)
.build();
let formatted_timestamp = self.start_time.format("%Y-%m-%d_%H-%M-%S");
let mut logger = flexi_logger::Logger::with(log_spec);
if self.is_logging_to_file && (!paperoni_dir.is_dir() || !log_dir.is_dir()) {
if let Err(e) = fs::create_dir_all(&log_dir) {
return Err(Error::LogDirectoryError(format!("Unable to create paperoni directories on home directory for logging purposes\n{}",e)));
}
}
if self.is_logging_to_file {
logger = logger
.directory(log_dir)
.discriminant(formatted_timestamp.to_string())
.suppress_timestamp()
.log_to_file();
}
logger.start()?;
Ok(self)
}
None => Err(Error::LogDirectoryError(
"Unable to get user directories for logging purposes".to_string(),
)),
}
use crate::logs;
logs::init_logger(self.log_level, &self.start_time, self.is_logging_to_file)
.map(|_| self)
.map_err(Error::LogError)
}
}
@@ -181,11 +124,10 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
None
}
};
match (
arg_matches
let direct_urls = arg_matches
.values_of("urls")
.and_then(|urls| urls.map(url_filter).collect::<Option<HashSet<_>>>()),
arg_matches
.and_then(|urls| urls.map(url_filter).collect::<Option<BTreeSet<_>>>());
let file_urls = arg_matches
.value_of("file")
.map(fs::read_to_string)
.transpose()?
@@ -193,9 +135,9 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
content
.lines()
.map(url_filter)
.collect::<Option<HashSet<_>>>()
}),
) {
.collect::<Option<BTreeSet<_>>>()
});
match (direct_urls, file_urls) {
(Some(direct_urls), Some(file_urls)) => Ok(direct_urls
.union(&file_urls)
.map(ToOwned::to_owned)
@@ -219,7 +161,7 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
3 => LogLevel::Info,
4..=u64::MAX => LogLevel::Debug,
})
.is_logging_to_file(arg_matches.is_present("log-to_file"))
.is_logging_to_file(arg_matches.is_present("log-to-file"))
.output_directory(
arg_matches
.value_of("output_directory")
@@ -242,6 +184,9 @@ impl<'a> TryFrom<ArgMatches<'a>> for AppConfig {
impl AppConfigBuilder {
pub fn try_init(&self) -> Result<AppConfig, Error> {
self.build()?.init_logger()?.init_merge_file()
self.build()
.map_err(Error::AppBuildError)?
.init_logger()?
.init_merge_file()
}
}
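
The `HashSet` to `BTreeSet` switch in the diff above is what stops the input URLs from being shuffled: a `BTreeSet` always iterates in sorted order, so the same direct and file URLs are processed in the same order on every run. A minimal standalone sketch of that behaviour (the URLs here are made up for illustration):

```rust
use std::collections::BTreeSet;

fn main() {
    // URLs given directly on the command line and URLs read from a file,
    // mirroring the two sources collected in `TryFrom<ArgMatches>` above.
    let direct_urls: BTreeSet<&str> = ["https://example.com/b", "https://example.com/a"]
        .iter()
        .copied()
        .collect();
    let file_urls: BTreeSet<&str> = ["https://example.com/c", "https://example.com/a"]
        .iter()
        .copied()
        .collect();

    // Unlike a `HashSet`, a `BTreeSet` union iterates in sorted order, so the
    // merged list is deterministic and duplicates collapse.
    let merged: Vec<&str> = direct_urls.union(&file_urls).copied().collect();
    assert_eq!(
        merged,
        ["https://example.com/a", "https://example.com/b", "https://example.com/c"]
    );
}
```

With a `HashSet`, the union's iteration order would differ between runs because of the hasher's random seed.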

View file

@@ -1,3 +1,6 @@
use std::fmt::{Debug, Display};
use flexi_logger::FlexiLoggerError;
use thiserror::Error;
#[derive(Error, Debug)]
@@ -124,3 +127,31 @@ impl From<std::str::Utf8Error> for PaperoniError {
PaperoniError::with_kind(ErrorKind::UTF8Error(err.to_string()))
}
}
#[derive(Debug, Error)]
pub enum LogError {
#[error(transparent)]
FlexiError(#[from] FlexiLoggerError),
#[error("Wrong log directory: {0}")]
LogDirectoryError(String),
}
#[derive(Debug, Error)]
pub enum CliError<BuilderError: Debug + Display> {
#[error("Failed to open file with urls: {0}")]
UrlFileError(#[from] std::io::Error),
#[error("Failed to parse max connection value: {0}")]
InvalidMaxConnectionCount(#[from] std::num::ParseIntError),
#[error("No urls were provided")]
NoUrls,
#[error("Failed to build cli application: {0}")]
AppBuildError(BuilderError),
#[error("Invalid output path name for merged epubs: {0}")]
InvalidOutputPath(String),
#[error("Wrong output directory")]
WrongOutputDirectory,
#[error("Output directory not exists")]
OutputDirectoryNotExists,
#[error("Unable to start logger!\n{0}")]
LogError(#[from] LogError),
}
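
The `#[from]` attributes on the new generic `CliError` let call sites use `?` directly on `std::io::Error` (and `ParseIntError`) results. The helper below is hypothetical and not part of this commit; it only sketches that conversion, with `String` standing in for the builder error type parameter and the code assumed to live inside the paperoni crate:

```rust
use crate::errors::CliError;

// Hypothetical helper: `?` converts `std::io::Error` into
// `CliError::UrlFileError` via the `#[from]` attribute, and missing input
// maps to the explicit `NoUrls` variant.
fn read_urls_from_file(path: &str) -> Result<Vec<String>, CliError<String>> {
    let content = std::fs::read_to_string(path)?; // io::Error -> CliError::UrlFileError
    let urls: Vec<String> = content
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .map(str::to_owned)
        .collect();
    if urls.is_empty() {
        return Err(CliError::NoUrls);
    }
    Ok(urls)
}
```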

View file

@@ -1,6 +1,10 @@
use std::fs;
use chrono::{DateTime, Local};
use colored::*;
use comfy_table::presets::UTF8_HORIZONTAL_BORDERS_ONLY;
use comfy_table::{Cell, CellAlignment, ContentArrangement, Table};
use flexi_logger::LevelFilter;
use log::error;
use crate::errors::PaperoniError;
@@ -141,6 +145,48 @@ impl DownloadCount {
}
}
}
use crate::errors::LogError as Error;
pub fn init_logger(
log_level: LevelFilter,
start_time: &DateTime<Local>,
is_logging_to_file: bool,
) -> Result<(), Error> {
use directories::UserDirs;
use flexi_logger::LogSpecBuilder;
match UserDirs::new() {
Some(user_dirs) => {
let home_dir = user_dirs.home_dir();
let paperoni_dir = home_dir.join(".paperoni");
let log_dir = paperoni_dir.join("logs");
let log_spec = LogSpecBuilder::new().module("paperoni", log_level).build();
let formatted_timestamp = start_time.format("%Y-%m-%d_%H-%M-%S");
let mut logger = flexi_logger::Logger::with(log_spec);
if is_logging_to_file && (!paperoni_dir.is_dir() || !log_dir.is_dir()) {
if let Err(e) = fs::create_dir_all(&log_dir) {
return Err(Error::LogDirectoryError(format!("Unable to create paperoni directories on home directory for logging purposes\n{}",e)));
}
}
if is_logging_to_file {
logger = logger
.directory(log_dir)
.discriminant(formatted_timestamp.to_string())
.suppress_timestamp()
.log_to_file();
}
logger.start()?;
Ok(())
}
None => Err(Error::LogDirectoryError(
"Unable to get user directories for logging purposes".to_string(),
)),
}
}
#[cfg(test)]
mod tests {
use super::{short_summary, DownloadCount};
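
For reference, a rough sketch of how the extracted `logs::init_logger` gets driven. The `main` below is illustrative only and assumes it sits in the crate's own `main.rs`; the real call site is the thin wrapper left in `AppConfig::init_logger` above:

```rust
use chrono::Local;
use flexi_logger::LevelFilter;

// Illustrative entry point: in the crate itself this call lives in
// `AppConfig::init_logger`, which forwards the configured level and flags.
fn main() -> Result<(), crate::errors::LogError> {
    let start_time = Local::now();

    // Log at Debug level; `false` keeps logging on stderr instead of writing
    // timestamped files under ~/.paperoni/logs.
    crate::logs::init_logger(LevelFilter::Debug, &start_time, false)?;

    log::debug!("logger initialised");
    Ok(())
}
```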