From cae9227ab048603402c99707ce98b6beee5ee2e8 Mon Sep 17 00:00:00 2001
From: Kenneth Gitere
Date: Fri, 30 Apr 2021 06:55:02 +0300
Subject: [PATCH] Update documentation

---
 Cargo.lock |  2 +-
 Cargo.toml |  2 +-
 README.md  | 43 ++++++++++++++++++++++++++++++++++++++++---
 src/cli.rs |  7 ++-----
 4 files changed, 44 insertions(+), 10 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index a82ccaa..ca5456c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1482,7 +1482,7 @@ dependencies = [

[[package]]
name = "paperoni"
-version = "0.3.0-alpha1"
+version = "0.4.0-alpha1"
dependencies = [
 "async-std",
 "chrono",
diff --git a/Cargo.toml b/Cargo.toml
index 655fbb7..3fbd83c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,7 +3,7 @@ description = "A web article downloader"
homepage = "https://github.com/hipstermojo/paperoni"
repository = "https://github.com/hipstermojo/paperoni"
name = "paperoni"
-version = "0.3.0-alpha1"
+version = "0.4.0-alpha1"
authors = ["Kenneth Gitere "]
edition = "2018"
license = "MIT"
diff --git a/README.md b/README.md
index 96e15c5..0e626e0 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,8 @@

Salami not included

-Paperoni is a web article downloader written in Rust. The downloaded articles are then exported as EPUB files.
+![crates.io](https://img.shields.io/crates/v/paperoni.svg)
+Paperoni is a CLI tool made in Rust for downloading web articles as EPUBs.

> This project is in an alpha release so it might crash when you use it. Please open an [issue on Github](https://github.com/hipstermojo/paperoni/issues/new) if it does crash.

@@ -17,7 +18,7 @@ Check the [releases](https://github.com/hipstermojo/paperoni/releases) page for
Paperoni is published on [crates.io](https://crates.io). If you have [cargo](https://github.com/rust-lang/cargo) installed, then run:

```sh
-cargo install paperoni --version 0.3.0-alpha1
+cargo install paperoni --version 0.4.0-alpha1
```

_Paperoni is still in alpha so the `version` flag has to be passed._
@@ -37,6 +38,27 @@ cargo run -- # pass your url here

## Usage

+```
+USAGE:
+    paperoni [OPTIONS] [urls]...
+
+OPTIONS:
+    -f, --file           Input file containing links
+    -h, --help           Prints help information
+        --log-to-file    Enables logging of events to a file located in .paperoni/logs with a default log level
+                         of debug. Use -v to specify the logging level
+        --max_conn       The maximum number of concurrent HTTP connections when downloading articles. Default is
+                         8
+        --merge          Merge multiple articles into a single epub
+    -V, --version        Prints version information
+    -v                   Enables logging of events and sets the verbosity level. Use -h to read more on its usage
+
+ARGS:
+    ...    Urls of web articles
+```
+
+To download a single article, pass in its URL:
+
```sh
paperoni https://en.wikipedia.org/wiki/Pepperoni
```
@@ -68,10 +90,23 @@ into a single epub using the `merge` flag and specifying the output file.
```sh
paperoni -f links.txt --merge out.epub
```

+### Logging events
+
+Logging is disabled by default. It can be enabled with either the `-v` flag or the `--log-to-file` flag. If the `--log-to-file` flag is passed, the logs are sent to a file in the default Paperoni directory, `.paperoni/logs`, which is in your home directory. The `-v` flag configures the verbosity level as follows:
+
+```
+-v      Logs only the error level
+-vv     Logs only the warn level
+-vvv    Logs only the info level
+-vvvv   Logs only the debug level
+```
+
+If only the `-v` flag is passed, the progress bars are disabled. If both `-v` and `--log-to-file` are passed, the progress bars will still be shown.
+
## How it works

The URL passed to Paperoni is fetched and the returned HTML response is passed to the extractor.
-This extractor retrieves a possible article using a port of the [Mozilla Readability algorithm](https://github.com/mozilla/readability). This article is then saved in an EPUB.
+This extractor retrieves a possible article using a [custom port](https://github.com/hipstermojo/paperoni/blob/master/src/moz_readability/mod.rs) of the [Mozilla Readability algorithm](https://github.com/mozilla/readability). This article is then saved in an EPUB.

> The port of the algorithm is still unstable as well so it is not fully compatible with all the websites that can be extracted using Readability.
@@ -82,3 +117,5 @@ This program is still in alpha so a number of things won't work:
- Websites that only run with JavaScript cannot be extracted.
- Website articles that cannot be extracted by Readability cannot be extracted by Paperoni either.
- Code snippets on Medium articles that are lazy loaded will not appear in the EPUB.
+
+There are also web pages it generally won't work on, such as Twitter and Reddit threads.
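As a quick illustration of the logging options documented in the README changes above, the `-v` and `--log-to-file` flags can be combined with a URL. This is an example invocation, not taken from the patch, assuming the flags behave exactly as described above:

```sh
# Example only: request info-level logging (-vvv) and write the log output to
# the default .paperoni/logs directory in the home directory; with both flags
# passed, the progress bars stay visible as described in the README.
paperoni -vvv --log-to-file https://en.wikipedia.org/wiki/Pepperoni
```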
diff --git a/src/cli.rs b/src/cli.rs
index 30c5367..f1f38bc 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -14,10 +14,7 @@ pub fn cli_init() -> AppConfig {
        ])
        .version(clap::crate_version!())
        .about(
-            "
-Paperoni is an article downloader.
-It takes a url, downloads the article content from it and saves it to an epub.
-    ",
+            "Paperoni is a CLI tool made in Rust for downloading web articles as EPUBs",
        )
        .arg(
            Arg::with_name("urls")
@@ -47,7 +44,7 @@ It takes a url, downloads the article content from it and saves it to an epub.
        Arg::with_name("verbosity")
            .short("v")
            .multiple(true)
-            .help("Enables logging of events and set the verbosity level. Use -h to read on its usage")
+            .help("Enables logging of events and sets the verbosity level. Use --help to read more on its usage")
            .long_help(
                "This takes upto 4 levels of verbosity in the following order.
- Error (-v)
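The `verbosity` argument above is declared with `.multiple(true)`, so clap counts how many times `-v` appears. To connect that count with the verbosity table documented in the README, here is a minimal sketch of how such a count could map to a log level. This is an illustration under assumptions, not paperoni's actual implementation: the helper name is invented and the use of the `log` crate's `LevelFilter` is assumed.

```rust
// Sketch only: translate the number of `-v` occurrences reported by clap
// into a log level, following the table in the README
// (-v = error, -vv = warn, -vvv = info, -vvvv = debug).
use log::LevelFilter;

fn verbosity_to_level(occurrences: u64) -> LevelFilter {
    match occurrences {
        0 => LevelFilter::Off,   // logging is disabled by default
        1 => LevelFilter::Error,
        2 => LevelFilter::Warn,
        3 => LevelFilter::Info,
        _ => LevelFilter::Debug, // four or more `-v` flags
    }
}
```

With clap 2.x, such a helper would typically be fed from `matches.occurrences_of("verbosity")`, which returns how many times `-v` was passed.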