Use logging in place of println

parent 6761fb0ade
commit 11cbcb2312

7 changed files with 29 additions and 19 deletions
@@ -23,7 +23,7 @@ Usage: S3Thorp [options]
 - [x] Improve test coverage
 - [ ] Create os-native binaries
-- [ ] Replace println with real logging
+- [x] Replace println with real logging
 - [ ] Add support for logging options
 - [ ] Add support for exclusion filters
 - [ ] Bulk fetching of Hash values from S3
src/main/scala/net/kemitix/s3thorp/Logging.scala (new file, 7 lines)

@@ -0,0 +1,7 @@
+package net.kemitix.s3thorp
+
+import com.typesafe.scalalogging.LazyLogging
+
+trait Logging extends LazyLogging {
+
+}
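The new trait is deliberately empty: mixing it in is all a component needs to do to gain a logger field, which LazyLogging creates lazily and names after the concrete class. A minimal usage sketch (the example object is hypothetical, not part of this commit):

package net.kemitix.s3thorp

// Hypothetical example: any object or class mixing in Logging
// inherits the lazily-created `logger` from LazyLogging.
object LoggingExample extends Logging {
  def main(args: Array[String]): Unit = {
    logger.info("logger is available via the Logging mix-in")
    logger.debug("output level is controlled by the logging backend")
  }
}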
@@ -6,9 +6,7 @@ import cats.effect.ExitCase.{Canceled, Completed, Error}
 import cats.effect.{ExitCode, IO, IOApp}
 import net.kemitix.s3thorp.awssdk.S3Client
 
-object Main extends IOApp {
-
-  def putStrLn(value: String) = IO { println(value) }
+object Main extends IOApp with Logging {
 
   val defaultConfig: Config =
     Config("(none)", "", Paths.get(".").toFile)
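The removed putStrLn helper suspended println in IO; the logger calls that replace it are side effects too, so they get the same treatment throughout this commit. A sketch of the substitution, assuming scalalogging and cats-effect as used above (the object and method names are hypothetical):

import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging

object LogInIO extends LazyLogging {  // hypothetical demo object
  // the helper this commit removes:
  def putStrLn(value: String): IO[Unit] = IO { println(value) }
  // its replacement: the same suspension, delegating to the logger
  def logInfo(message: String): IO[Unit] = IO(logger.info(message))
}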
@@ -17,7 +15,7 @@ object Main extends IOApp {
 
   def program(args: List[String]): IO[ExitCode] =
     for {
-      _ <- putStrLn("S3Thorp - hashed sync for s3")
+      _ <- IO(logger.info("S3Thorp - hashed sync for s3"))
       a <- ParseArgs(args, defaultConfig)
       _ <- sync.run(a)
     } yield ExitCode.Success
@@ -25,9 +23,9 @@ object Main extends IOApp {
   override def run(args: List[String]): IO[ExitCode] =
     program(args)
       .guaranteeCase {
-        case Canceled => IO(println("Interrupted"))
-        case Error(e) => IO(println("ERROR: " + e.getMessage))
-        case Completed => IO(println("Done"))
+        case Canceled => IO(logger.warn("Interrupted"))
+        case Error(e) => IO(logger.error(e.getMessage))
+        case Completed => IO(logger.info("Done"))
       }
 
 }
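guaranteeCase runs a finaliser after the wrapped IO finishes, and the ExitCase it receives maps naturally onto log levels: warn for cancellation, error for failure, info for normal completion. A self-contained sketch of the pattern, assuming the cats-effect 1.x/2.x API used here (the object name is hypothetical):

import cats.effect.ExitCase.{Canceled, Completed, Error}
import cats.effect.{ExitCode, IO}
import com.typesafe.scalalogging.LazyLogging

object GuaranteeCaseSketch extends LazyLogging {  // hypothetical demo
  // log the outcome of any program, whichever way it ends
  def withOutcomeLogging(program: IO[ExitCode]): IO[ExitCode] =
    program.guaranteeCase {
      case Canceled  => IO(logger.warn("Interrupted"))
      case Error(e)  => IO(logger.error(e.getMessage))
      case Completed => IO(logger.info("Done"))
    }
}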
@@ -6,13 +6,16 @@ import fs2.Stream
 import cats.effect.IO
 import net.kemitix.s3thorp.awssdk.S3Client
 
-trait S3MetaDataEnricher extends S3Client with KeyGenerator {
+trait S3MetaDataEnricher
+  extends S3Client
+  with KeyGenerator
+  with Logging {
 
   def enrichWithS3MetaData(c: Config): File => Stream[IO, Either[File, S3MetaData]] = {
     val remoteKey = generateKey(c)_
     file =>
       Stream.eval({
-        println(s"- Consider: ${c.relativePath(file)}")
+        logger.info(s"- Consider: ${c.relativePath(file)}")
         val key = remoteKey(file)
         for {
           head <- objectHead(c.bucket, key)
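One subtlety in the hunk above: logger.info is called inside the block passed to Stream.eval, before the IO it returns is built, so the log line fires when the block is evaluated rather than as a suspended step of the IO. A minimal sketch that keeps the call suspended, assuming fs2 and cats-effect as above (the object is hypothetical):

import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import fs2.Stream

object StreamLogging extends LazyLogging {  // hypothetical demo object
  // the log effect is wrapped in IO, so it runs only when the
  // stream is executed, and emits the path it logged about
  def considered(path: String): Stream[IO, String] =
    Stream.eval(IO(logger.info(s"- Consider: $path")).map(_ => path))
}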
@@ -4,12 +4,12 @@ import java.io.File
 
 import fs2.Stream
 import cats.effect.IO
-import net.kemitix.s3thorp.Main.putStrLn
 import net.kemitix.s3thorp.awssdk.S3Client
 
 trait S3Uploader
   extends S3Client
-  with KeyGenerator {
+  with KeyGenerator
+  with Logging {
 
   def performUpload(c: Config): File => Stream[IO, Unit] = {
     val remoteKey = generateKey(c) _
@@ -17,9 +17,9 @@ trait S3Uploader
       val key = remoteKey(file)
       val shortFile = c.relativePath(file)
       Stream.eval(for {
-        _ <- putStrLn(s" Upload: $shortFile")
+        _ <- IO(logger.info(s" Upload: $shortFile"))
         _ <- upload(file, c.bucket, key)
-        _ <- putStrLn(s" Done: $shortFile")
+        _ <- IO(logger.info(s" Done: $shortFile"))
       } yield ())
     }
 }
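Here two suspended log effects bracket the upload inside a single Stream.eval, so "Upload" is logged before the transfer starts and "Done" only after it completes. A self-contained sketch of that bracketing (names are hypothetical stand-ins):

import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import fs2.Stream

object UploadLogging extends LazyLogging {  // hypothetical demo object
  // sequence log / transfer / log as one IO, lifted into a Stream
  def upload(shortFile: String, transfer: IO[Unit]): Stream[IO, Unit] =
    Stream.eval(for {
      _ <- IO(logger.info(s" Upload: $shortFile"))
      _ <- transfer
      _ <- IO(logger.info(s" Done: $shortFile"))
    } yield ())
}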
@@ -11,7 +11,8 @@ class Sync(s3Client: S3Client)
   extends LocalFileStream
     with S3MetaDataEnricher
     with UploadSelectionFilter
-    with S3Uploader {
+    with S3Uploader
+    with Logging {
 
   override def objectHead(bucket: Bucket, remoteKey: RemoteKey)=
     s3Client.objectHead(bucket, remoteKey)
@@ -20,7 +21,7 @@ class Sync(s3Client: S3Client)
     s3Client.upload(localFile, bucket, remoteKey)
 
   def run(c: Config): IO[Unit] = {
-    println(s"Bucket: ${c.bucket}, Prefix: ${c.prefix}, Source: ${c.source}")
+    logger.info(s"Bucket: ${c.bucket}, Prefix: ${c.prefix}, Source: ${c.source}")
     streamDirectoryPaths(c.source).flatMap(
       enrichWithS3MetaData(c)).flatMap(
         uploadRequiredFilter(c)).flatMap(
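Every trait in Sync's stack now declares `with Logging`, which is safe because Scala linearizes repeated mix-ins: the concrete class ends up with one LazyLogging instance and one logger, named after that class at runtime. A sketch with hypothetical stand-ins for the real traits:

import com.typesafe.scalalogging.LazyLogging

trait Logging extends LazyLogging  // as in the new file above

trait Enricher extends Logging {
  def consider(path: String): Unit = logger.info(s"- Consider: $path")
}

trait Uploader extends Logging {
  def done(path: String): Unit = logger.info(s" Done: $path")
}

// one logger, named "SyncLike", serves both traits
class SyncLike extends Enricher with Uploader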
@@ -6,7 +6,8 @@ import net.kemitix.s3thorp.Sync.{MD5Hash, LocalFile}
 import java.security.{MessageDigest, DigestInputStream}
 import java.io.{File, FileInputStream}
 
-trait UploadSelectionFilter {
+trait UploadSelectionFilter
+  extends Logging {
 
   private def md5File(localFile: LocalFile): MD5Hash = {
     val buffer = new Array[Byte](8192)
@@ -20,7 +21,7 @@ trait UploadSelectionFilter {
 
   def uploadRequiredFilter(c: Config): Either[File, S3MetaData] => Stream[IO, File] = {
     case Left(file) => {
-      println(s" Created: ${c.relativePath(file)}")
+      logger.info(s" Created: ${c.relativePath(file)}")
       Stream(file)
     }
     case Right(s3Metadata) =>
@@ -30,7 +31,7 @@ trait UploadSelectionFilter {
         filter { case (_, localHash) => localHash != s3Metadata.remoteHash }.
         map {
           case (localFile,_) => {
-            println(s" Updated: ${c.relativePath(localFile)}")
+            logger.info(s" Updated: ${c.relativePath(localFile)}")
             localFile
          }
        }
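For context, the filter decides between "Created" (no remote object) and "Updated" (hashes differ) by comparing a local MD5 against the remote hash. A sketch of what a helper like md5File can look like, built only from the imports shown in the hunk; this is an assumption, not the project's actual implementation:

import java.io.{File, FileInputStream}
import java.security.{DigestInputStream, MessageDigest}

object Md5Sketch {  // hypothetical demo object
  def md5File(file: File): String = {
    val md5    = MessageDigest.getInstance("MD5")
    val buffer = new Array[Byte](8192)
    // DigestInputStream updates the digest as the file is read through it
    val dis = new DigestInputStream(new FileInputStream(file), md5)
    try { while (dis.read(buffer) != -1) () } finally dis.close()
    // hex-encode the 16-byte digest
    md5.digest.map("%02x".format(_)).mkString
  }
}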