Use logging in place of println

parent 6761fb0ade
commit 11cbcb2312

7 changed files with 29 additions and 19 deletions
@@ -23,7 +23,7 @@ Usage: S3Thorp [options]
 - [x] Improve test coverage
 - [ ] Create os-native binaries
-- [ ] Replace println with real logging
+- [x] Replace println with real logging
 - [ ] Add support for logging options
 - [ ] Add support for exclusion filters
 - [ ] Bulk fetching of Hash values from S3
src/main/scala/net/kemitix/s3thorp/Logging.scala (new file, +7)
@@ -0,0 +1,7 @@
+package net.kemitix.s3thorp
+
+import com.typesafe.scalalogging.LazyLogging
+
+trait Logging extends LazyLogging {
+
+}
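For reference, a minimal sketch of how a component picks up a logger by mixing in this trait; the Report object below is hypothetical and not part of the commit, and the logger member comes from scala-logging's LazyLogging.

    package net.kemitix.s3thorp

    // Hypothetical example, not in this commit: mixing in Logging exposes
    // the `logger` supplied by LazyLogging (an SLF4J-backed Logger).
    object Report extends Logging {
      def summary(count: Int): Unit =
        logger.info(s"Processed $count files")
    }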
@@ -6,9 +6,7 @@ import cats.effect.ExitCase.{Canceled, Completed, Error}
 import cats.effect.{ExitCode, IO, IOApp}
 import net.kemitix.s3thorp.awssdk.S3Client
 
-object Main extends IOApp {
+object Main extends IOApp with Logging {
 
-  def putStrLn(value: String) = IO { println(value) }
-
   val defaultConfig: Config =
     Config("(none)", "", Paths.get(".").toFile)
@@ -17,7 +15,7 @@ object Main extends IOApp {
 
   def program(args: List[String]): IO[ExitCode] =
     for {
-      _ <- putStrLn("S3Thorp - hashed sync for s3")
+      _ <- IO(logger.info("S3Thorp - hashed sync for s3"))
       a <- ParseArgs(args, defaultConfig)
       _ <- sync.run(a)
     } yield ExitCode.Success
@@ -25,9 +23,9 @@ object Main extends IOApp {
   override def run(args: List[String]): IO[ExitCode] =
     program(args)
       .guaranteeCase {
-        case Canceled => IO(println("Interrupted"))
-        case Error(e) => IO(println("ERROR: " + e.getMessage))
-        case Completed => IO(println("Done"))
+        case Canceled => IO(logger.warn("Interrupted"))
+        case Error(e) => IO(logger.error(e.getMessage))
+        case Completed => IO(logger.info("Done"))
       }
 
 }
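Note that in Main (and in S3Uploader below) the logger calls are wrapped in IO, so the side effect stays suspended inside the cats-effect program until it is run. A sketch of a helper that could centralise that wrapping; the IOLogging trait and its method names are hypothetical, not introduced by this commit.

    import cats.effect.IO
    import com.typesafe.scalalogging.LazyLogging

    // Hypothetical helper, not part of this commit: returns IO values that
    // perform the logging side effect only when the IO is evaluated.
    trait IOLogging extends LazyLogging {
      def logInfo(message: String): IO[Unit] = IO(logger.info(message))
      def logWarn(message: String): IO[Unit] = IO(logger.warn(message))
      def logError(message: String): IO[Unit] = IO(logger.error(message))
    }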
@@ -6,13 +6,16 @@ import fs2.Stream
 import cats.effect.IO
 import net.kemitix.s3thorp.awssdk.S3Client
 
-trait S3MetaDataEnricher extends S3Client with KeyGenerator {
+trait S3MetaDataEnricher
+  extends S3Client
+  with KeyGenerator
+  with Logging {
 
   def enrichWithS3MetaData(c: Config): File => Stream[IO, Either[File, S3MetaData]] = {
     val remoteKey = generateKey(c)_
     file =>
       Stream.eval({
-        println(s"- Consider: ${c.relativePath(file)}")
+        logger.info(s"- Consider: ${c.relativePath(file)}")
         val key = remoteKey(file)
         for {
           head <- objectHead(c.bucket, key)
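The enricher lifts the logging call and the S3 lookup into the stream via Stream.eval. A self-contained sketch of that pattern, reduced to its essentials; the StreamLogExample object and its method are illustrative only, not part of the commit.

    import cats.effect.IO
    import fs2.Stream
    import com.typesafe.scalalogging.LazyLogging

    // Illustrative only: an IO that logs and yields a value is lifted into
    // a single-element fs2 stream, mirroring the shape of enrichWithS3MetaData.
    object StreamLogExample extends LazyLogging {
      def considered(name: String): Stream[IO, String] =
        Stream.eval(IO {
          logger.info(s"- Consider: $name")
          name
        })
    }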
@@ -4,12 +4,12 @@ import java.io.File
 
 import fs2.Stream
 import cats.effect.IO
-import net.kemitix.s3thorp.Main.putStrLn
 import net.kemitix.s3thorp.awssdk.S3Client
 
 trait S3Uploader
   extends S3Client
-  with KeyGenerator {
+  with KeyGenerator
+  with Logging {
 
   def performUpload(c: Config): File => Stream[IO, Unit] = {
     val remoteKey = generateKey(c) _
@@ -17,9 +17,9 @@ trait S3Uploader
       val key = remoteKey(file)
       val shortFile = c.relativePath(file)
       Stream.eval(for {
-        _ <- putStrLn(s"  Upload: $shortFile")
+        _ <- IO(logger.info(s"  Upload: $shortFile"))
         _ <- upload(file, c.bucket, key)
-        _ <- putStrLn(s"  Done: $shortFile")
+        _ <- IO(logger.info(s"  Done: $shortFile"))
       } yield ())
     }
 }
@@ -11,7 +11,8 @@ class Sync(s3Client: S3Client)
   extends LocalFileStream
   with S3MetaDataEnricher
   with UploadSelectionFilter
-  with S3Uploader {
+  with S3Uploader
+  with Logging {
 
   override def objectHead(bucket: Bucket, remoteKey: RemoteKey)=
     s3Client.objectHead(bucket, remoteKey)
@@ -20,7 +21,7 @@ class Sync(s3Client: S3Client)
     s3Client.upload(localFile, bucket, remoteKey)
 
   def run(c: Config): IO[Unit] = {
-    println(s"Bucket: ${c.bucket}, Prefix: ${c.prefix}, Source: ${c.source}")
+    logger.info(s"Bucket: ${c.bucket}, Prefix: ${c.prefix}, Source: ${c.source}")
     streamDirectoryPaths(c.source).flatMap(
       enrichWithS3MetaData(c)).flatMap(
       uploadRequiredFilter(c)).flatMap(
@@ -6,7 +6,8 @@ import net.kemitix.s3thorp.Sync.{MD5Hash, LocalFile}
 import java.security.{MessageDigest, DigestInputStream}
 import java.io.{File, FileInputStream}
 
-trait UploadSelectionFilter {
+trait UploadSelectionFilter
+  extends Logging {
 
   private def md5File(localFile: LocalFile): MD5Hash = {
     val buffer = new Array[Byte](8192)
@@ -20,7 +21,7 @@ trait UploadSelectionFilter {
 
   def uploadRequiredFilter(c: Config): Either[File, S3MetaData] => Stream[IO, File] = {
     case Left(file) => {
-      println(s"   Created: ${c.relativePath(file)}")
+      logger.info(s"   Created: ${c.relativePath(file)}")
       Stream(file)
     }
     case Right(s3Metadata) =>
@@ -30,7 +31,7 @@ trait UploadSelectionFilter {
       filter { case (_, localHash) => localHash != s3Metadata.remoteHash }.
         map {
           case (localFile,_) => {
-            println(s"   Updated: ${c.relativePath(localFile)}")
+            logger.info(s"   Updated: ${c.relativePath(localFile)}")
            localFile
          }
        }
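The new import of com.typesafe.scalalogging.LazyLogging implies the scala-logging library plus an SLF4J backend on the classpath. No build change appears among the seven files in this diff, so the dependency is presumably already present; the build.sbt lines below are only an assumed sketch of what it would need, with illustrative version numbers.

    // build.sbt (assumed, not part of this commit): scala-logging and an
    // SLF4J backend such as Logback so that logger output is actually emitted.
    libraryDependencies ++= Seq(
      "com.typesafe.scala-logging" %% "scala-logging"   % "3.9.2",
      "ch.qos.logback"             %  "logback-classic" % "1.2.3"
    )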