Consistent logging format

This commit is contained in:
Paul Campbell 2019-05-10 08:52:39 +01:00
parent 27d1d1b99f
commit 4c1cf89d51
5 changed files with 21 additions and 6 deletions

View file

@@ -1,5 +1,8 @@
package net.kemitix.s3thorp package net.kemitix.s3thorp
import java.io.File
import java.nio.file.Path
import net.kemitix.s3thorp.Sync.{Bucket, LocalFile} import net.kemitix.s3thorp.Sync.{Bucket, LocalFile}
case class Config(bucket: Bucket = "", case class Config(bucket: Bucket = "",
@@ -7,4 +10,6 @@ case class Config(bucket: Bucket = "",
source: LocalFile source: LocalFile
) { ) {
def relativePath(file: File): Path = source.toPath.relativize(file.toPath)
} }

View file

@@ -12,6 +12,7 @@ trait S3MetaDataEnricher extends S3Client with KeyGenerator {
val remoteKey = generateKey(c)_ val remoteKey = generateKey(c)_
file => file =>
Stream.eval({ Stream.eval({
println(s"- Consider: ${c.relativePath(file)}")
val key = remoteKey(file) val key = remoteKey(file)
for { for {
head <- objectHead(c.bucket, key) head <- objectHead(c.bucket, key)

View file

@@ -15,10 +15,11 @@ trait S3Uploader
val remoteKey = generateKey(c) _ val remoteKey = generateKey(c) _
file => { file => {
val key = remoteKey(file) val key = remoteKey(file)
val shortFile = c.source.toPath.relativize(file.toPath) val shortFile = c.relativePath(file)
Stream.eval(for { Stream.eval(for {
_ <- putStrLn(s"Uploading: $shortFile") _ <- putStrLn(s" Upload: $shortFile")
_ <- upload(file, c.bucket, key) _ <- upload(file, c.bucket, key)
_ <- putStrLn(s" Done: $shortFile")
} yield ()) } yield ())
} }
} }

View file

@@ -25,7 +25,7 @@ class Sync(s3Client: S3Client)
_ <- { _ <- {
streamDirectoryPaths(c.source).flatMap( streamDirectoryPaths(c.source).flatMap(
enrichWithS3MetaData(c)).flatMap( enrichWithS3MetaData(c)).flatMap(
uploadRequiredFilter).flatMap( uploadRequiredFilter(c)).flatMap(
performUpload(c)).compile.drain performUpload(c)).compile.drain
} }
} yield () } yield ()

View file

@@ -18,14 +18,22 @@ trait UploadSelectionFilter {
md5.digest.map("%02x".format(_)).mkString md5.digest.map("%02x".format(_)).mkString
} }
def uploadRequiredFilter: Either[File, S3MetaData] => Stream[IO, File] = { def uploadRequiredFilter(c: Config): Either[File, S3MetaData] => Stream[IO, File] = {
case Left(file) => Stream(file) case Left(file) => {
println(s" Created: ${c.relativePath(file)}")
Stream(file)
}
case Right(s3Metadata) => case Right(s3Metadata) =>
Stream.eval(for { Stream.eval(for {
localHash <- IO(md5File(s3Metadata.localFile)) localHash <- IO(md5File(s3Metadata.localFile))
} yield (s3Metadata.localFile, localHash)). } yield (s3Metadata.localFile, localHash)).
filter { case (_, localHash) => localHash != s3Metadata.remoteHash }. filter { case (_, localHash) => localHash != s3Metadata.remoteHash }.
map {case (localFile,_) => localFile} map {
case (localFile,_) => {
println(s" Updated: ${c.relativePath(localFile)}")
localFile
}
}
} }
} }