Basic backup functionality
This commit is contained in:
57
src/MyLib.hs
57
src/MyLib.hs
@@ -1,16 +1,33 @@
|
||||
{-# LANGUAGE TypeOperators #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE TypeApplications #-}
|
||||
{-# LANGUAGE DeriveGeneric #-}
|
||||
module MyLib (someFunc) where
|
||||
{-# LANGUAGE TupleSections #-}
|
||||
{-# LANGUAGE DataKinds #-}
|
||||
module MyLib where
|
||||
|
||||
import Network.Wai.Metrics (metrics, registerWaiMetrics)
|
||||
import System.Metrics
|
||||
(Store, newStore, registerGcMetrics)
|
||||
import Servant.Metrics.Prometheus
|
||||
import Network.Wai.Handler.Warp (runSettings, defaultSettings, setPort)
|
||||
import Data.Function ((&))
|
||||
import Servant
|
||||
import GHC.Generics (Generic)
|
||||
import Conduit (runConduit, (.|), MonadUnliftIO, liftIO, ConduitT, MonadResource)
|
||||
import System.FilePath (isExtensionOf, takeBaseName)
|
||||
import qualified Data.Conduit.Combinators as C
|
||||
import Crypto.Hash (HashAlgorithm, Digest, SHA256)
|
||||
import qualified Crypto.Hash as Crypto
|
||||
import Amazonka (newEnv, Region(..), Env, Env'(..), setEndpoint, runResourceT, send, chunkedFile, defaultChunkSize)
|
||||
import Amazonka.Auth (discover)
|
||||
import qualified Amazonka.S3 as S3
|
||||
import qualified Data.Text as T
|
||||
import Control.Lens (set, (&))
|
||||
import Data.Generics.Product (field)
|
||||
import Control.Monad (void)
|
||||
import Data.HashMap.Strict (HashMap)
|
||||
import Data.Text (Text)
|
||||
import qualified Data.HashMap.Strict as HM
|
||||
|
||||
-- | Record-of-routes for the servant API; currently it exposes only the
-- Prometheus metrics endpoint.
newtype Routes route = Routes
  { _getMetrics :: route :- MetricsRoute
  }
|
||||
@@ -21,8 +38,44 @@ type API = NamedRoutes Routes
|
||||
-- | Build the WAI 'Application' that serves the metrics API, reading
-- gauges and counters out of the given EKG 'Store'.
app :: Store -> Application
app store = serve (Proxy @API) handlers
  where
    handlers = Routes { _getMetrics = metricsServer store }
|
||||
|
||||
-- | Compute the digest of a file by streaming it through a conduit, so
-- the whole file is never held in memory at once.
hash :: (HashAlgorithm a, MonadUnliftIO m) => FilePath -> m (Digest a)
hash fp = C.withSourceFile fp digestSource
  where
    -- Fold every chunk into the incremental hash context, then finalize.
    digestSource src =
      Crypto.hashFinalize
        <$> runConduit (src .| C.foldl Crypto.hashUpdate Crypto.hashInit)
|
||||
|
||||
-- | A picture discovered on disk, identified by the hash of its contents.
data Image = Image
  { imagePath :: !FilePath
    -- ^ Location of the file on disk.
  , imageHash :: !(Digest SHA256)
    -- ^ SHA-256 digest of the file contents.
  }
  deriving Show
|
||||
|
||||
-- | Render an image's content hash as S3 object metadata under the
-- @sha256@ key.
imageToMetadata :: Image -> HashMap Text Text
imageToMetadata img = HM.singleton "sha256" digestText
  where
    digestText = T.pack (show (imageHash img))
|
||||
|
||||
-- | Upload a single image to the given bucket. The object key is the
-- file's base name, and the SHA-256 digest travels along as object
-- metadata so the stored copy can be verified later.
uploadImage :: Env -> S3.BucketName -> Image -> IO S3.PutObjectResponse
uploadImage env bucket img = do
  body <- chunkedFile defaultChunkSize (imagePath img)
  let key = S3.ObjectKey (T.pack (takeBaseName (imagePath img)))
      -- NOTE(review): content type is fixed to JPEG; presumably only
      -- ".jpg" files reach this point (see the scan predicate) — confirm.
      request =
        S3.newPutObject bucket key body
          & set (field @"metadata") (imageToMetadata img)
          & set (field @"contentType") (Just "image/jpeg")
  runResourceT (send env request)
|
||||
|
||||
-- | Walk the directory tree under @root@ (the 'False' means directory
-- symlinks are not followed) and yield every path the predicate accepts.
scan :: (MonadResource m) => FilePath -> (FilePath -> Bool) -> ConduitT () FilePath m ()
scan root keep =
  C.sourceDirectoryDeep False root .| C.filter keep
|
||||
|
||||
-- | Turn each incoming path into an 'Image' by hashing the file contents.
analyze :: (MonadUnliftIO m) => ConduitT FilePath Image m ()
analyze = C.mapM toImage
  where
    toImage path = Image path <$> hash path
|
||||
|
||||
someFunc :: IO ()
|
||||
someFunc = do
|
||||
discoveredEnv <- newEnv discover
|
||||
let env = discoveredEnv
|
||||
{ region = Region' "fr-par"
|
||||
, overrides = setEndpoint True "s3.fr-par.scw.cloud" 443
|
||||
}
|
||||
bucket = S3.BucketName "introitu-photoprism"
|
||||
_ <- runResourceT $ runConduit $ scan "/home/masse/wikidata/" (isExtensionOf ".jpg") .| analyze .| C.iterM (liftIO . print) .| C.mapM_ (void . liftIO . uploadImage env bucket)
|
||||
putStrLn "done"
|
||||
store <- newStore
|
||||
waiMetrics <- registerWaiMetrics store
|
||||
registerGcMetrics store
|
||||
|
||||
Reference in New Issue
Block a user