Compare commits: 94f4593fdc...main (9 commits)

Commits:
d253cbaa82
c0dbc5f0d2
ce3773b3ee
03b4cfb3bf
0266d4b06b
9f3196cd1f
c9a7d79bcf
21013d7e40
9002d3424c
cabal.project (new file, 1 line)
@@ -0,0 +1 @@
+packages: */*
flake.lock (generated, 33 changed lines)
@@ -1,32 +1,12 @@
 {
   "nodes": {
-    "easy-hls": {
-      "inputs": {
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1637250802,
-        "narHash": "sha256-/crlHEVB148PGQLZCsHOR9L5qgvCAfRSocIoKgmMAhA=",
-        "owner": "jkachmar",
-        "repo": "easy-hls-nix",
-        "rev": "7c123399ef8a67dc0e505d9cf7f2c7f64f1cd847",
-        "type": "github"
-      },
-      "original": {
-        "owner": "jkachmar",
-        "repo": "easy-hls-nix",
-        "type": "github"
-      }
-    },
     "flake-utils": {
       "locked": {
-        "lastModified": 1644229661,
-        "narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=",
+        "lastModified": 1649676176,
+        "narHash": "sha256-OWKJratjt2RW151VUlJPRALb7OU2S5s+f0vLj4o1bHM=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797",
+        "rev": "a4b154ebbdc88c8498a5c7b01589addc9e9cb678",
         "type": "github"
       },
       "original": {
@@ -37,11 +17,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1644453163,
-        "narHash": "sha256-VPlXtIsShceYHUspnJSvuvucrX1OWVFmrqiofn69yCM=",
+        "lastModified": 1650440614,
+        "narHash": "sha256-7mF7gyS5P3UmZmQuo9jbikP2wMyRUnimI7HcKLJ9OZQ=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "7e003d7fb9eff8ecb84405360c75c716cdd1f79f",
+        "rev": "0bbb65673c0ba31047c9ba6c4cd211556b534a4e",
         "type": "github"
       },
       "original": {
@@ -51,7 +31,6 @@
     },
     "root": {
       "inputs": {
-        "easy-hls": "easy-hls",
         "flake-utils": "flake-utils",
         "nixpkgs": "nixpkgs"
       }
flake.nix (19 changed lines)
@@ -2,22 +2,20 @@
   description = "A very basic flake";

   inputs = {
-    easy-hls = {
-      url = "github:jkachmar/easy-hls-nix";
-      inputs.nixpkgs.follows = "nixpkgs";
-    };
     flake-utils = {
       url = "github:numtide/flake-utils";
       inputs.nixpkgs.follows = "nixpkgs";
     };
   };

-  outputs = { self, nixpkgs, flake-utils, easy-hls }:
+  outputs = { self, nixpkgs, flake-utils }:
     {
       overlay = final: prev: {
         haskellPackages = prev.haskellPackages.override ( old: {
-          overrides = final.lib.composeExtensions ( old.overrides or (_: _: {})) (f: p: {
-            reddit-pub = f.callPackage ./. {};
+          overrides = final.lib.composeExtensions ( old.overrides or (_: _: {})) (f: p: rec {
+            reddit-pub = f.callPackage ./reddit_pub {};
+            reddit-lib = f.callPackage ./reddit_lib {};
+            reddit-tags = f.callPackage ./reddit_tags {};
           });
         } );
       };
@@ -27,18 +25,18 @@
       let
         pkgs = import nixpkgs { inherit system; overlays = [ self.overlay ]; };
         hp = pkgs.haskellPackages;
-        hls = (easy-hls.withGhcs [ hp.ghc.version ] ).${system};
       in
       rec {

         packages.reddit-pub = pkgs.haskell.lib.justStaticExecutables hp.reddit-pub;
         packages.reddit-pub-dhall = pkgs.dhallPackages.callPackage ./dhall.nix {};
+        packages.reddit-tags = pkgs.haskell.lib.justStaticExecutables hp.reddit-tags;

         defaultPackage = packages.reddit-pub;

         devShell =
           hp.shellFor {
-            packages = h: [h.reddit-pub];
+            packages = h: [h.reddit-pub h.reddit-tags];
             withHoogle = true;
             buildInputs = with pkgs; [
               dhall-lsp-server
@@ -48,7 +46,6 @@
               hp.hlint
               stylish-haskell
               ghcid
-              hls
               rrdtool
               jq
               sqlite-interactive
@@ -58,6 +55,8 @@
               hp.graphmod

               hp.dhall-nixpkgs
+
+              hp.haskell-language-server
             ];
           };
       }
rabbitmq.nix (16 changed lines)
@@ -1,11 +1,27 @@
 { lib, config, pkgs, ... }:

+let
+  cookie = "dontusethisinprod";
+  setup = pkgs.writeScriptBin "setup-rabbitmq.sh" ''
+    rabbitmqctl --erlang-cookie "${cookie}" add_vhost reddit
+    rabbitmqctl --erlang-cookie "${cookie}" add_user admin_user password
+    rabbitmqctl --erlang-cookie "${cookie}" set_user_tags admin_user administrator
+    rabbitmqctl --erlang-cookie "${cookie}" set_permissions -p / admin_user ".*" ".*" ".*"
+    rabbitmqctl --erlang-cookie "${cookie}" set_permissions -p reddit admin_user ".*" ".*" ".*"
+
+    rabbitmqctl --erlang-cookie "${cookie}" add_user reddit_user password
+    rabbitmqctl --erlang-cookie "${cookie}" set_permissions -p reddit reddit_user ".*" ".*" ".*"
+  '';
+in
 {
   services.rabbitmq = {
     enable = true;
     listenAddress = "0.0.0.0";
     managementPlugin.enable = true;
+    cookie = "dontusethisinprod";
   };
   networking.firewall.allowedTCPPorts = [ 5672 15672 ];

+  environment.systemPackages = [ setup ];
 }
reddit_lib/CHANGELOG.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+# Revision history for reddit-lib
+
+## 0.1.0.0 -- YYYY-mm-dd
+
+* First version. Released on an unsuspecting world.
reddit_lib/default.nix (new file, 10 lines)
@@ -0,0 +1,10 @@
+{ mkDerivation, base, lib }:
+mkDerivation {
+  pname = "reddit-lib";
+  version = "0.1.0.0";
+  src = ./.;
+  libraryHaskellDepends = [ base ];
+  testHaskellDepends = [ base ];
+  license = "unknown";
+  hydraPlatforms = lib.platforms.none;
+}
reddit_lib/reddit-lib.cabal (new file, 38 lines)
@@ -0,0 +1,38 @@
+cabal-version:      3.0
+name:               reddit-lib
+version:            0.1.0.0
+synopsis:
+
+-- A longer description of the package.
+-- description:
+homepage:
+
+-- A URL where users can report bugs.
+-- bug-reports:
+license:            NONE
+author:             Mats Rauhala
+maintainer:         mats.rauhala@iki.fi
+
+-- A copyright notice.
+-- copyright:
+category:           Web
+extra-source-files: CHANGELOG.md
+
+library
+    exposed-modules:  Reddit.Publish
+
+    -- Modules included in this library but not exported.
+    -- other-modules:
+
+    -- LANGUAGE extensions used by modules in this package.
+    -- other-extensions:
+    build-depends:    base ^>=4.15.1.0
+    hs-source-dirs:   src
+    default-language: Haskell2010
+
+test-suite reddit-lib-test
+    default-language: Haskell2010
+    type:             exitcode-stdio-1.0
+    hs-source-dirs:   test
+    main-is:          MyLibTest.hs
+    build-depends:    base ^>=4.15.1.0
reddit_lib/src/Reddit/Publish.hs
@@ -1,5 +1,5 @@
 {-# LANGUAGE DerivingVia #-}
-module Publish where
+module Reddit.Publish where

 import Data.Functor.Contravariant
 import Data.Monoid (Ap(..))
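Only the module rename is visible in this hunk; the body of Reddit.Publish is unchanged and not shown. Judging from the DerivingVia pragma, the Contravariant and Ap imports, and the way reddit_pub consumes the type (Publish wraps an a -> IO () action, is adapted with >$< and combined with <>), the module plausibly amounts to the following. This is a reconstruction for orientation, not the committed source:

{-# LANGUAGE DerivingVia #-}
module Reddit.Publish where

import Data.Functor.Contravariant
import Data.Monoid (Ap (..))

-- A publisher is an effectful consumer of values of type 'a'.
newtype Publish m a = Publish { publish :: a -> m () }
  -- Ap lifts the () Monoid through an Applicative, so (<>) on publishers
  -- runs both actions on the same input.
  deriving (Semigroup, Monoid) via (a -> Ap m ())

-- f >$< p adapts a publisher to a different input type.
instance Contravariant (Publish m) where
  contramap f (Publish p) = Publish (p . f)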
reddit_lib/test/MyLibTest.hs (new file, 4 lines)
@@ -0,0 +1,4 @@
+module Main (main) where
+
+main :: IO ()
+main = putStrLn "Test suite not yet implemented."
reddit_pub/default.nix
@@ -1,6 +1,6 @@
 { mkDerivation, aeson, amqp, base, bytestring, containers, dhall
 , hedgehog, hspec, hspec-hedgehog, lens, lens-aeson, lib, mtl
-, pipes, sqlite-simple, text, wreq
+, pipes, reddit-lib, sqlite-simple, text, wreq
 }:
 mkDerivation {
   pname = "reddit-pub";
@@ -8,9 +8,10 @@ mkDerivation {
   src = ./.;
   isLibrary = true;
   isExecutable = true;
+  enableSeparateDataOutput = true;
   libraryHaskellDepends = [
     aeson amqp base bytestring containers dhall lens lens-aeson mtl
-    pipes sqlite-simple text wreq
+    pipes reddit-lib sqlite-simple text wreq
   ];
   executableHaskellDepends = [ base ];
   testHaskellDepends = [
reddit_pub/reddit-pub.cabal
@@ -32,7 +32,6 @@ library
        Data.Deriving.Aeson
        Network.Reddit
        Data.SubReddit
-       Publish
        Membership

    -- Modules included in this library but not exported.
@@ -40,7 +39,7 @@ library

    -- LANGUAGE extensions used by modules in this package.
    -- other-extensions:
-   build-depends:    base ^>=4.14.1.0
+   build-depends:    base ^>=4.15.1.0
                    , amqp
                    , aeson
                    , lens
@@ -53,6 +52,7 @@ library
                    , pipes
                    , containers
                    , sqlite-simple
+                   , reddit-lib
    hs-source-dirs:   src
    default-language: Haskell2010

@@ -65,7 +65,7 @@ executable reddit-pub
    -- LANGUAGE extensions used by modules in this package.
    -- other-extensions:
    build-depends:
-       base ^>=4.14.1.0,
+       base ^>=4.15.1.0,
        reddit-pub

    hs-source-dirs:   app
@@ -82,7 +82,7 @@ test-suite reddit-tests
    -- LANGUAGE extensions used by modules in this package.
    -- other-extensions:
    build-depends:
-       base ^>=4.14.1.0,
+       base ^>=4.15.1.0,
        mtl,
        containers,
        bytestring,
reddit_pub/src/MyLib.hs
@@ -2,20 +2,26 @@
 {-# LANGUAGE DeriveAnyClass #-}
 {-# LANGUAGE DeriveGeneric #-}
 {-# LANGUAGE DerivingVia #-}
 {-# LANGUAGE NumericUnderscores #-}
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE TypeOperators #-}
+{-# LANGUAGE LambdaCase #-}
 module MyLib (defaultMain) where

 import Control.Concurrent (threadDelay)
 import Control.Exception (bracket)
 import Control.Lens
-import Control.Monad (void)
+import Control.Monad (forever, void)
 import Data.Aeson (FromJSON, ToJSON, Value)
+import qualified Data.Aeson as A
+import Data.Aeson.Lens (_String, key)
+import Data.Bool (bool)
 import Data.Config
 import Data.Deriving.Aeson
 import Data.Foldable (for_)
 import Data.Functor.Contravariant ((>$<))
 import Data.Text (Text)
+import qualified Data.Text.IO as TI
 import qualified Data.Text.Strict.Lens as T
 import qualified Database.SQLite.Simple as SQL
 import GHC.Generics (Generic)
@@ -23,19 +29,23 @@ import qualified Membership
 import Network.AMQP
   ( Channel
   , DeliveryMode(Persistent)
+  , closeConnection
+  , declareExchange
   , exchangeName
   , exchangeType
   , msgBody
   , msgDeliveryMode
+  , newExchange
   , newMsg
-  , publishMsg, openConnection, closeConnection, openChannel, newExchange, declareExchange
+  , openChannel
+  , openConnection
+  , publishMsg
   )
-import Network.Reddit (RedditId (RedditId), publishEntries)
+import Network.Reddit (RedditId(RedditId), publishEntries)
 import Network.Wreq.Session (newSession)
-import Publish (Publish(..))
-import Data.Aeson.Lens (key, _String)
-import Data.Bool (bool)
-import qualified Data.Text.IO as TI
+import Reddit.Publish (Publish(..))
+import Text.Printf (printf)
+import Data.ByteString.Lazy (ByteString)

 data MessageType = Create | Update
   deriving stock (Show, Eq, Generic)
@@ -50,17 +60,23 @@ data Message = Message
   deriving (ToJSON, FromJSON)
     via AesonCodec (Field (CamelCase <<< DropPrefix "message")) Message

-toMessage :: Value -> Message
-toMessage entry = Message Create (RedditId (entry ^. key "id" . _String)) entry
-sqlRecorder :: SQL.Connection -> Publish IO RedditId
-sqlRecorder conn = Publish $ Membership.recordSeen conn
+toMessage :: SQL.Connection -> Publish IO (Maybe Message) -> Publish IO Value
+toMessage sqlConn (Publish p) = Publish $ \entry -> do
+  case RedditId <$> (entry ^? key "id" . _String) of
+    Nothing -> p Nothing
+    Just redditId -> do
+      event <- bool Create Update <$> Membership.isSeen sqlConn redditId
+      p $ Just $ Message event redditId entry

-amqpPublisher :: SQL.Connection -> Channel -> Text -> Publish IO Message
-amqpPublisher sqlConn channel exchange = Publish $ \msg -> do
-  seen <- Membership.isSeen sqlConn (messageIdentifier msg)
-  let msg' = msg{messageType = bool Create Update seen}
-  void $ publishMsg channel exchange routingKey (message (A.encode msg'))
+sqlRecorder :: SQL.Connection -> Publish IO (Maybe RedditId)
+sqlRecorder conn = Publish $ maybe (pure ()) (Membership.recordSeen conn)
+
+amqpPublisher :: Channel -> Text -> Publish IO (Maybe ByteString)
+amqpPublisher channel exchange = Publish $ \case
+  Nothing -> pure ()
+  Just lbs ->
+    void $ publishMsg channel exchange routingKey (message lbs)
   where
     routingKey = "doesn't matter on fanout"
     message lbs = newMsg
@@ -68,6 +84,23 @@ amqpPublisher sqlConn channel exchange = Publish $ \msg -> do
       , msgDeliveryMode = Just Persistent
       }

+stdoutPublisher :: Publish IO String
+stdoutPublisher = Publish putStrLn
+
+data Fetch
+  = Fetch Fetcher
+  | PublishMessage Message
+  | ParseFailed
+
+fetchToLog :: Fetch -> String
+fetchToLog (Fetch fetcher) = printf "Refreshing %s" (show $ fetcherSubreddit fetcher)
+fetchToLog ParseFailed = printf "Failed parsing"
+fetchToLog (PublishMessage msg) = messageToLog msg
+  where
+    messageToLog :: Message -> String
+    messageToLog m = printf "Publishing %s as type %s" (show $ messageIdentifier m) (show $ messageType m)
+
+
 defaultMain :: FilePath -> IO ()
 defaultMain path = do
   conf <- readConfig path
@@ -83,12 +116,15 @@ defaultMain path = do
   chan <- openChannel conn
   declareExchange chan newExchange { exchangeName = "reddit_posts", exchangeType = "fanout" }
   sess <- newSession
-  let encoder = amqpPublisher sqlConn chan "reddit_posts"
+  let encoder = amqpPublisher chan "reddit_posts"
       recorder = sqlRecorder sqlConn
-      publisher = encoder <> (messageIdentifier >$< recorder)
+      publisher = (fmap A.encode >$< encoder) <> (fmap messageIdentifier >$< recorder) <> (maybe ParseFailed PublishMessage >$< logger)
+      logger = fetchToLog >$< stdoutPublisher
+  forever $ do
     for_ (conf ^. fetchers) $ \fetcher -> do
-      print fetcher
-      publishEntries (toMessage >$< publisher) sess fetcher
+      publish logger (Fetch fetcher)
+      publishEntries (toMessage sqlConn publisher) sess fetcher
     threadDelay (15 * 60_000_000)

 getPassword :: Password -> IO Text
 getPassword (Password p) = pure p
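The reworked pipeline threads Maybe all the way through: toMessage decides whether an entry parses, and every downstream publisher handles Nothing itself (the AMQP encoder and the SQLite recorder skip it, the logger reports ParseFailed). The wiring relies only on contramap (>$<) and the Semigroup of Publish. A stripped-down sketch of the same pattern, using only combinators this diff already uses; the module and sink names are made up for illustration:

module PipelineSketch where

import Data.Functor.Contravariant ((>$<))
import Reddit.Publish (Publish (..))

consoleSink :: Publish IO String
consoleSink = Publish putStrLn

lengthSink :: Publish IO Int
lengthSink = Publish (\n -> putStrLn ("payload length: " ++ show n))

-- Everything downstream takes a Maybe, mirroring how toMessage now passes
-- Nothing along when an entry has no "id" field; each sink gets the same
-- value because (<>) fans it out, and (>$<) adapts the input per sink.
pipeline :: Publish IO (Maybe String)
pipeline =
     (maybe "<unparsed entry>" id >$< consoleSink)
  <> (maybe 0 length             >$< lengthSink)

demo :: IO ()
demo = publish pipeline (Just "t3_abc123") *> publish pipeline Nothing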
reddit_pub/src/Network/Reddit.hs
@@ -14,7 +14,7 @@ import Network.Wreq hiding (getWith)
 import Network.Wreq.Session (Session, getWith)
 import Pipes (Producer, (>->), for, runEffect)
 import qualified Pipes.Prelude as P
-import Publish
+import Reddit.Publish
 import Data.Maybe (maybeToList)
 import Control.Monad.Trans (liftIO)
 import Database.SQLite.Simple.ToField (ToField)
reddit_pub/src/RedditPub/Publisher.hs (new file, empty)
reddit_tags/CHANGELOG.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+# Revision history for reddit-tags
+
+## 0.1.0.0 -- YYYY-mm-dd
+
+* First version. Released on an unsuspecting world.
reddit_tags/app/Main.hs (new file, 8 lines)
@@ -0,0 +1,8 @@
+module Main where
+
+import qualified MyLib (someFunc)
+
+main :: IO ()
+main = do
+  putStrLn "Hello, Haskell!"
+  MyLib.someFunc
reddit_tags/default.nix (new file, 18 lines)
@@ -0,0 +1,18 @@
+{ mkDerivation, aeson, amqp, attoparsec, base, hspec, lens
+, lens-aeson, lib, mtl, reddit-lib, text, transformers
+}:
+mkDerivation {
+  pname = "reddit-tags";
+  version = "0.1.0.0";
+  src = ./.;
+  isLibrary = true;
+  isExecutable = true;
+  libraryHaskellDepends = [
+    aeson amqp attoparsec base lens lens-aeson mtl reddit-lib text
+    transformers
+  ];
+  executableHaskellDepends = [ base ];
+  testHaskellDepends = [ base hspec ];
+  license = "unknown";
+  hydraPlatforms = lib.platforms.none;
+}
reddit_tags/reddit-tags.cabal (new file, 67 lines)
@@ -0,0 +1,67 @@
+cabal-version:      3.0
+name:               reddit-tags
+version:            0.1.0.0
+synopsis:
+
+-- A longer description of the package.
+-- description:
+homepage:
+
+-- A URL where users can report bugs.
+-- bug-reports:
+license:            NONE
+author:             Mats Rauhala
+maintainer:         mats.rauhala@iki.fi
+
+-- A copyright notice.
+-- copyright:
+category:           Web
+extra-source-files: CHANGELOG.md
+
+library
+    exposed-modules:  MyLib
+                      Tags
+                      Transformer
+
+    -- Modules included in this library but not exported.
+    -- other-modules:
+
+    -- LANGUAGE extensions used by modules in this package.
+    -- other-extensions:
+    build-depends:    base ^>=4.15.1.0
+                    , amqp
+                    , reddit-lib
+                    , mtl
+                    , text
+                    , aeson
+                    , lens
+                    , lens-aeson
+                    , transformers
+                    , attoparsec
+                    , bytestring
+    hs-source-dirs:   src
+    default-language: Haskell2010
+
+executable reddit-tags
+    main-is:          Main.hs
+
+    -- Modules included in this executable, other than Main.
+    -- other-modules:
+
+    -- LANGUAGE extensions used by modules in this package.
+    -- other-extensions:
+    build-depends:
+        base ^>=4.15.1.0,
+        reddit-tags
+
+    hs-source-dirs:   app
+    default-language: Haskell2010
+
+test-suite reddit-tags-test
+    default-language: Haskell2010
+    type:             exitcode-stdio-1.0
+    hs-source-dirs:   test
+    main-is:          MyLibTest.hs
+    build-depends:    base ^>=4.15.1.0
+                    , hspec
+                    , reddit-tags
reddit_tags/src/MyLib.hs (new file, 57 lines)
@@ -0,0 +1,57 @@
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE OverloadedStrings #-}
+module MyLib (someFunc) where
+import System.Environment (lookupEnv)
+import Control.Monad.Trans.Maybe (MaybeT(..))
+import Network.AMQP
+import qualified Data.Text as T
+import Data.Text (Text)
+import Control.Exception (bracket)
+import Control.Concurrent (newEmptyMVar, readMVar)
+import Control.Monad (void)
+
+import Control.Lens
+import Data.Aeson.Lens
+import Transformer
+import Data.ByteString.Lazy (ByteString)
+import Control.Category ((>>>))
+import Control.Arrow (arr, (&&&))
+import Tags (parseTags)
+
+data AMQP = AMQP
+  { host :: String
+  , vhost :: Text
+  , username :: Text
+  , password :: Text
+  }
+
+getAMQP :: IO (Maybe AMQP)
+getAMQP = runMaybeT $
+  AMQP <$> lookupEnvM "AMQP_HOST" <*> lookupEnvMText "AMQP_VHOST" <*> lookupEnvMText "AMQP_USER" <*> lookupEnvMText "AMQP_PASS"
+  where
+    lookupEnvM = MaybeT . lookupEnv
+    lookupEnvMText = fmap T.pack . lookupEnvM
+
+tagTransformer :: Transformer IO ByteString ()
+tagTransformer =
+  arrOpt (preview (key "content" . key "title" . _String))
+    >>> arr id &&& arr parseTags
+    >>> liftTransformer print
+
+someFunc :: IO ()
+someFunc = do
+  Just AMQP{..} <- getAMQP
+  let rabbitConnect = openConnection host vhost username password
+  bracket rabbitConnect closeConnection $ \conn -> do
+    chan <- openChannel conn
+    qos chan 0 1 False
+    declareQueue chan newQueue {queueName="reddit_tags"}
+    bindQueue chan "reddit_tags" "reddit_posts" "key"
+    consumeMsgs chan "reddit_tags" Ack $ \(msg, env) -> do
+      void $ runTransformer tagTransformer (msgBody msg)
+      -- let body = msgBody msg
+      -- let title = body ^? key "content" . key "title"
+      -- print title
+      -- print $ parseTags title
+      ackEnv env
+    void getLine
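tagTransformer first tries to extract content.title from the JSON message body (arrOpt yields Nothing when the path is missing), then pairs the raw title with its parsed tags and prints the pair. A small, self-contained illustration of that flow with a made-up payload; the module name and sample data are hypothetical, while the imports come from the modules added in this change set:

{-# LANGUAGE OverloadedStrings #-}
module TagDemo where

import Control.Arrow (arr, (&&&))
import Control.Category ((>>>))
import Control.Lens (preview)
import Data.Aeson.Lens (key, _String)
import Data.ByteString.Lazy (ByteString)

import Tags (parseTags)
import Transformer (arrOpt, liftTransformer, runTransformer)

-- A made-up message in the shape the publisher sends: the title carries the tags.
sampleBody :: ByteString
sampleBody = "{\"content\":{\"title\":\"[haskell] [nix] weekly thread\"}}"

demo :: IO ()
demo = do
  -- Same pipeline shape as tagTransformer above.
  let pipeline = arrOpt (preview (key "content" . key "title" . _String))
                   >>> arr id &&& arr parseTags
                   >>> liftTransformer print
  _ <- runTransformer pipeline sampleBody
  -- Expected output: ("[haskell] [nix] weekly thread",["haskell","nix"])
  pure ()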
reddit_tags/src/Tags.hs (new file, 18 lines)
@@ -0,0 +1,18 @@
+{-# LANGUAGE ApplicativeDo #-}
+module Tags (parseTags) where
+
+import qualified Data.Attoparsec.Text as A
+import Data.Attoparsec.Text (Parser)
+import Data.Text (Text)
+import Data.Either (fromRight)
+
+tag :: Parser Text
+tag = do
+  A.skipWhile (/= '[')
+  A.char '[' *> A.takeWhile (/= ']') <* A.char ']'
+
+tags :: Parser [Text]
+tags = A.many1 tag
+
+parseTags :: Text -> [Text]
+parseTags = fromRight [] . A.parseOnly tags
reddit_tags/src/Transformer.hs (new file, 18 lines)
@@ -0,0 +1,18 @@
+{-# LANGUAGE DerivingVia #-}
+module Transformer where
+
+import Control.Category ( Category )
+import Control.Arrow ( Arrow, ArrowChoice, Kleisli(Kleisli) )
+import Control.Monad.Trans.Maybe ( MaybeT(MaybeT) )
+
+newtype Transformer m a b = Transformer (a -> m (Maybe b))
+  deriving (Category, Arrow, ArrowChoice) via Kleisli (MaybeT m)
+
+arrOpt :: Applicative m => (a -> Maybe b) -> Transformer m a b
+arrOpt f = Transformer (pure . f)
+
+liftTransformer :: Monad m => (a -> m b) -> Transformer m a b
+liftTransformer f = Transformer (fmap Just . f)
+
+runTransformer :: Monad m => Transformer m a b -> a -> m (Maybe b)
+runTransformer (Transformer tr) = tr
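Deriving Category and Arrow via Kleisli (MaybeT m) is what gives these pipelines their short-circuiting: composition with (>>>) happens inside MaybeT, so once a stage produces Nothing the later stages never run. A minimal sketch, assuming the Transformer module above is in scope; the example functions are invented for illustration:

import Control.Category ((>>>))
import Text.Read (readMaybe)

-- Two optional stages composed with (>>>); if either fails, the whole run fails.
safeRecip :: Double -> Maybe Double
safeRecip 0 = Nothing
safeRecip x = Just (1 / x)

parseThenRecip :: Transformer IO String Double
parseThenRecip = arrOpt readMaybe >>> arrOpt safeRecip

-- runTransformer parseThenRecip "4"  yields Just 0.25
-- runTransformer parseThenRecip "0"  yields Nothing (second stage fails)
-- runTransformer parseThenRecip "x"  yields Nothing (first stage fails)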
reddit_tags/test/MyLibTest.hs (new file, 17 lines)
@@ -0,0 +1,17 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Main (main) where
+
+import Tags (parseTags)
+import Test.Hspec
+
+main :: IO ()
+main = hspec $
+  describe "Parser" $ do
+    it "Returns no results if there are no tags" $
+      parseTags "foo bar" `shouldBe` []
+    it "Returns a single result" $
+      parseTags "[foo]" `shouldBe` ["foo"]
+    it "Finds multiple results" $
+      parseTags "[foo][bar]" `shouldBe` ["foo", "bar"]
+    it "Finds multiple results with other text interleaved" $
+      parseTags "prefix [foo] infix [bar] suffix" `shouldBe` ["foo", "bar"]