Compare commits
No commits in common. "master" and "import" have entirely different histories.
@@ -21,7 +21,6 @@ import qualified Operations
 
 import Data.Query
   (Field(..))
-import qualified Options.Applicative.NonEmpty as NE
 
 commands :: Parser (BuukaM ())
 commands = subparser
@@ -35,7 +34,7 @@ commands = subparser
       f <$> strOption (long "url" <> short 'u' <> metavar "URL")
         <*> optional (strOption (long "title"))
     queryOpts f =
-      f <$> NE.some1 (asum [tagged Title "title", tagged Url "url"])
+      uncurry f <$> asum [tagged Title "title", tagged Url "url"]
    tagged t x = (t, ) <$> strOption (long x <> metavar "REGEX")
 
 main :: IO ()
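Aside: on the master side the query command collects one or more --title/--url REGEX options into a NonEmpty list via Options.Applicative.NonEmpty.some1, while the import side accepts exactly one field. A minimal sketch of the master-style parser, with a hypothetical queryParser name and a simplified stand-in for Data.Query.Field:

{-# LANGUAGE TupleSections #-}
module QueryParserSketch where

import Data.Foldable (asum)
import Data.List.NonEmpty (NonEmpty)
import Data.Text (Text)
import Options.Applicative
import qualified Options.Applicative.NonEmpty as NE

-- Simplified stand-in for Data.Query.Field; the real type is a GADT.
data Field a = Title | Url

-- Accepts at least one --title/--url REGEX option; repeated flags accumulate.
queryParser :: Parser (NonEmpty (Field Text, Text))
queryParser = NE.some1 (asum [tagged Title "title", tagged Url "url"])
  where
    tagged t x = (t, ) <$> strOption (long x <> metavar "REGEX")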
@@ -16,7 +16,7 @@ category: Web
 extra-source-files: CHANGELOG.md
 
 common common-stanza
-  build-depends: base >=4.13.0.0 && <4.15
+  build-depends: base ^>=4.13.0.0
   default-extensions: OverloadedStrings
     , ScopedTypeVariables
     , DerivingVia
@@ -1,7 +1,7 @@
 { mkDerivation, aeson, base, bytestring, conduit, conduit-extra
 , containers, deriving-compat, exceptions, filepath, hashable
-, hashids, hedgehog, hedgehog-corpus, lens, lib, mtl
-, optparse-applicative, regex-tdfa, sqlite-simple, tasty
+, hashids, hedgehog, hedgehog-corpus, lens, mtl
+, optparse-applicative, regex-tdfa, sqlite-simple, stdenv, tasty
 , tasty-hedgehog, tasty-hunit, text, transformers, unliftio, vector
 , yaml
 }:
@@ -21,5 +21,5 @@ mkDerivation {
     aeson base deriving-compat hedgehog hedgehog-corpus tasty
     tasty-hedgehog tasty-hunit text
   ];
-  license = lib.licenses.bsd3;
+  license = stdenv.lib.licenses.bsd3;
 }
shell.nix
@@ -1,26 +1,18 @@
 with (import <nixpkgs> {});
 
 let
-  hp = haskellPackages.extend (self: super: {
-    buuka = haskellPackages.callPackage ./. {};
-  });
-  easy-hls-src = fetchFromGitHub {
-    owner = "ssbothwell";
-    repo = "easy-hls-nix";
-    inherit (builtins.fromJSON (builtins.readFile ./easy-hls-nix.json)) rev sha256;
-  };
-  easy-hls = callPackage easy-hls-src { ghcVersions = [ hp.ghc.version ]; };
+  buuka = haskellPackages.callPackage ./. {};
 
 in
 
-hp.shellFor {
-  packages = h: [h.buuka];
+mkShell {
+  name = "shell-buuka";
   buildInputs = [
     hlint
     ghcid
     stylish-haskell
     cabal2nix
-    cabal-install
-    easy-hls
+    haskellPackages.cabal-install
+    (haskellPackages.ghcWithHoogle (_: buuka.buildInputs ++ buuka.propagatedBuildInputs))
   ];
 }
@@ -30,7 +30,7 @@ import Data.Hashable
 import GHC.Generics
   (Generic)
 
-import Data.Text (Text)
+import Data.Text
 
 import Control.Monad.Reader
 import Control.Monad.State
@@ -51,16 +51,7 @@ data BuukaEntry
     , _title :: Maybe Text
     }
   deriving stock (Show, Eq, Generic)
-  deriving anyclass (Hashable)
+  deriving anyclass (ToJSON, FromJSON, Hashable)
 
-opts :: Options
-opts = defaultOptions { fieldLabelModifier = dropWhile (== '_'), omitNothingFields = True }
-
-instance ToJSON BuukaEntry where
-  toJSON = genericToJSON opts
-
-instance FromJSON BuukaEntry where
-  parseJSON = genericParseJSON opts
-
 makeLenses ''BuukaEntry
 
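Aside: the hand-written aeson instances on the master side exist so that the leading underscore is stripped from field names and missing titles are omitted; the anyclass derivation on the import side uses aeson's defaults. A small sketch of the difference, assuming a simplified BuukaEntry whose URL newtype is replaced by plain Text:

{-# LANGUAGE DeriveGeneric #-}
module EncodingSketch where

import Data.Aeson
import Data.Text (Text)
import GHC.Generics (Generic)

-- Simplified BuukaEntry; the real module wraps the URL in a newtype.
data BuukaEntry = BuukaEntry
  { _url   :: Text
  , _title :: Maybe Text
  } deriving (Show, Generic)

opts :: Options
opts = defaultOptions { fieldLabelModifier = dropWhile (== '_'), omitNothingFields = True }

instance ToJSON BuukaEntry where
  toJSON = genericToJSON opts

-- encode (BuukaEntry "https://example.com" Nothing)
--   yields {"url":"https://example.com"},
-- whereas a plain generic/anyclass instance keeps the underscores and the
-- null title: {"_url":"https://example.com","_title":null}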
@@ -1,7 +1,5 @@
 {-# LANGUAGE GADTs #-}
 {-# LANGUAGE LambdaCase #-}
-{-# LANGUAGE TypeSynonymInstances #-}
-{-# LANGUAGE FlexibleInstances #-}
 module Data.Query
   (
   -- * AST
@@ -43,20 +41,11 @@ data QueryF f where
   EndsWith :: Field Text -> Text -> QueryF f
   Regex :: Field Text -> Text -> QueryF f
   And :: f -> f -> QueryF f
-  Pass :: QueryF f
 
 deriving instance Functor QueryF
 
 type Query = Fix QueryF
 
--- Query is a semigroup over the &&
-instance Semigroup Query where
-  (<>) = (.&&.)
-
--- Identity is the constant true
-instance Monoid Query where
-  mempty = Fix Pass
-
 startsWith :: Field Text -> Text -> Query
 startsWith field x = Fix (StartsWith field x)
 
@@ -71,18 +60,13 @@ a .&&. b = Fix (And a b)
 
 evaluate :: QueryF (BuukaEntry -> Bool) -> (BuukaEntry -> Bool)
 evaluate = \case
-  StartsWith Url x -> has (url . _URL . prefixed x)
-  EndsWith Url x -> has (url . _URL . suffixed x)
-  StartsWith Title x -> has (title . _Just . prefixed x)
-  EndsWith Title x -> has (title . _Just . suffixed x)
+  StartsWith Url x -> \e -> x `T.isPrefixOf` (e ^. url . _URL)
+  EndsWith Url x -> \e -> x `T.isSuffixOf` (e ^. url . _URL)
+  StartsWith Title x -> \e -> maybe False (x `T.isPrefixOf`) $ e ^. title
+  EndsWith Title x -> \e -> maybe False (x `T.isSuffixOf`) $ e ^. title
   Regex Url x -> \e -> (e ^. url . _URL) =~ x
   Regex Title x -> \e -> maybe False (=~ x) $ e ^. title
   And a b -> \e -> a e && b e
-  Pass -> const True
-  where
-    prefixed ps = prism' (ps <>) (T.stripPrefix ps)
-    suffixed qs = prism' (<> qs) (T.stripSuffix qs)
-
 
 predicate :: Query -> BuukaEntry -> Bool
 predicate = cata evaluate
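Aside: the Pass constructor plus the Semigroup/Monoid instances (master side only) give queries a logical "and" with a neutral element, so several criteria can be folded into one Query. A usage fragment against the Data.Query API shown above, assuming regex is the smart constructor for the Regex node, analogous to startsWith:

{-# LANGUAGE OverloadedStrings #-}

-- Combining two criteria: (<>) is (.&&.), so both must hold.
both :: Query
both = startsWith Url "https://" <> regex Title "[Hh]askell"

-- predicate both :: BuukaEntry -> Bool
-- mempty (Fix Pass) accepts every entry, which is what makes folding an
-- arbitrary collection of criteria with foldMap safe.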
@@ -79,7 +79,7 @@ newtype ImportException
 
 data Firefox
   = Firefox { _url :: Text
-            , _title :: Maybe Text
+            , _title :: Text
             , _keywords :: [Text]
             }
   deriving stock (Show, Eq)
@@ -124,9 +124,9 @@ importFirefox = do
   -- exist in the set, will it be inserted to the store
   buukaU $ modify $ \oldState -> F.foldl' update (initialState oldState) fs ^. buuka
   where
-    toEntry Firefox{..} = B.BuukaEntry{ B._url = B.URL _url, B._title = _title }
+    toEntry Firefox{..} = B.BuukaEntry{ B._url = B.URL _url, B._title = Just _title }
     update acc f = acc
       & seen <>~ (f ^. url . to S.singleton)
-      & if has (seen . ix (f ^. url)) acc then id else buuka %~ B.insert (toEntry f)
+      & if has (seen . ix (f ^. url)) acc then id else buuka %~ (B.insert (toEntry f))
     initialState oldState = Update oldState (initialUrls oldState)
     initialUrls = foldMapOf (B._Buuka . folded . B.url . B._URL) S.singleton
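Aside: the import fold threads an accumulator that carries both the store being built and the set of URLs already seen, so a bookmark is inserted only the first time its URL turns up. A simplified, lens-free sketch of that dedup step, with hypothetical names and plain keys:

import qualified Data.Foldable as F
import qualified Data.Set as S

-- Fold new entries into an accumulator, inserting an entry only when its
-- key has not been seen before; every processed key is added to the set.
dedupInsert :: Ord k => (a -> k) -> (S.Set k, [a]) -> [a] -> (S.Set k, [a])
dedupInsert key = F.foldl' step
  where
    step (seen, kept) e
      | key e `S.member` seen = (S.insert (key e) seen, kept)
      | otherwise             = (S.insert (key e) seen, e : kept)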
@@ -17,13 +17,11 @@ import Operations.Format
 import Data.Text
   (Text)
 import qualified Data.Text.IO as T
-import Data.List.NonEmpty (NonEmpty)
 
-query :: NonEmpty (Field Text, Text) -> BuukaM ()
-query qs =
+query :: Field Text -> Text -> BuukaM ()
+query field q =
   buukaQ (asks go) >>= traverse_ (liftIO . T.putStrLn)
   where
-    q = foldMap (uncurry regex) qs
     go :: Buuka -> [Text]
-    go b = formatEntries b (filter (predicate q) . B.elements $ b)
+    go b = formatEntries b (filter (predicate (regex field q)) . B.elements $ b)
 
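Aside: on the master side the query command therefore folds every (field, regex) pair into a single Query. A worked example, assuming two criteria are supplied on the command line:

-- qs = (Title, "hask") :| [(Url, "git")]
--
-- foldMap (uncurry regex) qs
--   = regex Title "hask" <> (regex Url "git" <> mempty)
--   = regex Title "hask" .&&. (regex Url "git" .&&. Fix Pass)
--
-- so an entry is kept only if it satisfies every supplied criterion,
-- which is where the Monoid instance on Query earns its keep.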