From bc0da4a4eb142e5bdef75384881b66cfcdd887df Mon Sep 17 00:00:00 2001 From: Chris Allen Date: Tue, 13 Feb 2018 15:47:43 -0600 Subject: [PATCH] hlint, weeder --- .hlint.yaml | 66 ++ Makefile | 16 + bloodhound.cabal | 20 +- src/Database/V1/Bloodhound/Client.hs | 22 +- src/Database/V1/Bloodhound/Types.hs | 12 +- src/Database/V5/Bloodhound/Client.hs | 25 +- .../V5/Bloodhound/Internal/Analysis.hs | 8 +- src/Database/V5/Bloodhound/Internal/Client.hs | 453 ++++++-- .../V5/Bloodhound/Internal/Newtypes.hs | 29 +- src/Database/V5/Bloodhound/Internal/Query.hs | 101 +- src/Database/V5/Bloodhound/Types.hs | 976 ++++++------------ tests/V5/Test/Aggregation.hs | 4 +- tests/V5/Test/BulkAPI.hs | 8 +- tests/V5/Test/Documents.hs | 4 +- tests/V5/Test/Generators.hs | 6 +- tests/V5/Test/Highlights.hs | 26 +- tests/V5/Test/JSON.hs | 2 +- tests/V5/Test/Script.hs | 5 +- tests/V5/Test/Snapshots.hs | 8 +- tests/V5/Test/Sorting.hs | 2 +- tests/V5/Test/SourceFiltering.hs | 10 +- tests/V5/Test/Suggest.hs | 2 +- tests/V5/tests.hs | 24 +- 23 files changed, 983 insertions(+), 846 deletions(-) create mode 100644 .hlint.yaml diff --git a/.hlint.yaml b/.hlint.yaml new file mode 100644 index 0000000..80500e4 --- /dev/null +++ b/.hlint.yaml @@ -0,0 +1,66 @@ +# HLint configuration file +# https://github.com/ndmitchell/hlint +########################## + +# This file contains a template configuration file, which is typically +# placed as .hlint.yaml in the root of your project + + +# Specify additional command line arguments +# +# - arguments: [--color, --cpp-simple, -XQuasiQuotes] + + +# Control which extensions/flags/modules/functions can be used +# +# - extensions: +# - default: false # all extension are banned by default +# - name: [PatternGuards, ViewPatterns] # only these listed extensions can be used +# - {name: CPP, within: CrossPlatform} # CPP can only be used in a given module +# +# - flags: +# - {name: -w, within: []} # -w is allowed nowhere +# +# - modules: +# - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set' +# - {name: Control.Arrow, within: []} # Certain modules are banned entirely +# +# - functions: +# - {name: unsafePerformIO, within: []} # unsafePerformIO can only appear in no modules + + +# Add custom hints for this project +# +# Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar" +# - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x} + + +# Turn on hints that are off by default +# +# Ban "module X(module X) where", to require a real export list +# - warn: {name: Use explicit module export list} +# +# Replace a $ b $ c with a . b $ c +# - group: {name: dollar, enabled: true} +# +# Generalise map to fmap, ++ to <> +# - group: {name: generalise, enabled: true} + + +# Ignore some builtin hints +- ignore: {name: "Use <$>"} +- ignore: {name: "Use lambda-case"} +# When we don't use camelCase it's to match ES. +# Possibly this is a mistake. 
+- ignore: {name: "Use camelCase"} +- ignore: {name: "Eta reduce"} +# - ignore: {name: Use let} +# - ignore: {name: Use const, within: SpecialModule} # Only within certain modules + + +# Define some custom infix operators +# - fixity: infixr 3 ~^#^~ + + +# To generate a suitable file for HLint do: +# $ hlint --default > .hlint.yaml diff --git a/Makefile b/Makefile index ac57a3f..a48dd2d 100644 --- a/Makefile +++ b/Makefile @@ -3,6 +3,9 @@ stack = STACK_YAML='stack.yaml' stack build: stack build +build-validate: + stack build --fast --ghc-options '-Werror' + ghci: stack ghci @@ -18,6 +21,19 @@ test-ghci: ghcid: ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests" +ghcid-validate: + ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-Werror -fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests" + +weeder: + weeder . --build + +# hlint --default > .hlint.yaml +hlint: + hlint . + +hlint-watch: + sos src/ -c "hlint ." -p "src/(.*)\.hs" + mod-build: stack build --ghc-options '+RTS -A128M -RTS' diff --git a/bloodhound.cabal b/bloodhound.cabal index 0f0649b..9c3a339 100644 --- a/bloodhound.cabal +++ b/bloodhound.cabal @@ -37,6 +37,11 @@ library Database.V5.Bloodhound.Client Database.V5.Bloodhound.Types Database.V5.Bloodhound.Types.Class + Database.V5.Bloodhound.Internal.Analysis + Database.V5.Bloodhound.Internal.Client + Database.V5.Bloodhound.Internal.Newtypes + Database.V5.Bloodhound.Internal.Query + Database.V5.Bloodhound.Internal.StringlyTyped Database.V1.Bloodhound Database.V1.Bloodhound.Client Database.V1.Bloodhound.Types @@ -44,11 +49,6 @@ library Database.V1.Bloodhound.Types.Internal other-modules: Bloodhound.Import Database.Bloodhound.Common.Script - Database.V5.Bloodhound.Internal.Analysis - Database.V5.Bloodhound.Internal.Client - Database.V5.Bloodhound.Internal.Newtypes - Database.V5.Bloodhound.Internal.Query - Database.V5.Bloodhound.Internal.StringlyTyped hs-source-dirs: src build-depends: base >= 4.3 && <5, bytestring >= 0.10.0 && <0.11, @@ -64,13 +64,10 @@ library http-types >= 0.8 && <0.13, vector >= 0.10.9 && <0.13, scientific >= 0.3.0.0 && <0.4.0.0, - bifunctors, - exceptions, - data-default-class, blaze-builder, - unordered-containers, - mtl-compat, - hashable + exceptions, + hashable, + unordered-containers default-language: Haskell2010 test-suite bloodhound-tests @@ -116,7 +113,6 @@ test-suite bloodhound-tests pretty-simple, quickcheck-arbitrary-template, quickcheck-properties, - generics-sop >=0.2 && <0.4, errors, exceptions, temporary, diff --git a/src/Database/V1/Bloodhound/Client.hs b/src/Database/V1/Bloodhound/Client.hs index 0059026..9c68f2e 100644 --- a/src/Database/V1/Bloodhound/Client.hs +++ b/src/Database/V1/Bloodhound/Client.hs @@ -223,7 +223,8 @@ getStatus :: MonadBH m => m (Maybe Status) getStatus = do response <- get =<< url return $ decode (responseBody response) - where url = joinPath [] + where + url = joinPath [] -- | 'getSnapshotRepos' gets the definitions of a subset of the -- defined snapshot repos. @@ -252,7 +253,7 @@ instance FromJSON GSRs where parseJSON = withObject "Collection of GenericSnapshotRepo" parse where parse = fmap GSRs . mapM (uncurry go) . 
HM.toList - go rawName = withObject "GenericSnapshotRepo" $ \o -> do + go rawName = withObject "GenericSnapshotRepo" $ \o -> GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type" <*> o .: "settings" @@ -454,16 +455,18 @@ deleteIndex (IndexName indexName) = updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply updateIndexSettings updates (IndexName indexName) = bindM2 put url (return body) - where url = joinPath [indexName, "_settings"] - body = Just (encode jsonBody) - jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) + where + url = joinPath [indexName, "_settings"] + body = Just (encode jsonBody) + jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName -> m (Either EsError IndexSettingsSummary) -getIndexSettings (IndexName indexName) = do +getIndexSettings (IndexName indexName) = parseEsResponse =<< get =<< url - where url = joinPath [indexName, "_settings"] + where + url = joinPath [indexName, "_settings"] -- | 'optimizeIndex' will optimize a single index, list of indexes or @@ -586,7 +589,7 @@ listIndices = url = joinPath ["_cat/indices?format=json"] parse body = maybe (throwM (EsProtocolException body)) return $ do vals <- decode body - forM vals $ \val -> do + forM vals $ \val -> case val of Object obj -> do indexVal <- HM.lookup "index" obj @@ -718,7 +721,8 @@ encodeBulkOperations stream = collapsed where collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n") mash :: Builder -> V.Vector L.ByteString -> Builder -mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x)) +mash = + V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x) mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value mkBulkStreamValue operation indexName mappingName docId = diff --git a/src/Database/V1/Bloodhound/Types.hs b/src/Database/V1/Bloodhound/Types.hs index 10f19e5..91e469b 100644 --- a/src/Database/V1/Bloodhound/Types.hs +++ b/src/Database/V1/Bloodhound/Types.hs @@ -4,7 +4,6 @@ {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} --- {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} @@ -1137,8 +1136,10 @@ data Source = | SourceIncludeExclude Include Exclude deriving (Read, Show, Eq, Generic, Typeable) -data PatternOrPatterns = PopPattern Pattern - | PopPatterns [Pattern] deriving (Eq, Read, Show, Generic, Typeable) +data PatternOrPatterns = + PopPattern Pattern + | PopPatterns [Pattern] + deriving (Eq, Read, Show, Generic, Typeable) data Include = Include [Pattern] deriving (Eq, Read, Show, Generic, Typeable) data Exclude = Exclude [Pattern] deriving (Eq, Read, Show, Generic, Typeable) @@ -1906,8 +1907,9 @@ instance ToJSON TermOrder where instance ToJSON TermInclusion where toJSON (TermInclusion x) = toJSON x - toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern, - "flags" .= flags] + toJSON (TermPattern pattern flags) = + omitNulls [ "pattern" .= pattern + , "flags" .= flags] instance ToJSON CollectionMode where toJSON BreadthFirst = "breadth_first" diff --git a/src/Database/V5/Bloodhound/Client.hs b/src/Database/V5/Bloodhound/Client.hs index c63fc8d..fd28423 100644 --- a/src/Database/V5/Bloodhound/Client.hs +++ b/src/Database/V5/Bloodhound/Client.hs @@ -317,7 +317,7 @@ instance FromJSON GSRs where parseJSON = withObject "Collection of 
GenericSnapshotRepo" parse where parse = fmap GSRs . mapM (uncurry go) . HM.toList - go rawName = withObject "GenericSnapshotRepo" $ \o -> do + go rawName = withObject "GenericSnapshotRepo" $ \o -> GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type" <*> o .: "settings" @@ -555,16 +555,18 @@ deleteIndex (IndexName indexName) = updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply updateIndexSettings updates (IndexName indexName) = bindM2 put url (return body) - where url = joinPath [indexName, "_settings"] - body = Just (encode jsonBody) - jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) + where + url = joinPath [indexName, "_settings"] + body = Just (encode jsonBody) + jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName -> m (Either EsError IndexSettingsSummary) -getIndexSettings (IndexName indexName) = do +getIndexSettings (IndexName indexName) = parseEsResponse =<< get =<< url - where url = joinPath [indexName, "_settings"] + where + url = joinPath [indexName, "_settings"] -- | 'forceMergeIndex' -- @@ -704,7 +706,7 @@ listIndices = url = joinPath ["_cat/indices?format=json"] parse body = maybe (throwM (EsProtocolException body)) return $ do vals <- decode body - forM vals $ \val -> do + forM vals $ \val -> case val of Object obj -> do indexVal <- HM.lookup "index" obj @@ -861,10 +863,11 @@ deleteDocument (IndexName indexName) -- >>> _ <- runBH' $ bulk stream -- >>> _ <- runBH' $ refreshIndex testIndex bulk :: MonadBH m => V.Vector BulkOperation -> m Reply -bulk bulkOps = do +bulk bulkOps = bindM2 post url (return body) - where url = joinPath ["_bulk"] - body = Just $ encodeBulkOperations bulkOps + where + url = joinPath ["_bulk"] + body = Just $ encodeBulkOperations bulkOps -- | 'encodeBulkOperations' is a convenience function for dumping a vector of 'BulkOperation' -- into an 'L.ByteString' @@ -882,7 +885,7 @@ encodeBulkOperations stream = collapsed where toLazyByteString $ mappend mashedTaters (byteString "\n") mash :: Builder -> V.Vector L.ByteString -> Builder -mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x)) +mash = V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x) mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value mkBulkStreamValue operation indexName mappingName docId = diff --git a/src/Database/V5/Bloodhound/Internal/Analysis.hs b/src/Database/V5/Bloodhound/Internal/Analysis.hs index ba2bbdc..ede6948 100644 --- a/src/Database/V5/Bloodhound/Internal/Analysis.hs +++ b/src/Database/V5/Bloodhound/Internal/Analysis.hs @@ -52,8 +52,8 @@ instance FromJSON AnalyzerDefinition where <$> m .:? "tokenizer" <*> m .:? 
"filter" .!= [] -data TokenizerDefinition - = TokenizerDefinitionNgram Ngram +newtype TokenizerDefinition = + TokenizerDefinitionNgram Ngram deriving (Eq,Show) instance ToJSON TokenizerDefinition where @@ -70,8 +70,8 @@ instance FromJSON TokenizerDefinition where typ <- m .: "type" :: Parser Text case typ of "ngram" -> fmap TokenizerDefinitionNgram $ Ngram - <$> (fmap unStringlyTypedInt (m .: "min_gram")) - <*> (fmap unStringlyTypedInt (m .: "max_gram")) + <$> fmap unStringlyTypedInt (m .: "min_gram") + <*> fmap unStringlyTypedInt (m .: "max_gram") <*> m .: "token_chars" _ -> fail "invalid TokenizerDefinition" diff --git a/src/Database/V5/Bloodhound/Internal/Client.hs b/src/Database/V5/Bloodhound/Internal/Client.hs index 46d7860..11f6653 100644 --- a/src/Database/V5/Bloodhound/Internal/Client.hs +++ b/src/Database/V5/Bloodhound/Internal/Client.hs @@ -10,6 +10,7 @@ module Database.V5.Bloodhound.Internal.Client where import Bloodhound.Import import qualified Data.Text as T +import qualified Data.Traversable as DT import qualified Data.HashMap.Strict as HM import qualified Data.Vector as V import qualified Data.Version as Vers @@ -114,6 +115,15 @@ data Status = Status , tagline :: Text } deriving (Eq, Show) +instance FromJSON Status where + parseJSON (Object v) = Status <$> + v .: "name" <*> + v .: "cluster_name" <*> + v .: "cluster_uuid" <*> + v .: "version" <*> + v .: "tagline" + parseJSON _ = empty + {-| 'IndexSettings' is used to configure the shards and replicas when you create an Elasticsearch Index. @@ -125,6 +135,20 @@ data IndexSettings = IndexSettings , indexReplicas :: ReplicaCount } deriving (Eq, Show) +instance ToJSON IndexSettings where + toJSON (IndexSettings s r) = object ["settings" .= + object ["index" .= + object ["number_of_shards" .= s, "number_of_replicas" .= r] + ] + ] + +instance FromJSON IndexSettings where + parseJSON = withObject "IndexSettings" parse + where parse o = do s <- o .: "settings" + i <- s .: "index" + IndexSettings <$> i .: "number_of_shards" + <*> i .: "number_of_replicas" + {-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -} defaultIndexSettings :: IndexSettings @@ -212,11 +236,150 @@ data UpdatableIndexSetting = NumberOfReplicas ReplicaCount -- ^ Analysis is not a dynamic setting and can only be performed on a closed index. 
deriving (Eq, Show) +oPath :: ToJSON a => NonEmpty Text -> a -> Value +oPath (k :| []) v = object [k .= v] +oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v] + +attrFilterJSON :: NonEmpty NodeAttrFilter -> Value +attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs) + | NodeAttrFilter (NodeAttrName n) vs <- toList fs] + +parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter) +parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse + where parse o = case HM.toList o of + [] -> fail "Expected non-empty list of NodeAttrFilters" + x:xs -> DT.mapM (uncurry parse') (x :| xs) + parse' n = withText "Text" $ \t -> + case T.splitOn "," t of + fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs)) + [] -> fail "Expected non-empty list of filter values" + +instance ToJSON UpdatableIndexSetting where + toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x + toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x + toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) + toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x + toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x + toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x + toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x + toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) + toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x + toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x + toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) + toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) + toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) + toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) + toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) + toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x + toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x + toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x + toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) + toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x + toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x + toJSON (CompressionSetting x) = oPath ("index" :| ["codec"]) x + toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x + toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x + toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x + toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x + toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x + toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x + toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x + toJSON (MappingTotalFieldsLimit x) = oPath ("index" :| ["mapping","total_fields","limit"]) x + toJSON (AnalysisSetting x) = oPath ("index" :| ["analysis"]) x + 
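A quick illustration of the oPath helper above, since every branch of the new
ToJSON instance leans on it: it wraps a value in one singleton object per key
of the path, innermost key last. This is a standalone sketch (the Main module
and the example setting are illustrative only), written against the aeson-1.x
API this patch targets:

    {-# LANGUAGE OverloadedStrings #-}
    module Main where

    import           Data.Aeson                 (ToJSON, Value, encode, object, (.=))
    import qualified Data.ByteString.Lazy.Char8 as BL
    import           Data.List.NonEmpty         (NonEmpty (..))
    import           Data.Text                  (Text)

    -- Same shape as the oPath in this patch: nest the value under each
    -- key of the non-empty path, producing one singleton object per key.
    oPath :: ToJSON a => NonEmpty Text -> a -> Value
    oPath (k :| [])      v = object [k .= v]
    oPath (k :| (h : t)) v = object [k .= oPath (h :| t) v]

    main :: IO ()
    main =
      -- Mirrors toJSON (MappingTotalFieldsLimit 1000):
      -- {"index":{"mapping":{"total_fields":{"limit":1000}}}}
      BL.putStrLn (encode (oPath ("index" :| ["mapping", "total_fields", "limit"]) (1000 :: Int)))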
+instance FromJSON UpdatableIndexSetting where + parseJSON = withObject "UpdatableIndexSetting" parse + where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] + <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] + <|> refreshInterval `taggedAt` ["index", "refresh_interval"] + <|> indexConcurrency `taggedAt` ["index", "concurrency"] + <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] + <|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"] + <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] + <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] + <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] + <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] + <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] + <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] + <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] + <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] + <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] + <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] + <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] + <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] + <|> gcDeletes `taggedAt` ["index", "gc_deletes"] + <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] + <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] + <|> compressionSetting `taggedAt` ["index", "codec"] + <|> compoundFormat `taggedAt` ["index", "compound_format"] + <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] + <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] + <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] + <|> blocksRead `taggedAt` ["blocks", "read"] + <|> blocksWrite `taggedAt` ["blocks", "write"] + <|> blocksMetaData `taggedAt` ["blocks", "metadata"] + <|> mappingTotalFieldsLimit `taggedAt` ["index", "mapping", "total_fields", "limit"] + <|> analysisSetting `taggedAt` ["index", "analysis"] + where taggedAt f ks = taggedAt' f (Object o) ks + taggedAt' f v [] = + f =<< (parseJSON v <|> parseJSON (unStringlyTypeJSON v)) + taggedAt' f v (k:ks) = + withObject "Object" (\o -> do v' <- o .: k + taggedAt' f v' ks) v + numberOfReplicas = pure . NumberOfReplicas + autoExpandReplicas = pure . AutoExpandReplicas + refreshInterval = pure . RefreshInterval . ndtJSON + indexConcurrency = pure . IndexConcurrency + failOnMergeFailure = pure . FailOnMergeFailure + translogFlushThresholdOps = pure . TranslogFlushThresholdOps + translogFlushThresholdSize = pure . TranslogFlushThresholdSize + translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON + translogDisableFlush = pure . TranslogDisableFlush + cacheFilterMaxSize = pure . CacheFilterMaxSize + cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON + gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON + routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter + routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter + routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter + routingAllocationEnable = pure . 
RoutingAllocationEnable + routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode + recoveryInitialShards = pure . RecoveryInitialShards + gcDeletes = pure . GCDeletes . ndtJSON + ttlDisablePurge = pure . TTLDisablePurge + translogFSType = pure . TranslogFSType + compressionSetting = pure . CompressionSetting + compoundFormat = pure . IndexCompoundFormat + compoundOnFlush = pure . IndexCompoundOnFlush + warmerEnabled = pure . WarmerEnabled + blocksReadOnly = pure . BlocksReadOnly + blocksRead = pure . BlocksRead + blocksWrite = pure . BlocksWrite + blocksMetaData = pure . BlocksMetaData + mappingTotalFieldsLimit = pure . MappingTotalFieldsLimit + analysisSetting = pure . AnalysisSetting + data ReplicaBounds = ReplicasBounded Int Int | ReplicasLowerBounded Int | ReplicasUnbounded deriving (Eq, Show) + +instance ToJSON ReplicaBounds where + toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b) + toJSON (ReplicasLowerBounded a) = String (showText a <> "-all") + toJSON ReplicasUnbounded = Bool False + +instance FromJSON ReplicaBounds where + parseJSON v = withText "ReplicaBounds" parseText v + <|> withBool "ReplicaBounds" parseBool v + where parseText t = case T.splitOn "-" t of + [a, "all"] -> ReplicasLowerBounded <$> parseReadText a + [a, b] -> ReplicasBounded <$> parseReadText a + <*> parseReadText b + _ -> fail ("Could not parse ReplicaBounds: " <> show t) + parseBool False = pure ReplicasUnbounded + parseBool _ = fail "ReplicasUnbounded cannot be represented with True" + data Compression = CompressionDefault -- ^ Compress with LZ4 @@ -267,6 +430,16 @@ kilobytes n = Bytes (1000 * n) data FSType = FSSimple | FSBuffered deriving (Eq, Show) +instance ToJSON FSType where + toJSON FSSimple = "simple" + toJSON FSBuffered = "buffered" + +instance FromJSON FSType where + parseJSON = withText "FSType" parse + where parse "simple" = pure FSSimple + parse "buffered" = pure FSBuffered + parse t = fail ("Invalid FSType: " <> show t) + data InitialShardCount = QuorumShards | QuorumMinus1Shards | FullShards @@ -274,6 +447,22 @@ data InitialShardCount = QuorumShards | ExplicitShards Int deriving (Eq, Show) +instance FromJSON InitialShardCount where + parseJSON v = withText "InitialShardCount" parseText v + <|> ExplicitShards <$> parseJSON v + where parseText "quorum" = pure QuorumShards + parseText "quorum-1" = pure QuorumMinus1Shards + parseText "full" = pure FullShards + parseText "full-1" = pure FullMinus1Shards + parseText _ = mzero + +instance ToJSON InitialShardCount where + toJSON QuorumShards = String "quorum" + toJSON QuorumMinus1Shards = String "quorum-1" + toJSON FullShards = String "full" + toJSON FullMinus1Shards = String "full-1" + toJSON (ExplicitShards x) = toJSON x + data NodeAttrFilter = NodeAttrFilter { nodeAttrFilterName :: NodeAttrName , nodeAttrFilterValues :: NonEmpty Text } @@ -286,12 +475,52 @@ data CompoundFormat = CompoundFileFormat Bool -- ^ percentage between 0 and 1 where 0 is false, 1 is true deriving (Eq, Show) -newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime } +instance ToJSON CompoundFormat where + toJSON (CompoundFileFormat x) = Bool x + toJSON (MergeSegmentVsTotalIndex x) = toJSON x -data IndexSettingsSummary = IndexSettingsSummary { sSummaryIndexName :: IndexName - , sSummaryFixedSettings :: IndexSettings - , sSummaryUpdateable :: [UpdatableIndexSetting]} - deriving (Eq, Show) +instance FromJSON CompoundFormat where + parseJSON v = CompoundFileFormat <$> parseJSON v + <|> MergeSegmentVsTotalIndex <$> 
parseJSON v
+
+newtype NominalDiffTimeJSON =
+  NominalDiffTimeJSON { ndtJSON :: NominalDiffTime }
+
+instance ToJSON NominalDiffTimeJSON where
+  toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s")
+
+instance FromJSON NominalDiffTimeJSON where
+  parseJSON = withText "NominalDiffTime" parse
+    where parse t = case T.takeEnd 1 t of
+            "s" -> NominalDiffTimeJSON . fromInteger <$> parseReadText (T.dropEnd 1 t)
+            _ -> fail "Invalid or missing NominalDiffTime unit (expected s)"
+
+data IndexSettingsSummary = IndexSettingsSummary
+  { sSummaryIndexName :: IndexName
+  , sSummaryFixedSettings :: IndexSettings
+  , sSummaryUpdateable :: [UpdatableIndexSetting]}
+  deriving (Eq, Show)
+
+parseSettings :: Object -> Parser [UpdatableIndexSetting]
+parseSettings o = do
+  o' <- o .: "index"
+  -- slice the index object into singleton hashmaps and try to parse each
+  parses <- forM (HM.toList o') $ \(k, v) -> do
+    -- blocks are now nested into the "index" key, which is not how they're serialized
+    let atRoot = Object (HM.singleton k v)
+    let atIndex = Object (HM.singleton "index" atRoot)
+    optional (parseJSON atRoot <|> parseJSON atIndex)
+  return (catMaybes parses)
+
+instance FromJSON IndexSettingsSummary where
+  parseJSON = withObject "IndexSettingsSummary" parse
+    where parse o = case HM.toList o of
+            [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn)
+                                      <$> parseJSON v
+                                      <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings")
+            _ -> fail "Expected single-key object with index name"
+          redundant (NumberOfReplicas _) = True
+          redundant _ = False
 
 {-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -}
 type Reply = Network.HTTP.Client.Response LByteString
@@ -310,8 +539,9 @@ data FieldType = GeoPointType
                | ShortType
                | ByteType deriving (Eq, Show)
 
-data FieldDefinition =
-  FieldDefinition { fieldType :: FieldType } deriving (Eq, Show)
+newtype FieldDefinition = FieldDefinition
+  { fieldType :: FieldType
+  } deriving (Eq, Show)
 
 {-| An 'IndexTemplate' defines a template that will automatically be
     applied to new indices created. The templates include both
@@ -327,6 +557,17 @@ data IndexTemplate =
   , templateMappings :: [Value]
   }
 
+instance ToJSON IndexTemplate where
+  toJSON (IndexTemplate p s m) = merge
+    (object [ "template" .= p
+            , "mappings" .= foldl' merge (object []) m
+            ])
+    (toJSON s)
+    where
+      merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2
+      merge o Null = o
+      merge _ _ = undefined
+
 data MappingField =
   MappingField { mappingFieldName :: FieldName
                , fieldDefinition :: FieldDefinition }
@@ -355,6 +596,20 @@ data AllocationPolicy = AllocAll
                         -- ^ No shard allocation is allowed
                       deriving (Eq, Show)
 
+instance ToJSON AllocationPolicy where
+  toJSON AllocAll = String "all"
+  toJSON AllocPrimaries = String "primaries"
+  toJSON AllocNewPrimaries = String "new_primaries"
+  toJSON AllocNone = String "none"
+
+instance FromJSON AllocationPolicy where
+  parseJSON = withText "AllocationPolicy" parse
+    where parse "all" = pure AllocAll
+          parse "primaries" = pure AllocPrimaries
+          parse "new_primaries" = pure AllocNewPrimaries
+          parse "none" = pure AllocNone
+          parse t = fail ("Invalid AllocationPolicy: " <> show t)
+
 {-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
     operation index, create, delete, and update. 'BulkIndex' behaves like an
     "upsert", 'BulkCreate' will fail if a document already exists at the DocId. 
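For the duration wrapper added above, a small round-trip sketch. It assumes
the Internal.Client module, which this patch moves into exposed-modules, so
the import is available to library users (the values are illustrative):

    {-# LANGUAGE OverloadedStrings #-}
    module Main where

    import Data.Aeson (decode, encode)
    import Database.V5.Bloodhound.Internal.Client (NominalDiffTimeJSON (..))

    main :: IO ()
    main = do
      -- Durations serialize as second-suffixed strings: 30 becomes "30s".
      print (encode (NominalDiffTimeJSON 30))
      -- "30s" parses back; a value without the trailing "s" fails to parse.
      print (ndtJSON <$> (decode "\"30s\"" :: Maybe NominalDiffTimeJSON))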
@@ -401,6 +656,24 @@ data EsResultFound a = , _source :: a } deriving (Eq, Show) +instance (FromJSON a) => FromJSON (EsResult a) where + parseJSON jsonVal@(Object v) = do + found <- v .:? "found" .!= False + fr <- if found + then parseJSON jsonVal + else return Nothing + EsResult <$> v .: "_index" <*> + v .: "_type" <*> + v .: "_id" <*> + pure fr + parseJSON _ = empty + +instance (FromJSON a) => FromJSON (EsResultFound a) where + parseJSON (Object v) = EsResultFound <$> + v .: "_version" <*> + v .: "_source" + parseJSON _ = empty + {-| 'EsError' is the generic type that will be returned when there was a problem. If you can't parse the expected response, its a good idea to try parsing this. @@ -410,6 +683,12 @@ data EsError = , errorMessage :: Text } deriving (Eq, Show) +instance FromJSON EsError where + parseJSON (Object v) = EsError <$> + v .: "status" <*> + (v .: "error" <|> (v .: "error" >>= (.: "reason"))) + parseJSON _ = empty + {-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response returned by the ElasticSearch server. If you encounter this error, please verify that your domain data types and FromJSON instances are working properly @@ -418,8 +697,9 @@ sure that your mappings are correct, then this error may be an indication of an incompatibility between Bloodhound and ElasticSearch. Please open a bug report and be sure to include the exception body. -} -data EsProtocolException = EsProtocolException { esProtoExBody :: LByteString } - deriving (Eq, Show) +newtype EsProtocolException = EsProtocolException + { esProtoExBody :: LByteString + } deriving (Eq, Show) instance Exception EsProtocolException @@ -445,6 +725,13 @@ newtype SearchAliasRouting = SearchAliasRouting (NonEmpty RoutingValue) deriving (Eq, Show) +instance ToJSON SearchAliasRouting where + toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) + +instance FromJSON SearchAliasRouting where + parseJSON = withText "SearchAliasRouting" parse + where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) + newtype IndexAliasRouting = IndexAliasRouting RoutingValue deriving (Eq, Show, ToJSON, FromJSON) @@ -457,6 +744,55 @@ newtype IndexAliasesSummary = IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } deriving (Eq, Show) +instance FromJSON IndexAliasesSummary where + parseJSON = withObject "IndexAliasesSummary" parse + where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) + go ixn = withObject "index aliases" $ \ia -> do + aliases <- ia .:? "aliases" .!= mempty + forM (HM.toList aliases) $ \(aName, v) -> do + let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) + IndexAliasSummary indexAlias <$> parseJSON v + + +instance ToJSON IndexAliasAction where + toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] + where Object iaObj = toJSON ia + Object optsObj = toJSON opts + toJSON (RemoveAlias ia) = object ["remove" .= iaObj] + where Object iaObj = toJSON ia + +instance ToJSON IndexAlias where + toJSON IndexAlias {..} = object ["index" .= srcIndex + , "alias" .= indexAlias + ] + +instance ToJSON IndexAliasCreate where + toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) + where filterObj = maybe mempty (HM.singleton "filter" . 
toJSON) aliasCreateFilter + Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting + +instance ToJSON AliasRouting where + toJSON (AllAliasRouting v) = object ["routing" .= v] + toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) + where prs = [("search_routing" .=) <$> srch + ,("index_routing" .=) <$> idx] + +instance FromJSON AliasRouting where + parseJSON = withObject "AliasRouting" parse + where parse o = parseAll o <|> parseGranular o + parseAll o = AllAliasRouting <$> o .: "routing" + parseGranular o = do + sr <- o .:? "search_routing" + ir <- o .:? "index_routing" + if isNothing sr && isNothing ir + then fail "Both search_routing and index_routing can't be blank" + else return (GranularAliasRouting sr ir) + +instance FromJSON IndexAliasCreate where + parseJSON v = withObject "IndexAliasCreate" parse v + where parse o = IndexAliasCreate <$> optional (parseJSON v) + <*> o .:? "filter" + {-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -} data IndexAliasSummary = IndexAliasSummary { indexAliasSummaryAlias :: IndexAlias @@ -473,10 +809,32 @@ newtype DocVersion = DocVersion { -- | Smart constructor for in-range doc version mkDocVersion :: Int -> Maybe DocVersion mkDocVersion i - | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) = + | i >= docVersionNumber minBound + && i <= docVersionNumber maxBound = Just $ DocVersion i | otherwise = Nothing +instance Bounded DocVersion where + minBound = DocVersion 1 + maxBound = DocVersion 9200000000000000000 -- 9.2e+18 + +instance Enum DocVersion where + succ x + | x /= maxBound = DocVersion (succ $ docVersionNumber x) + | otherwise = succError "DocVersion" + pred x + | x /= minBound = DocVersion (pred $ docVersionNumber x) + | otherwise = predError "DocVersion" + toEnum i = + fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i + fromEnum = docVersionNumber + enumFrom = boundedEnumFrom + enumFromThen = boundedEnumFromThen + +instance FromJSON DocVersion where + parseJSON v = do + i <- parseJSON v + maybe (fail "DocVersion out of range") return $ mkDocVersion i {-| 'ExternalDocVersion' is a convenience wrapper if your code uses its own version numbers instead of ones from ES. @@ -576,36 +934,6 @@ newtype TemplateName = TemplateName Text deriving (Eq, Show, ToJSON, FromJSON) -} newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, ToJSON, FromJSON) --- This insanity is because ES *sometimes* returns Replica/Shard counts as strings -instance FromJSON ReplicaCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ReplicaCount . parseJSON - parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) - -instance FromJSON ShardCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ShardCount . parseJSON - parseAsString = withText "ShardCount" (fmap ShardCount . 
parseReadText) - -instance Bounded DocVersion where - minBound = DocVersion 1 - maxBound = DocVersion 9200000000000000000 -- 9.2e+18 - -instance Enum DocVersion where - succ x - | x /= maxBound = DocVersion (succ $ docVersionNumber x) - | otherwise = succError "DocVersion" - pred x - | x /= minBound = DocVersion (pred $ docVersionNumber x) - | otherwise = predError "DocVersion" - toEnum i = - fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i - fromEnum = docVersionNumber - enumFrom = boundedEnumFrom - enumFromThen = boundedEnumFromThen - -- | Username type used for HTTP Basic authentication. See 'basicAuthHook'. newtype EsUsername = EsUsername { esUsername :: Text } deriving (Read, Show, Eq) @@ -1225,13 +1553,13 @@ data SnapshotRestoreSettings = SnapshotRestoreSettings { -- revert back to the server default during the restore process. } deriving (Eq, Show) -data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings { - repoUpdateVerify :: Bool - -- ^ After creation/update, synchronously check that nodes can - -- write to this repo. Defaults to True. You may use False if you - -- need a faster response and plan on verifying manually later - -- with 'verifySnapshotRepo'. - } deriving (Eq, Show) +newtype SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings + { repoUpdateVerify :: Bool + -- ^ After creation/update, synchronously check that nodes can + -- write to this repo. Defaults to True. You may use False if you + -- need a faster response and plan on verifying manually later + -- with 'verifySnapshotRepo'. + } deriving (Eq, Show) -- | Reasonable defaults for repo creation/update @@ -1271,7 +1599,7 @@ instance SnapshotRepo FsSnapshotRepo where fromGSnapshotRepo GenericSnapshotRepo {..} | gSnapshotRepoType == fsRepoType = do let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings - parseRepo $ do + parseRepo $ FsSnapshotRepo gSnapshotRepoName <$> o .: "location" <*> o .:? "compress" .!= False <*> o .:? "chunk_size" @@ -1370,12 +1698,6 @@ data SnapshotInfo = SnapshotInfo { } deriving (Eq, Show) -instance FromJSON POSIXMS where - parseJSON = withScientific "POSIXMS" (return . parse) - where parse n = - let n' = truncate n :: Integer - in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) - instance FromJSON SnapshotInfo where parseJSON = withObject "SnapshotInfo" parse where @@ -1437,7 +1759,8 @@ instance Bounded RRGroupRefNum where -- | Only allows valid group number references (1-9). mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum mkRRGroupRefNum i - | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) = + | i >= rrGroupRefNum minBound + && i <= rrGroupRefNum maxBound = Just $ RRGroupRefNum i | otherwise = Nothing @@ -1471,9 +1794,9 @@ defaultSnapshotRestoreSettings = SnapshotRestoreSettings { -- | Index settings that can be overridden. The docs only mention you -- can update number of replicas, but there may be more. You -- definitely cannot override shard count. -data RestoreIndexSettings = RestoreIndexSettings { - restoreOverrideReplicas :: Maybe ReplicaCount - } deriving (Eq, Show) +newtype RestoreIndexSettings = RestoreIndexSettings + { restoreOverrideReplicas :: Maybe ReplicaCount + } deriving (Eq, Show) instance ToJSON RestoreIndexSettings where @@ -1813,7 +2136,7 @@ instance FromJSON NodeBreakersStats where <*> o .: "fielddata" parseNodeStats :: FullNodeId -> Object -> Parser NodeStats -parseNodeStats fnid o = do +parseNodeStats fnid o = NodeStats <$> o .: "name" <*> pure fnid <*> o .:? 
"breakers" @@ -1871,7 +2194,7 @@ instance FromJSON BoundTransportAddress where instance FromJSON NodeOSInfo where parseJSON = withObject "NodeOSInfo" parse where - parse o = do + parse o = NodeOSInfo <$> (unMS <$> o .: "refresh_interval_in_millis") <*> o .: "name" <*> o .: "arch" @@ -1990,6 +2313,16 @@ data Interval = Year | Minute | Second deriving (Eq, Show) +instance ToJSON Interval where + toJSON Year = "year" + toJSON Quarter = "quarter" + toJSON Month = "month" + toJSON Week = "week" + toJSON Day = "day" + toJSON Hour = "hour" + toJSON Minute = "minute" + toJSON Second = "second" + parseStringInterval :: (Monad m) => String -> m NominalDiffTime parseStringInterval s = case span isNumber s of ("", _) -> fail "Invalid interval" diff --git a/src/Database/V5/Bloodhound/Internal/Newtypes.hs b/src/Database/V5/Bloodhound/Internal/Newtypes.hs index 8714cd7..24cc0c5 100644 --- a/src/Database/V5/Bloodhound/Internal/Newtypes.hs +++ b/src/Database/V5/Bloodhound/Internal/Newtypes.hs @@ -146,6 +146,12 @@ newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, FromJSON, ToJS -- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ newtype POSIXMS = POSIXMS { posixMS :: UTCTime } +instance FromJSON POSIXMS where + parseJSON = withScientific "POSIXMS" (return . parse) + where parse n = + let n' = truncate n :: Integer + in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) + newtype Boost = Boost Double deriving (Eq, Show, ToJSON, FromJSON) @@ -154,15 +160,28 @@ newtype BoostTerms = BoostTerms Double deriving (Eq, Show, ToJSON, FromJSON) +{-| 'ReplicaCount' is part of 'IndexSettings' -} +newtype ReplicaCount = + ReplicaCount Int + deriving (Eq, Show, ToJSON) + {-| 'ShardCount' is part of 'IndexSettings' -} newtype ShardCount = ShardCount Int deriving (Eq, Show, ToJSON) -{-| 'ReplicaCount' is part of 'IndexSettings' -} -newtype ReplicaCount = - ReplicaCount Int - deriving (Eq, Show, ToJSON) +-- This insanity is because ES *sometimes* returns Replica/Shard counts as strings +instance FromJSON ReplicaCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ReplicaCount . parseJSON + parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) + +instance FromJSON ShardCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ShardCount . parseJSON + parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) {-| 'IndexName' is used to describe which index to query/create/delete -} newtype IndexName = @@ -195,7 +214,7 @@ unMS (MS t) = t instance FromJSON MS where parseJSON = withScientific "MS" (return . MS . 
parse) where - parse n = fromInteger ((truncate n) * 1000) + parse n = fromInteger (truncate n * 1000) newtype TokenFilter = TokenFilter Text deriving (Eq, Show, FromJSON, ToJSON) diff --git a/src/Database/V5/Bloodhound/Internal/Query.hs b/src/Database/V5/Bloodhound/Internal/Query.hs index fcea1ff..e3e59b7 100644 --- a/src/Database/V5/Bloodhound/Internal/Query.hs +++ b/src/Database/V5/Bloodhound/Internal/Query.hs @@ -435,6 +435,10 @@ data GeoPoint = GeoPoint { geoField :: FieldName , latLon :: LatLon} deriving (Eq, Show) +instance ToJSON GeoPoint where + toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = + object [ geoPointField .= geoPointLatLon ] + data DistanceUnit = Miles | Yards | Feet @@ -882,7 +886,8 @@ instance FromJSON QueryStringQuery where instance ToJSON RangeQuery where toJSON (RangeQuery (FieldName fieldName) range boost) = object [ fieldName .= object conjoined ] - where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range) + where + conjoined = ("boost" .= boost) : rangeValueToPair range instance FromJSON RangeQuery where parseJSON = withObject "RangeQuery" parse @@ -891,38 +896,76 @@ instance FromJSON RangeQuery where <$> parseJSON (Object o) <*> o .: "boost" +parseRangeValue :: ( FromJSON t4 + , FromJSON t3 + , FromJSON t2 + , FromJSON t1 + ) + => (t3 -> t5) + -> (t1 -> t6) + -> (t4 -> t7) + -> (t2 -> t8) + -> (t5 -> t6 -> b) + -> (t7 -> t6 -> b) + -> (t5 -> t8 -> b) + -> (t7 -> t8 -> b) + -> (t5 -> b) + -> (t6 -> b) + -> (t7 -> b) + -> (t8 -> b) + -> Parser b + -> Object + -> Parser b +parseRangeValue mkGt mkLt mkGte mkLte + fGtLt fGteLt fGtLte fGteLte + fGt fLt fGte fLte nada o = do + lt <- o .:? "lt" + lte <- o .:? "lte" + gt <- o .:? "gt" + gte <- o .:? "gte" + case (lt, lte, gt, gte) of + (Just a, _, Just b, _) -> + return (fGtLt (mkGt b) (mkLt a)) + (Just a, _, _, Just b) -> + return (fGteLt (mkGte b) (mkLt a)) + (_, Just a, Just b, _) -> + return (fGtLte (mkGt b) (mkLte a)) + (_, Just a, _, Just b) -> + return (fGteLte (mkGte b) (mkLte a)) + (_, _, Just a, _) -> + return (fGt (mkGt a)) + (Just a, _, _, _) -> + return (fLt (mkLt a)) + (_, _, _, Just a) -> + return (fGte (mkGte a)) + (_, Just a, _, _) -> + return (fLte (mkLte a)) + (Nothing, Nothing, Nothing, Nothing) -> + nada + + instance FromJSON RangeValue where parseJSON = withObject "RangeValue" parse where parse o = parseDate o <|> parseDouble o - parseDate o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? "gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDateGtLt (GreaterThanD b) (LessThanD a)) - (Just a, _, _, Just b)-> return (RangeDateGteLt (GreaterThanEqD b) (LessThanD a)) - (_, Just a, Just b, _)-> return (RangeDateGtLte (GreaterThanD b) (LessThanEqD a)) - (_, Just a, _, Just b)-> return (RangeDateGteLte (GreaterThanEqD b) (LessThanEqD a)) - (_, _, Just a, _)-> return (RangeDateGt (GreaterThanD a)) - (Just a, _, _, _)-> return (RangeDateLt (LessThanD a)) - (_, _, _, Just a)-> return (RangeDateGte (GreaterThanEqD a)) - (_, Just a, _, _)-> return (RangeDateLte (LessThanEqD a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - parseDouble o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? 
"gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDoubleGtLt (GreaterThan b) (LessThan a)) - (Just a, _, _, Just b)-> return (RangeDoubleGteLt (GreaterThanEq b) (LessThan a)) - (_, Just a, Just b, _)-> return (RangeDoubleGtLte (GreaterThan b) (LessThanEq a)) - (_, Just a, _, Just b)-> return (RangeDoubleGteLte (GreaterThanEq b) (LessThanEq a)) - (_, _, Just a, _)-> return (RangeDoubleGt (GreaterThan a)) - (Just a, _, _, _)-> return (RangeDoubleLt (LessThan a)) - (_, _, _, Just a)-> return (RangeDoubleGte (GreaterThanEq a)) - (_, Just a, _, _)-> return (RangeDoubleLte (LessThanEq a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero + parseDate o = + parseRangeValue + GreaterThanD LessThanD + GreaterThanEqD LessThanEqD + RangeDateGtLt RangeDateGteLt + RangeDateGtLte RangeDateGteLte + RangeDateGt RangeDateLt + RangeDateGte RangeDateLte + mzero o + parseDouble o = + parseRangeValue + GreaterThan LessThan + GreaterThanEq LessThanEq + RangeDoubleGtLt RangeDoubleGteLt + RangeDoubleGtLte RangeDoubleGteLte + RangeDoubleGt RangeDoubleLt + RangeDoubleGte RangeDoubleLte + mzero o instance ToJSON PrefixQuery where toJSON (PrefixQuery (FieldName fieldName) queryValue boost) = diff --git a/src/Database/V5/Bloodhound/Types.hs b/src/Database/V5/Bloodhound/Types.hs index e479e10..99f0bd2 100644 --- a/src/Database/V5/Bloodhound/Types.hs +++ b/src/Database/V5/Bloodhound/Types.hs @@ -414,7 +414,6 @@ module Database.V5.Bloodhound.Types import Bloodhound.Import -import qualified Data.Traversable as DT import qualified Data.HashMap.Strict as HM import qualified Data.Map.Strict as M import qualified Data.Text as T @@ -424,7 +423,6 @@ import Database.V5.Bloodhound.Internal.Analysis import Database.V5.Bloodhound.Internal.Client import Database.V5.Bloodhound.Internal.Newtypes import Database.V5.Bloodhound.Internal.Query -import Database.V5.Bloodhound.Internal.StringlyTyped {-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order dependent with later sorts acting as tie-breakers for earlier sorts. @@ -438,8 +436,26 @@ type Sort = [SortSpec] -} -data SortSpec = DefaultSortSpec DefaultSort - | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show) +data SortSpec = + DefaultSortSpec DefaultSort + | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit + deriving (Eq, Show) + +instance ToJSON SortSpec where + toJSON (DefaultSortSpec + (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped + dsSortMode dsMissingSort dsNestedFilter)) = + object [dsSortFieldName .= omitNulls base] where + base = [ "order" .= dsSortOrder + , "unmapped_type" .= dsIgnoreUnmapped + , "mode" .= dsSortMode + , "missing" .= dsMissingSort + , "nested_filter" .= dsNestedFilter ] + + toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = + object [ "unit" .= units + , field .= gdsLatLon + , "order" .= gdsSortOrder ] {-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a 'mkSort' convenience function for when you want to specify only the most @@ -466,6 +482,10 @@ data DefaultSort = data SortOrder = Ascending | Descending deriving (Eq, Show) +instance ToJSON SortOrder where + toJSON Ascending = String "asc" + toJSON Descending = String "desc" + {-| 'Missing' prescribes how to handle missing fields. A missing field can be sorted last, first, or using a custom value as a substitute. 
@@ -475,6 +495,11 @@ data Missing = LastMissing | FirstMissing | CustomMissing Text deriving (Eq, Show) +instance ToJSON Missing where + toJSON LastMissing = String "_last" + toJSON FirstMissing = String "_first" + toJSON (CustomMissing txt) = String txt + {-| 'SortMode' prescribes how to handle sorting array/multi-valued fields. http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option @@ -484,6 +509,12 @@ data SortMode = SortMin | SortSum | SortAvg deriving (Eq, Show) +instance ToJSON SortMode where + toJSON SortMin = String "min" + toJSON SortMax = String "max" + toJSON SortSum = String "sum" + toJSON SortAvg = String "avg" + {-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so that you can concisely describe the usual kind of 'SortSpec's you want. -} @@ -590,6 +621,10 @@ data HighlightEncoder = DefaultEncoder | HTMLEncoder deriving (Eq, Show) +instance ToJSON HighlightEncoder where + toJSON DefaultEncoder = String "default" + toJSON HTMLEncoder = String "html" + -- NOTE: Should the tags use some kind of HTML type, rather than Text? data HighlightTag = TagSchema Text @@ -609,6 +644,18 @@ data SearchResult a = } deriving (Eq, Show) + +instance (FromJSON a) => FromJSON (SearchResult a) where + parseJSON (Object v) = SearchResult <$> + v .: "took" <*> + v .: "timed_out" <*> + v .: "_shards" <*> + v .: "hits" <*> + v .:? "aggregations" <*> + v .:? "_scroll_id" <*> + v .:? "suggest" + parseJSON _ = empty + newtype ScrollId = ScrollId Text deriving (Eq, Show, Ord, ToJSON, FromJSON) @@ -618,6 +665,14 @@ data SearchHits a = , maxScore :: Score , hits :: [Hit a] } deriving (Eq, Show) + +instance (FromJSON a) => FromJSON (SearchHits a) where + parseJSON (Object v) = SearchHits <$> + v .: "total" <*> + v .: "max_score" <*> + v .: "hits" + parseJSON _ = empty + instance Semigroup (SearchHits a) where (SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb) @@ -634,12 +689,26 @@ data Hit a = , hitFields :: Maybe HitFields , hitHighlight :: Maybe HitHighlight } deriving (Eq, Show) +instance (FromJSON a) => FromJSON (Hit a) where + parseJSON (Object v) = Hit <$> + v .: "_index" <*> + v .: "_type" <*> + v .: "_id" <*> + v .: "_score" <*> + v .:? "_source" <*> + v .:? "fields" <*> + v .:? 
"highlight" + parseJSON _ = empty + newtype HitFields = HitFields (M.Map Text [Value]) deriving (Eq, Show) -type HitHighlight = M.Map Text [Text] +instance FromJSON HitFields where + parseJSON x + = HitFields <$> parseJSON x +type HitHighlight = M.Map Text [Text] type Aggregations = M.Map Text Aggregation @@ -649,21 +718,43 @@ emptyAggregations = M.empty mkAggregations :: Text -> Aggregation -> Aggregations mkAggregations name aggregation = M.insert name aggregation emptyAggregations -data TermOrder = TermOrder{ termSortField :: Text - , termSortOrder :: SortOrder } deriving (Eq, Show) +data TermOrder = TermOrder + { termSortField :: Text + , termSortOrder :: SortOrder } deriving (Eq, Show) + +instance ToJSON TermOrder where + toJSON (TermOrder termSortField termSortOrder) = + object [termSortField .= termSortOrder] data TermInclusion = TermInclusion Text | TermPattern Text Text deriving (Eq, Show) +instance ToJSON TermInclusion where + toJSON (TermInclusion x) = toJSON x + toJSON (TermPattern pattern flags) = + omitNulls [ "pattern" .= pattern + , "flags" .= flags] + data CollectionMode = BreadthFirst | DepthFirst deriving (Eq, Show) +instance ToJSON CollectionMode where + toJSON BreadthFirst = "breadth_first" + toJSON DepthFirst = "depth_first" + data ExecutionHint = Ordinals | GlobalOrdinals | GlobalOrdinalsHash | GlobalOrdinalsLowCardinality | Map deriving (Eq, Show) +instance ToJSON ExecutionHint where + toJSON Ordinals = "ordinals" + toJSON GlobalOrdinals = "global_ordinals" + toJSON GlobalOrdinalsHash = "global_ordinals_hash" + toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality" + toJSON Map = "map" + data Aggregation = TermsAgg TermsAggregation | CardinalityAgg CardinalityAggregation | DateHistogramAgg DateHistogramAggregation @@ -675,6 +766,67 @@ data Aggregation = TermsAgg TermsAggregation | StatsAgg StatisticsAggregation deriving (Eq, Show) +instance ToJSON Aggregation where + toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) = + omitNulls ["terms" .= omitNulls [ toJSON' term, + "include" .= include, + "exclude" .= exclude, + "order" .= order, + "min_doc_count" .= minDocCount, + "size" .= size, + "shard_size" .= shardSize, + "collect_mode" .= collectMode, + "execution_hint" .= executionHint + ], + "aggs" .= termAggs ] + where + toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y } + + toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) = + object ["cardinality" .= omitNulls [ "field" .= field, + "precisionThreshold" .= precisionThreshold + ] + ] + + toJSON (DateHistogramAgg + (DateHistogramAggregation field interval format + preZone postZone preOffset postOffset dateHistoAggs)) = + omitNulls ["date_histogram" .= omitNulls [ "field" .= field, + "interval" .= interval, + "format" .= format, + "pre_zone" .= preZone, + "post_zone" .= postZone, + "pre_offset" .= preOffset, + "post_offset" .= postOffset + ], + "aggs" .= dateHistoAggs ] + toJSON (ValueCountAgg a) = object ["value_count" .= v] + where v = case a of + (FieldValueCount (FieldName n)) -> + object ["field" .= n] + (ScriptValueCount s) -> + object ["script" .= s] + toJSON (FilterAgg (FilterAggregation filt ags)) = + omitNulls [ "filter" .= filt + , "aggs" .= ags] + toJSON (DateRangeAgg a) = object [ "date_range" .= a + ] + toJSON (MissingAgg (MissingAggregation{..})) = + object ["missing" .= object ["field" .= maField]] + + toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) = + 
omitNulls ["top_hits" .= omitNulls [ "size" .= msize + , "from" .= mfrom + , "sort" .= msort + ] + ] + + toJSON (StatsAgg (StatisticsAggregation typ field)) = + object [stType .= omitNulls [ "field" .= field ]] + where + stType | typ == Basic = "stats" + | otherwise = "extended_stats" + data TopHitsAggregation = TopHitsAggregation { taFrom :: Maybe From , taSize :: Maybe Size @@ -685,17 +837,18 @@ data MissingAggregation = MissingAggregation { maField :: Text } deriving (Eq, Show) -data TermsAggregation = TermsAggregation { term :: Either Text Text - , termInclude :: Maybe TermInclusion - , termExclude :: Maybe TermInclusion - , termOrder :: Maybe TermOrder - , termMinDocCount :: Maybe Int - , termSize :: Maybe Int - , termShardSize :: Maybe Int - , termCollectMode :: Maybe CollectionMode - , termExecutionHint :: Maybe ExecutionHint - , termAggs :: Maybe Aggregations - } deriving (Eq, Show) +data TermsAggregation = TermsAggregation + { term :: Either Text Text + , termInclude :: Maybe TermInclusion + , termExclude :: Maybe TermInclusion + , termOrder :: Maybe TermOrder + , termMinDocCount :: Maybe Int + , termSize :: Maybe Int + , termShardSize :: Maybe Int + , termCollectMode :: Maybe CollectionMode + , termExecutionHint :: Maybe ExecutionHint + , termAggs :: Maybe Aggregations + } deriving (Eq, Show) data CardinalityAggregation = CardinalityAggregation { cardinalityField :: FieldName, @@ -720,17 +873,43 @@ data DateRangeAggregation = DateRangeAggregation , draRanges :: NonEmpty DateRangeAggRange } deriving (Eq, Show) +instance ToJSON DateRangeAggregation where + toJSON DateRangeAggregation {..} = + omitNulls [ "field" .= draField + , "format" .= draFormat + , "ranges" .= toList draRanges + ] + data DateRangeAggRange = DateRangeFrom DateMathExpr | DateRangeTo DateMathExpr | DateRangeFromAndTo DateMathExpr DateMathExpr deriving (Eq, Show) +instance ToJSON DateRangeAggRange where + toJSON (DateRangeFrom e) = object [ "from" .= e ] + toJSON (DateRangeTo e) = object [ "to" .= e ] + toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] + -- | See for more information. data DateMathExpr = DateMathExpr DateMathAnchor [DateMathModifier] deriving (Eq, Show) +instance ToJSON DateMathExpr where + toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods)) + where fmtA DMNow = "now" + fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||" + fmtMod (AddTime n u) = "+" <> showText n <> fmtU u + fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u + fmtMod (RoundDownTo u) = "/" <> fmtU u + fmtU DMYear = "y" + fmtU DMMonth = "M" + fmtU DMWeek = "w" + fmtU DMDay = "d" + fmtU DMHour = "h" + fmtU DMMinute = "m" + fmtU DMSecond = "s" -- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from. 
data DateMathAnchor = @@ -796,124 +975,6 @@ mkStatsAggregation = StatisticsAggregation Basic mkExtendedStatsAggregation :: FieldName -> StatisticsAggregation mkExtendedStatsAggregation = StatisticsAggregation Extended -instance ToJSON TermOrder where - toJSON (TermOrder termSortField termSortOrder) = object [termSortField .= termSortOrder] - -instance ToJSON TermInclusion where - toJSON (TermInclusion x) = toJSON x - toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern, - "flags" .= flags] - -instance ToJSON CollectionMode where - toJSON BreadthFirst = "breadth_first" - toJSON DepthFirst = "depth_first" - -instance ToJSON ExecutionHint where - toJSON Ordinals = "ordinals" - toJSON GlobalOrdinals = "global_ordinals" - toJSON GlobalOrdinalsHash = "global_ordinals_hash" - toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality" - toJSON Map = "map" - -instance ToJSON Interval where - toJSON Year = "year" - toJSON Quarter = "quarter" - toJSON Month = "month" - toJSON Week = "week" - toJSON Day = "day" - toJSON Hour = "hour" - toJSON Minute = "minute" - toJSON Second = "second" - -instance ToJSON Aggregation where - toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) = - omitNulls ["terms" .= omitNulls [ toJSON' term, - "include" .= include, - "exclude" .= exclude, - "order" .= order, - "min_doc_count" .= minDocCount, - "size" .= size, - "shard_size" .= shardSize, - "collect_mode" .= collectMode, - "execution_hint" .= executionHint - ], - "aggs" .= termAggs ] - where - toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y } - - toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) = - object ["cardinality" .= omitNulls [ "field" .= field, - "precisionThreshold" .= precisionThreshold - ] - ] - - toJSON (DateHistogramAgg - (DateHistogramAggregation field interval format - preZone postZone preOffset postOffset dateHistoAggs)) = - omitNulls ["date_histogram" .= omitNulls [ "field" .= field, - "interval" .= interval, - "format" .= format, - "pre_zone" .= preZone, - "post_zone" .= postZone, - "pre_offset" .= preOffset, - "post_offset" .= postOffset - ], - "aggs" .= dateHistoAggs ] - toJSON (ValueCountAgg a) = object ["value_count" .= v] - where v = case a of - (FieldValueCount (FieldName n)) -> - object ["field" .= n] - (ScriptValueCount s) -> - object ["script" .= s] - toJSON (FilterAgg (FilterAggregation filt ags)) = - omitNulls [ "filter" .= filt - , "aggs" .= ags] - toJSON (DateRangeAgg a) = object [ "date_range" .= a - ] - toJSON (MissingAgg (MissingAggregation{..})) = - object ["missing" .= object ["field" .= maField]] - - toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) = - omitNulls ["top_hits" .= omitNulls [ "size" .= msize - , "from" .= mfrom - , "sort" .= msort - ] - ] - - toJSON (StatsAgg (StatisticsAggregation typ field)) = - object [stType .= omitNulls [ "field" .= field ]] - where - stType | typ == Basic = "stats" - | otherwise = "extended_stats" - -instance ToJSON DateRangeAggregation where - toJSON DateRangeAggregation {..} = - omitNulls [ "field" .= draField - , "format" .= draFormat - , "ranges" .= toList draRanges - ] - -instance ToJSON DateRangeAggRange where - toJSON (DateRangeFrom e) = object [ "from" .= e ] - toJSON (DateRangeTo e) = object [ "to" .= e ] - toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] - -instance ToJSON DateMathExpr where - toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat 
(fmtMod <$> mods)) - where fmtA DMNow = "now" - fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||" - fmtMod (AddTime n u) = "+" <> showText n <> fmtU u - fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u - fmtMod (RoundDownTo u) = "/" <> fmtU u - fmtU DMYear = "y" - fmtU DMMonth = "M" - fmtU DMWeek = "w" - fmtU DMDay = "d" - fmtU DMHour = "h" - fmtU DMMinute = "m" - fmtU DMSecond = "s" - - type AggregationResults = M.Map Text Value class BucketAggregation a where @@ -921,83 +982,48 @@ class BucketAggregation a where docCount :: a -> Int aggs :: a -> Maybe AggregationResults - -data Bucket a = Bucket { buckets :: [a]} deriving (Read, Show) - -data BucketValue = TextValue Text - | ScientificValue Scientific - | BoolValue Bool deriving (Read, Show) - -data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show) - -data TopHitResult a = TopHitResult { tarHits :: (SearchHits a) - } deriving Show - -data TermsResult = TermsResult { termKey :: BucketValue - , termsDocCount :: Int - , termsAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateHistogramResult = DateHistogramResult { dateKey :: Int - , dateKeyStr :: Maybe Text - , dateDocCount :: Int - , dateHistogramAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateRangeResult = - DateRangeResult { dateRangeKey :: Text - , dateRangeFrom :: Maybe UTCTime - , dateRangeFromAsString :: Maybe Text - , dateRangeTo :: Maybe UTCTime - , dateRangeToAsString :: Maybe Text - , dateRangeDocCount :: Int - , dateRangeAggs :: Maybe AggregationResults } - deriving (Eq, Show) - -toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult) -toTerms = toAggResult - -toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) -toDateHistogram = toAggResult - -toMissing :: Text -> AggregationResults -> Maybe MissingResult -toMissing = toAggResult - -toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) -toTopHits = toAggResult - -toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a -toAggResult t a = M.lookup t a >>= deserialize - where deserialize = parseMaybe parseJSON - -instance BucketAggregation TermsResult where - key = termKey - docCount = termsDocCount - aggs = termsAggs - -instance BucketAggregation DateHistogramResult where - key = TextValue . showText . dateKey - docCount = dateDocCount - aggs = dateHistogramAggs - -instance BucketAggregation DateRangeResult where - key = TextValue . 
dateRangeKey - docCount = dateRangeDocCount - aggs = dateRangeAggs +data Bucket a = Bucket + { buckets :: [a] + } deriving (Read, Show) instance (FromJSON a) => FromJSON (Bucket a) where parseJSON (Object v) = Bucket <$> v .: "buckets" parseJSON _ = mempty +data BucketValue = TextValue Text + | ScientificValue Scientific + | BoolValue Bool deriving (Read, Show) + instance FromJSON BucketValue where parseJSON (String t) = return $ TextValue t parseJSON (Number s) = return $ ScientificValue s parseJSON (Bool b) = return $ BoolValue b parseJSON _ = mempty +data MissingResult = MissingResult + { missingDocCount :: Int + } deriving (Show) + instance FromJSON MissingResult where parseJSON = withObject "MissingResult" parse where parse v = MissingResult <$> v .: "doc_count" +data TopHitResult a = TopHitResult + { tarHits :: (SearchHits a) + } deriving Show + +instance (FromJSON a) => FromJSON (TopHitResult a) where + parseJSON (Object v) = TopHitResult <$> + v .: "hits" + parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" + +data TermsResult = TermsResult + { termKey :: BucketValue + , termsDocCount :: Int + , termsAggs :: Maybe AggregationResults + } deriving (Read, Show) + instance FromJSON TermsResult where parseJSON (Object v) = TermsResult <$> v .: "key" <*> @@ -1005,6 +1031,18 @@ instance FromJSON TermsResult where (pure $ getNamedSubAgg v ["key", "doc_count"]) parseJSON _ = mempty +instance BucketAggregation TermsResult where + key = termKey + docCount = termsDocCount + aggs = termsAggs + +data DateHistogramResult = DateHistogramResult + { dateKey :: Int + , dateKeyStr :: Maybe Text + , dateDocCount :: Int + , dateHistogramAggs :: Maybe AggregationResults + } deriving (Show) + instance FromJSON DateHistogramResult where parseJSON (Object v) = DateHistogramResult <$> v .: "key" <*> @@ -1017,6 +1055,21 @@ instance FromJSON DateHistogramResult where ) parseJSON _ = mempty +instance BucketAggregation DateHistogramResult where + key = TextValue . showText . dateKey + docCount = dateDocCount + aggs = dateHistogramAggs + +data DateRangeResult = DateRangeResult + { dateRangeKey :: Text + , dateRangeFrom :: Maybe UTCTime + , dateRangeFromAsString :: Maybe Text + , dateRangeTo :: Maybe UTCTime + , dateRangeToAsString :: Maybe Text + , dateRangeDocCount :: Int + , dateRangeAggs :: Maybe AggregationResults + } deriving (Eq, Show) + instance FromJSON DateRangeResult where parseJSON = withObject "DateRangeResult" parse where parse v = DateRangeResult <$> @@ -1035,10 +1088,26 @@ instance FromJSON DateRangeResult where ] ) -instance (FromJSON a) => FromJSON (TopHitResult a) where - parseJSON (Object v) = TopHitResult <$> - v .: "hits" - parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" +instance BucketAggregation DateRangeResult where + key = TextValue . dateRangeKey + docCount = dateRangeDocCount + aggs = dateRangeAggs + +toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult) +toTerms = toAggResult + +toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) +toDateHistogram = toAggResult + +toMissing :: Text -> AggregationResults -> Maybe MissingResult +toMissing = toAggResult + +toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) +toTopHits = toAggResult + +toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a +toAggResult t a = M.lookup t a >>= deserialize + where deserialize = parseMaybe parseJSON -- Try to get an AggregationResults when we don't know the -- field name. 
We filter out the known keys to try to minimize the noise. @@ -1049,339 +1118,6 @@ getNamedSubAgg o knownKeys = maggRes | HM.null unknownKeys = Nothing | otherwise = Just . M.fromList $ HM.toList unknownKeys -instance ToJSON GeoPoint where - toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = - object [ geoPointField .= geoPointLatLon ] - -instance FromJSON Status where - parseJSON (Object v) = Status <$> - v .: "name" <*> - v .: "cluster_name" <*> - v .: "cluster_uuid" <*> - v .: "version" <*> - v .: "tagline" - parseJSON _ = empty - -instance ToJSON IndexSettings where - toJSON (IndexSettings s r) = object ["settings" .= - object ["index" .= - object ["number_of_shards" .= s, "number_of_replicas" .= r] - ] - ] - -instance FromJSON IndexSettings where - parseJSON = withObject "IndexSettings" parse - where parse o = do s <- o .: "settings" - i <- s .: "index" - IndexSettings <$> i .: "number_of_shards" - <*> i .: "number_of_replicas" - -instance ToJSON UpdatableIndexSetting where - toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x - toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x - toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) - toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x - toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x - toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x - toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x - toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) - toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x - toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x - toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) - toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) - toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) - toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) - toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) - toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x - toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x - toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x - toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) - toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x - toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x - toJSON (CompressionSetting x) = oPath ("index" :| ["codec"]) x - toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x - toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x - toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x - toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x - toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x - toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x - toJSON (BlocksMetaData x) = oPath ("blocks" :| 
["metadata"]) x - toJSON (MappingTotalFieldsLimit x) = oPath ("index" :| ["mapping","total_fields","limit"]) x - toJSON (AnalysisSetting x) = oPath ("index" :| ["analysis"]) x - -instance FromJSON UpdatableIndexSetting where - parseJSON = withObject "UpdatableIndexSetting" parse - where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] - <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] - <|> refreshInterval `taggedAt` ["index", "refresh_interval"] - <|> indexConcurrency `taggedAt` ["index", "concurrency"] - <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] - <|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"] - <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] - <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] - <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] - <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] - <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] - <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] - <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] - <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] - <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] - <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] - <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] - <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] - <|> gcDeletes `taggedAt` ["index", "gc_deletes"] - <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] - <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] - <|> compressionSetting `taggedAt` ["index", "codec"] - <|> compoundFormat `taggedAt` ["index", "compound_format"] - <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] - <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] - <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] - <|> blocksRead `taggedAt` ["blocks", "read"] - <|> blocksWrite `taggedAt` ["blocks", "write"] - <|> blocksMetaData `taggedAt` ["blocks", "metadata"] - <|> mappingTotalFieldsLimit `taggedAt` ["index", "mapping", "total_fields", "limit"] - <|> analysisSetting `taggedAt` ["index", "analysis"] - where taggedAt f ks = taggedAt' f (Object o) ks - taggedAt' f v [] = f =<< (parseJSON v <|> (parseJSON (unStringlyTypeJSON v))) - taggedAt' f v (k:ks) = withObject "Object" (\o -> do v' <- o .: k - taggedAt' f v' ks) v - numberOfReplicas = pure . NumberOfReplicas - autoExpandReplicas = pure . AutoExpandReplicas - refreshInterval = pure . RefreshInterval . ndtJSON - indexConcurrency = pure . IndexConcurrency - failOnMergeFailure = pure . FailOnMergeFailure - translogFlushThresholdOps = pure . TranslogFlushThresholdOps - translogFlushThresholdSize = pure . TranslogFlushThresholdSize - translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON - translogDisableFlush = pure . TranslogDisableFlush - cacheFilterMaxSize = pure . CacheFilterMaxSize - cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON - gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON - routingAllocationInclude = fmap RoutingAllocationInclude . 
parseAttrFilter - routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter - routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter - routingAllocationEnable = pure . RoutingAllocationEnable - routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode - recoveryInitialShards = pure . RecoveryInitialShards - gcDeletes = pure . GCDeletes . ndtJSON - ttlDisablePurge = pure . TTLDisablePurge - translogFSType = pure . TranslogFSType - compressionSetting = pure . CompressionSetting - compoundFormat = pure . IndexCompoundFormat - compoundOnFlush = pure . IndexCompoundOnFlush - warmerEnabled = pure . WarmerEnabled - blocksReadOnly = pure . BlocksReadOnly - blocksRead = pure . BlocksRead - blocksWrite = pure . BlocksWrite - blocksMetaData = pure . BlocksMetaData - mappingTotalFieldsLimit = pure . MappingTotalFieldsLimit - analysisSetting = pure . AnalysisSetting - -instance FromJSON IndexSettingsSummary where - parseJSON = withObject "IndexSettingsSummary" parse - where parse o = case HM.toList o of - [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn) - <$> parseJSON v - <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings") - _ -> fail "Expected single-key object with index name" - redundant (NumberOfReplicas _) = True - redundant _ = False - - -parseSettings :: Object -> Parser [UpdatableIndexSetting] -parseSettings o = do - o' <- o .: "index" - -- slice the index object into singleton hashmaps and try to parse each - parses <- forM (HM.toList o') $ \(k, v) -> do - -- blocks are now nested into the "index" key, which is not how they're serialized - let atRoot = Object (HM.singleton k v) - let atIndex = Object (HM.singleton "index" atRoot) - optional (parseJSON atRoot <|> parseJSON atIndex) - return (catMaybes parses) - -oPath :: ToJSON a => NonEmpty Text -> a -> Value -oPath (k :| []) v = object [k .= v] -oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v] - -attrFilterJSON :: NonEmpty NodeAttrFilter -> Value -attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs) - | NodeAttrFilter (NodeAttrName n) vs <- toList fs] - -parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter) -parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse - where parse o = case HM.toList o of - [] -> fail "Expected non-empty list of NodeAttrFilters" - x:xs -> DT.mapM (uncurry parse') (x :| xs) - parse' n = withText "Text" $ \t -> - case T.splitOn "," t of - fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs)) - [] -> fail "Expected non-empty list of filter values" - -instance ToJSON ReplicaBounds where - toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b) - toJSON (ReplicasLowerBounded a) = String (showText a <> "-all") - toJSON ReplicasUnbounded = Bool False - -instance FromJSON ReplicaBounds where - parseJSON v = withText "ReplicaBounds" parseText v - <|> withBool "ReplicaBounds" parseBool v - where parseText t = case T.splitOn "-" t of - [a, "all"] -> ReplicasLowerBounded <$> parseReadText a - [a, b] -> ReplicasBounded <$> parseReadText a - <*> parseReadText b - _ -> fail ("Could not parse ReplicaBounds: " <> show t) - parseBool False = pure ReplicasUnbounded - parseBool _ = fail "ReplicasUnbounded cannot be represented with True" - -instance ToJSON AllocationPolicy where - toJSON AllocAll = String "all" - toJSON AllocPrimaries = String "primaries" - toJSON AllocNewPrimaries = String "new_primaries" - toJSON AllocNone = String "none" - -instance FromJSON AllocationPolicy where - parseJSON 
= withText "AllocationPolicy" parse - where parse "all" = pure AllocAll - parse "primaries" = pure AllocPrimaries - parse "new_primaries" = pure AllocNewPrimaries - parse "none" = pure AllocNone - parse t = fail ("Invlaid AllocationPolicy: " <> show t) - -instance ToJSON InitialShardCount where - toJSON QuorumShards = String "quorum" - toJSON QuorumMinus1Shards = String "quorum-1" - toJSON FullShards = String "full" - toJSON FullMinus1Shards = String "full-1" - toJSON (ExplicitShards x) = toJSON x - -instance FromJSON InitialShardCount where - parseJSON v = withText "InitialShardCount" parseText v - <|> ExplicitShards <$> parseJSON v - where parseText "quorum" = pure QuorumShards - parseText "quorum-1" = pure QuorumMinus1Shards - parseText "full" = pure FullShards - parseText "full-1" = pure FullMinus1Shards - parseText _ = mzero - -instance ToJSON FSType where - toJSON FSSimple = "simple" - toJSON FSBuffered = "buffered" - -instance FromJSON FSType where - parseJSON = withText "FSType" parse - where parse "simple" = pure FSSimple - parse "buffered" = pure FSBuffered - parse t = fail ("Invalid FSType: " <> show t) - -instance ToJSON CompoundFormat where - toJSON (CompoundFileFormat x) = Bool x - toJSON (MergeSegmentVsTotalIndex x) = toJSON x - -instance FromJSON CompoundFormat where - parseJSON v = CompoundFileFormat <$> parseJSON v - <|> MergeSegmentVsTotalIndex <$> parseJSON v - -instance ToJSON NominalDiffTimeJSON where - toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") - -instance FromJSON NominalDiffTimeJSON where - parseJSON = withText "NominalDiffTime" parse - where parse t = case T.takeEnd 1 t of - "s" -> NominalDiffTimeJSON . fromInteger <$> parseReadText (T.dropEnd 1 t) - _ -> fail "Invalid or missing NominalDiffTime unit (expected s)" - -instance ToJSON IndexTemplate where - toJSON (IndexTemplate p s m) = merge - (object [ "template" .= p - , "mappings" .= foldl' merge (object []) m - ]) - (toJSON s) - where - merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2 - merge o Null = o - merge _ _ = undefined - -instance (FromJSON a) => FromJSON (EsResult a) where - parseJSON jsonVal@(Object v) = do - found <- v .:? "found" .!= False - fr <- if found - then parseJSON jsonVal - else return Nothing - EsResult <$> v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - pure fr - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (EsResultFound a) where - parseJSON (Object v) = EsResultFound <$> - v .: "_version" <*> - v .: "_source" - parseJSON _ = empty - -instance FromJSON EsError where - parseJSON (Object v) = EsError <$> - v .: "status" <*> - (v .: "error" <|> (v .: "error" >>= (.: "reason"))) - parseJSON _ = empty - -instance FromJSON IndexAliasesSummary where - parseJSON = withObject "IndexAliasesSummary" parse - where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) - go ixn = withObject "index aliases" $ \ia -> do - aliases <- ia .:? 
"aliases" .!= mempty - forM (HM.toList aliases) $ \(aName, v) -> do - let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) - IndexAliasSummary indexAlias <$> parseJSON v - - -instance ToJSON IndexAliasAction where - toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] - where Object iaObj = toJSON ia - Object optsObj = toJSON opts - toJSON (RemoveAlias ia) = object ["remove" .= iaObj] - where Object iaObj = toJSON ia - -instance ToJSON IndexAlias where - toJSON IndexAlias {..} = object ["index" .= srcIndex - , "alias" .= indexAlias - ] - -instance ToJSON IndexAliasCreate where - toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) - where filterObj = maybe mempty (HM.singleton "filter" . toJSON) aliasCreateFilter - Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting - -instance ToJSON AliasRouting where - toJSON (AllAliasRouting v) = object ["routing" .= v] - toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) - where prs = [("search_routing" .=) <$> srch - ,("index_routing" .=) <$> idx] - -instance FromJSON AliasRouting where - parseJSON = withObject "AliasRouting" parse - where parse o = parseAll o <|> parseGranular o - parseAll o = AllAliasRouting <$> o .: "routing" - parseGranular o = do - sr <- o .:? "search_routing" - ir <- o .:? "index_routing" - if isNothing sr && isNothing ir - then fail "Both search_routing and index_routing can't be blank" - else return (GranularAliasRouting sr ir) - -instance FromJSON IndexAliasCreate where - parseJSON v = withObject "IndexAliasCreate" parse v - where parse o = IndexAliasCreate <$> optional (parseJSON v) - <*> o .:? "filter" - -instance ToJSON SearchAliasRouting where - toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) - -instance FromJSON SearchAliasRouting where - parseJSON = withText "SearchAliasRouting" parse - where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) - instance ToJSON Search where toJSON (Search mquery sFilter sort searchAggs highlight sTrackSortScores sFrom sSize _ sFields @@ -1494,91 +1230,12 @@ nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = [ "fragment_size" .= npFragSize , "number_of_fragments" .= npNumOfFrags] - -instance ToJSON HighlightEncoder where - toJSON DefaultEncoder = String "default" - toJSON HTMLEncoder = String "html" - highlightTagToPairs :: Maybe HighlightTag -> [Pair] highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"] highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre , "post_tags" .= post] highlightTagToPairs Nothing = [] -instance ToJSON SortSpec where - toJSON (DefaultSortSpec - (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped - dsSortMode dsMissingSort dsNestedFilter)) = - object [dsSortFieldName .= omitNulls base] where - base = [ "order" .= dsSortOrder - , "unmapped_type" .= dsIgnoreUnmapped - , "mode" .= dsSortMode - , "missing" .= dsMissingSort - , "nested_filter" .= dsNestedFilter ] - - toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = - object [ "unit" .= units - , field .= gdsLatLon - , "order" .= gdsSortOrder ] - - -instance ToJSON SortOrder where - toJSON Ascending = String "asc" - toJSON Descending = String "desc" - - -instance ToJSON SortMode where - toJSON SortMin = String "min" - toJSON SortMax = String "max" - toJSON SortSum = String "sum" - toJSON SortAvg = String "avg" - - -instance ToJSON Missing where - toJSON 
LastMissing = String "_last" - toJSON FirstMissing = String "_first" - toJSON (CustomMissing txt) = String txt - - - -instance (FromJSON a) => FromJSON (SearchResult a) where - parseJSON (Object v) = SearchResult <$> - v .: "took" <*> - v .: "timed_out" <*> - v .: "_shards" <*> - v .: "hits" <*> - v .:? "aggregations" <*> - v .:? "_scroll_id" <*> - v .:? "suggest" - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (SearchHits a) where - parseJSON (Object v) = SearchHits <$> - v .: "total" <*> - v .: "max_score" <*> - v .: "hits" - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (Hit a) where - parseJSON (Object v) = Hit <$> - v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - v .: "_score" <*> - v .:? "_source" <*> - v .:? "fields" <*> - v .:? "highlight" - parseJSON _ = empty - -instance FromJSON HitFields where - parseJSON x - = HitFields <$> parseJSON x - -instance FromJSON DocVersion where - parseJSON v = do - i <- parseJSON v - maybe (fail "DocVersion out of range") return $ mkDocVersion i - data Suggest = Suggest { suggestText :: Text , suggestName :: Text , suggestType :: SuggestType @@ -1613,36 +1270,38 @@ instance FromJSON SuggestType where where taggedWith parser k = parser =<< o .: k phraseSuggester = pure . SuggestTypePhraseSuggester -data PhraseSuggester = - PhraseSuggester { phraseSuggesterField :: FieldName - , phraseSuggesterGramSize :: Maybe Int - , phraseSuggesterRealWordErrorLikelihood :: Maybe Int - , phraseSuggesterConfidence :: Maybe Int - , phraseSuggesterMaxErrors :: Maybe Int - , phraseSuggesterSeparator :: Maybe Text - , phraseSuggesterSize :: Maybe Size - , phraseSuggesterAnalyzer :: Maybe Analyzer - , phraseSuggesterShardSize :: Maybe Int - , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter - , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate - , phraseSuggesterCandidateGenerators :: [DirectGenerators] - } - deriving (Eq, Show) +data PhraseSuggester = PhraseSuggester + { phraseSuggesterField :: FieldName + , phraseSuggesterGramSize :: Maybe Int + , phraseSuggesterRealWordErrorLikelihood :: Maybe Int + , phraseSuggesterConfidence :: Maybe Int + , phraseSuggesterMaxErrors :: Maybe Int + , phraseSuggesterSeparator :: Maybe Text + , phraseSuggesterSize :: Maybe Size + , phraseSuggesterAnalyzer :: Maybe Analyzer + , phraseSuggesterShardSize :: Maybe Int + , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter + , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate + , phraseSuggesterCandidateGenerators :: [DirectGenerators] + } deriving (Eq, Show) instance ToJSON PhraseSuggester where - toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField - , "gram_size" .= phraseSuggesterGramSize - , "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood - , "confidence" .= phraseSuggesterConfidence - , "max_errors" .= phraseSuggesterMaxErrors - , "separator" .= phraseSuggesterSeparator - , "size" .= phraseSuggesterSize - , "analyzer" .= phraseSuggesterAnalyzer - , "shard_size" .= phraseSuggesterShardSize - , "highlight" .= phraseSuggesterHighlight - , "collate" .= phraseSuggesterCollate - , "direct_generator" .= phraseSuggesterCandidateGenerators - ] + toJSON PhraseSuggester{..} = + omitNulls [ "field" .= phraseSuggesterField + , "gram_size" .= phraseSuggesterGramSize + , "real_word_error_likelihood" .= + phraseSuggesterRealWordErrorLikelihood + , "confidence" .= phraseSuggesterConfidence + , "max_errors" .= phraseSuggesterMaxErrors + , "separator" .= phraseSuggesterSeparator + , "size" .= 
phraseSuggesterSize + , "analyzer" .= phraseSuggesterAnalyzer + , "shard_size" .= phraseSuggesterShardSize + , "highlight" .= phraseSuggesterHighlight + , "collate" .= phraseSuggesterCollate + , "direct_generator" .= + phraseSuggesterCandidateGenerators + ] instance FromJSON PhraseSuggester where parseJSON = withObject "PhraseSuggester" parse @@ -1683,19 +1342,19 @@ instance FromJSON PhraseSuggesterHighlighter where <$> o .: "pre_tag" <*> o .: "post_tag" -data PhraseSuggesterCollate = - PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline - , phraseSuggesterCollatePrune :: Bool - } - deriving (Eq, Show) +data PhraseSuggesterCollate = PhraseSuggesterCollate + { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline + , phraseSuggesterCollatePrune :: Bool + } deriving (Eq, Show) instance ToJSON PhraseSuggesterCollate where - toJSON PhraseSuggesterCollate{..} = object [ "query" .= object - [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) - ] - , "params" .= (params phraseSuggesterCollateTemplateQuery) - , "prune" .= phraseSuggesterCollatePrune - ] + toJSON PhraseSuggesterCollate{..} = + object [ "query" .= object + [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) + ] + , "params" .= (params phraseSuggesterCollateTemplateQuery) + , "prune" .= phraseSuggesterCollatePrune + ] instance FromJSON PhraseSuggesterCollate where parseJSON (Object o) = do @@ -1703,7 +1362,8 @@ instance FromJSON PhraseSuggesterCollate where inline' <- query' .: "inline" params' <- o .: "params" prune' <- o .:? "prune" .!= False - return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune' + return $ PhraseSuggesterCollate + (TemplateQueryInline inline' params') prune' parseJSON x = typeMismatch "PhraseSuggesterCollate" x data SuggestOptions = @@ -1738,11 +1398,10 @@ instance FromJSON SuggestResponse where <*> o .: "length" <*> o .: "options" -data NamedSuggestionResponse = - NamedSuggestionResponse { nsrName :: Text - , nsrResponses :: [SuggestResponse] - } - deriving (Eq, Read, Show) +data NamedSuggestionResponse = NamedSuggestionResponse + { nsrName :: Text + , nsrResponses :: [SuggestResponse] + } deriving (Eq, Read, Show) instance FromJSON NamedSuggestionResponse where parseJSON (Object o) = do @@ -1786,7 +1445,6 @@ data DirectGenerators = DirectGenerators } deriving (Eq, Show) - instance ToJSON DirectGenerators where toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField , "size" .= directGeneratorsSize diff --git a/tests/V5/Test/Aggregation.hs b/tests/V5/Test/Aggregation.hs index 91e0789..394aa22 100644 --- a/tests/V5/Test/Aggregation.hs +++ b/tests/V5/Test/Aggregation.hs @@ -1,6 +1,6 @@ {-# LANGUAGE OverloadedStrings #-} -module Test.Aggregation where +module Test.Aggregation (spec) where import Test.Common import Test.Import @@ -29,7 +29,7 @@ spec = usersAggResults = result >>= aggregations >>= toTerms "users" subAggResults = usersAggResults >>= (listToMaybe . 
buckets) >>= termsAggs >>= toTerms "age_agg" subAddResultsExists = isJust subAggResults - liftIO $ (subAddResultsExists) `shouldBe` True + liftIO $ subAddResultsExists `shouldBe` True it "returns cardinality aggregation results" $ withTestEnv $ do _ <- insertData diff --git a/tests/V5/Test/BulkAPI.hs b/tests/V5/Test/BulkAPI.hs index 1d952f4..ae77e17 100644 --- a/tests/V5/Test/BulkAPI.hs +++ b/tests/V5/Test/BulkAPI.hs @@ -1,6 +1,6 @@ {-# LANGUAGE OverloadedStrings #-} -module Test.BulkAPI where +module Test.BulkAPI (spec) where import Test.Common import Test.Import @@ -9,7 +9,7 @@ import qualified Data.Vector as V import qualified Lens.Micro.Aeson as LMA newtype BulkTest = - BulkTest { name :: Text } + BulkTest Text deriving (Eq, Show) instance ToJSON BulkTest where @@ -25,7 +25,7 @@ instance FromJSON BulkTest where spec :: Spec spec = - describe "Bulk API" $ do + describe "Bulk API" $ it "inserts all documents we request" $ withTestEnv $ do _ <- insertData let firstTest = BulkTest "blah" @@ -84,7 +84,7 @@ spec = hitsTotal (searchHits sr) `shouldBe` 6 let nameList :: [Text] nameList = - (hits (searchHits sr)) + hits (searchHits sr) ^.. traverse . to hitSource . _Just diff --git a/tests/V5/Test/Documents.hs b/tests/V5/Test/Documents.hs index 6468639..d9052df 100644 --- a/tests/V5/Test/Documents.hs +++ b/tests/V5/Test/Documents.hs @@ -14,14 +14,14 @@ spec = docInserted <- getDocument testIndex testMapping (DocId "1") let newTweet = eitherDecode (responseBody docInserted) :: Either String (EsResult Tweet) - liftIO $ (fmap getSource newTweet `shouldBe` Right (Just patchedTweet)) + liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet) it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do _ <- insertWithSpaceInId docInserted <- getDocument testIndex testMapping (DocId "Hello World") let newTweet = eitherDecode (responseBody docInserted) :: Either String (EsResult Tweet) - liftIO $ (fmap getSource newTweet `shouldBe` Right (Just exampleTweet)) + liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet) it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do doc <- getDocument testIndex testMapping (DocId "bogus") diff --git a/tests/V5/Test/Generators.hs b/tests/V5/Test/Generators.hs index 0c02f09..ea758af 100644 --- a/tests/V5/Test/Generators.hs +++ b/tests/V5/Test/Generators.hs @@ -36,8 +36,10 @@ instance Arbitrary UTCTime where <*> (fromRational . toRational <$> choose (0::Double, 86400)) instance Arbitrary Day where - arbitrary = ModifiedJulianDay <$> (2000 +) <$> arbitrary - shrink = (ModifiedJulianDay <$>) . shrink . toModifiedJulianDay + arbitrary = + ModifiedJulianDay . (2000 +) <$> arbitrary + shrink = + (ModifiedJulianDay <$>) . shrink . 
toModifiedJulianDay #if !MIN_VERSION_QuickCheck(2,9,0) instance Arbitrary a => Arbitrary (NonEmpty a) where diff --git a/tests/V5/Test/Highlights.hs b/tests/V5/Test/Highlights.hs index 983d460..0bce38e 100644 --- a/tests/V5/Test/Highlights.hs +++ b/tests/V5/Test/Highlights.hs @@ -7,28 +7,24 @@ import Test.Import import qualified Data.Map as M +initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight)) +initHighlights fieldName = do + _ <- insertData + _ <- insertOther + let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell") + let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing] + let search = mkHighlightSearch (Just query) testHighlight + searchTweetHighlight search + spec :: Spec spec = describe "Highlights API" $ do - it "returns highlight from query when there should be one" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search + myHighlight <- initHighlights "message" liftIO $ myHighlight `shouldBe` Right (Just (M.fromList [("message",["Use haskell!"])])) it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "user") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search + myHighlight <- initHighlights "user" liftIO $ myHighlight `shouldBe` Right Nothing diff --git a/tests/V5/Test/JSON.hs b/tests/V5/Test/JSON.hs index 5fc3567..bec04ec 100644 --- a/tests/V5/Test/JSON.hs +++ b/tests/V5/Test/JSON.hs @@ -1,7 +1,7 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -module Test.JSON where +module Test.JSON (spec) where import Test.Import diff --git a/tests/V5/Test/Script.hs b/tests/V5/Test/Script.hs index 6a6b754..e076574 100644 --- a/tests/V5/Test/Script.hs +++ b/tests/V5/Test/Script.hs @@ -10,7 +10,7 @@ import qualified Data.Map as M spec :: Spec spec = - describe "Script" $ do + describe "Script" $ it "returns a transformed document based on the script field" $ withTestEnv $ do _ <- insertData let query = MatchAllQuery Nothing @@ -32,4 +32,5 @@ spec = Right sr -> do let Just results = hitFields (head (hits (searchHits sr))) - liftIO $ results `shouldBe` (HitFields (M.fromList [("test1", [Number 20000.0])])) + liftIO $ + results `shouldBe` HitFields (M.fromList [("test1", [Number 20000.0])]) diff --git a/tests/V5/Test/Snapshots.hs b/tests/V5/Test/Snapshots.hs index 932a119..cead610 100644 --- a/tests/V5/Test/Snapshots.hs +++ b/tests/V5/Test/Snapshots.hs @@ -1,7 +1,7 @@ {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} -module Test.Snapshots where +module Test.Snapshots (spec) where import Test.Common import Test.Import @@ -18,7 +18,7 @@ import Test.Generators () spec :: Spec spec = do - describe "FsSnapshotRepo" $ do + describe "FsSnapshotRepo" $ prop "SnapshotRepo laws" $ \fsr -> fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo) @@ -149,7 +149,7 @@ canSnapshot :: IO Bool canSnapshot = do caresAboutRepos <- atleast es16 repoPaths <- getRepoPaths - return (not caresAboutRepos || not (null 
(repoPaths))) + return (not caresAboutRepos || not (null repoPaths)) withSnapshotRepo :: ( MonadMask m @@ -197,5 +197,5 @@ withSnapshot srn sn = bracket_ alloc free , snapIndices = Just (IndexList (testIndex :| [])) -- We don't actually need to back up any data } - free = do + free = deleteSnapshot srn sn diff --git a/tests/V5/Test/Sorting.hs b/tests/V5/Test/Sorting.hs index 5473403..8665368 100644 --- a/tests/V5/Test/Sorting.hs +++ b/tests/V5/Test/Sorting.hs @@ -7,7 +7,7 @@ import Test.Import spec :: Spec spec = - describe "sorting" $ do + describe "sorting" $ it "returns documents in the right order" $ withTestEnv $ do _ <- insertData _ <- insertOther diff --git a/tests/V5/Test/SourceFiltering.hs b/tests/V5/Test/SourceFiltering.hs index e44596d..447980c 100644 --- a/tests/V5/Test/SourceFiltering.hs +++ b/tests/V5/Test/SourceFiltering.hs @@ -11,27 +11,27 @@ spec :: Spec spec = describe "Source filtering" $ do - it "doesn't include source when sources are disabled" $ withTestEnv $ do + it "doesn't include source when sources are disabled" $ withTestEnv $ searchExpectSource NoSource (Left (EsError 500 "Source was missing")) - it "includes a source" $ withTestEnv $ do + it "includes a source" $ withTestEnv $ searchExpectSource (SourcePatterns (PopPattern (Pattern "message"))) (Right (Object (HM.fromList [("message", String "Use haskell!")]))) - it "includes sources" $ withTestEnv $ do + it "includes sources" $ withTestEnv $ searchExpectSource (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"])) (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")]))) - it "includes source patterns" $ withTestEnv $ do + it "includes source patterns" $ withTestEnv $ searchExpectSource (SourcePatterns (PopPattern (Pattern "*ge"))) (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")]))) - it "excludes source patterns" $ withTestEnv $ do + it "excludes source patterns" $ withTestEnv $ searchExpectSource (SourceIncludeExclude (Include []) (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"])) diff --git a/tests/V5/Test/Suggest.hs b/tests/V5/Test/Suggest.hs index c068985..b1a2c2b 100644 --- a/tests/V5/Test/Suggest.hs +++ b/tests/V5/Test/Suggest.hs @@ -7,7 +7,7 @@ import Test.Import spec :: Spec spec = - describe "Suggest" $ do + describe "Suggest" $ it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do _ <- insertData let query = QueryMatchNoneQuery diff --git a/tests/V5/tests.hs b/tests/V5/tests.hs index 0e1768f..600bca5 100644 --- a/tests/V5/tests.hs +++ b/tests/V5/tests.hs @@ -1,13 +1,8 @@ {-# LANGUAGE CPP #-} -{-# LANGUAGE DefaultSignatures #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-orphans #-} #if __GLASGOW_HASKELL__ < 800 @@ -21,7 +16,7 @@ module Main where import Test.Common import Test.Import -import Prelude hiding (filter) +import Prelude import qualified Test.Aggregation as Aggregation import qualified Test.BulkAPI as Bulk @@ -64,11 +59,11 @@ main = hspec $ do let errorResp = eitherDecode (responseBody res) liftIO (errorResp `shouldBe` Right (EsError 404 "no such index")) - describe "Monoid (SearchHits a)" $ do + describe "Monoid (SearchHits a)" $ prop "abides the monoid 
laws" $ eq $ prop_Monoid (T :: T (SearchHits ())) - describe "mkDocVersion" $ do + describe "mkDocVersion" $ prop "can never construct an out of range docVersion" $ \i -> let res = mkDocVersion i in case res of @@ -77,7 +72,7 @@ main = hspec $ do (dv <= maxBound) .&&. docVersionNumber dv === i - describe "getNodesInfo" $ do + describe "getNodesInfo" $ it "fetches the responding node when LocalNode is used" $ withTestEnv $ do res <- getNodesInfo LocalNode liftIO $ case res of @@ -87,7 +82,7 @@ main = hspec $ do Right NodesInfo {..} -> length nodesInfo `shouldBe` 1 Left e -> expectationFailure ("Expected NodesInfo but got " <> show e) - describe "getNodesStats" $ do + describe "getNodesStats" $ it "fetches the responding node when LocalNode is used" $ withTestEnv $ do res <- getNodesStats LocalNode liftIO $ case res of @@ -97,7 +92,7 @@ main = hspec $ do Right NodesStats {..} -> length nodesStats `shouldBe` 1 Left e -> expectationFailure ("Expected NodesStats but got " <> show e) - describe "Enum DocVersion" $ do + describe "Enum DocVersion" $ it "follows the laws of Enum, Bounded" $ do evaluate (succ maxBound :: DocVersion) `shouldThrow` anyErrorCall evaluate (pred minBound :: DocVersion) `shouldThrow` anyErrorCall @@ -107,11 +102,14 @@ main = hspec $ do enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound] enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound] - describe "Scan & Scroll API" $ do + describe "Scan & Scroll API" $ it "returns documents using the scan&scroll API" $ withTestEnv $ do _ <- insertData _ <- insertOther - let search = (mkSearch (Just $ MatchAllQuery Nothing) Nothing) { size = (Size 1) } + let search = + (mkSearch + (Just $ MatchAllQuery Nothing) Nothing) + { size = Size 1 } regular_search <- searchTweet search scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet] let scan_search = map hitSource scan_search'
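For context, the scan & scroll test above exercises the same pattern an application would use to page through every match. A minimal sketch, assuming the Tweet type and the testIndex/testMapping helpers these tests share; fetchAllTweets is a hypothetical name:

-- Sketch only: collect every Tweet in the test index via the scroll API.
-- scanSearch drives the scroll cursor to exhaustion and returns all hits.
import Data.Maybe (mapMaybe)

fetchAllTweets :: BH IO [Tweet]
fetchAllTweets = do
  let search = (mkSearch (Just (MatchAllQuery Nothing)) Nothing)
                 { size = Size 100 } -- hits fetched per scroll round trip
  allHits <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet]
  return (mapMaybe hitSource allHits) -- _source is optional on a Hit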