diff --git a/.gitignore b/.gitignore
index 5b1bf7d..3414b7f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,3 +18,4 @@ bloodhound.iml
 .hgignore
 examples/bloodhound-examples.cabal
 /.ghc.environment.*
+.hspec-failures
diff --git a/.hlint.yaml b/.hlint.yaml
new file mode 100644
index 0000000..80500e4
--- /dev/null
+++ b/.hlint.yaml
@@ -0,0 +1,66 @@
+# HLint configuration file
+# https://github.com/ndmitchell/hlint
+##########################
+
+# This file contains a template configuration file, which is typically
+# placed as .hlint.yaml in the root of your project
+
+
+# Specify additional command line arguments
+#
+# - arguments: [--color, --cpp-simple, -XQuasiQuotes]
+
+
+# Control which extensions/flags/modules/functions can be used
+#
+# - extensions:
+#   - default: false # all extensions are banned by default
+#   - name: [PatternGuards, ViewPatterns] # only these listed extensions can be used
+#   - {name: CPP, within: CrossPlatform} # CPP can only be used in a given module
+#
+# - flags:
+#   - {name: -w, within: []} # -w is allowed nowhere
+#
+# - modules:
+#   - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set'
+#   - {name: Control.Arrow, within: []} # Certain modules are banned entirely
+#
+# - functions:
+#   - {name: unsafePerformIO, within: []} # unsafePerformIO can only appear in no modules
+
+
+# Add custom hints for this project
+#
+# Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar"
+# - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x}
+
+
+# Turn on hints that are off by default
+#
+# Ban "module X(module X) where", to require a real export list
+# - warn: {name: Use explicit module export list}
+#
+# Replace a $ b $ c with a . b $ c
+# - group: {name: dollar, enabled: true}
+#
+# Generalise map to fmap, ++ to <>
+# - group: {name: generalise, enabled: true}
+
+
+# Ignore some builtin hints
+- ignore: {name: "Use <$>"}
+- ignore: {name: "Use lambda-case"}
+# When we don't use camelCase it's to match ES.
+# Possibly this is a mistake.
+- ignore: {name: "Use camelCase"}
+- ignore: {name: "Eta reduce"}
+# - ignore: {name: Use let}
+# - ignore: {name: Use const, within: SpecialModule} # Only within certain modules
+
+
+# Define some custom infix operators
+# - fixity: infixr 3 ~^#^~
+
+
+# To generate a suitable file for HLint do:
+# $ hlint --default > .hlint.yaml
diff --git a/.travis.yml b/.travis.yml
index 395375c..1df3c80 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,21 +8,21 @@ addons:
 - oracle-java8-installer
 env:
- global:
- - JAVA_HOME=/usr/lib/jvm/java-8-oracle
- matrix:
- - GHCVER=7.8 ESVER=1.3.6 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=7.8 ESVER=1.4.1 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=7.10 ESVER=1.5.2 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=7.10 ESVER=1.6.0 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=7.10 ESVER=1.7.6 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=8.0 ESVER=1.7.6 STACK_YAML=stack-8.0.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=8.0 ESVER=5.0.2 STACK_YAML=stack-8.0.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- - GHCVER=8.2 ESVER=1.7.6 STACK_YAML=stack.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- - GHCVER=8.2 ESVER=5.0.2 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- - GHCVER=8.2 ESVER=5.5.0 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- allow_failures:
- - GHCVER=8.2 ESVER=6.1.3 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
+ global:
+ - JAVA_HOME=/usr/lib/jvm/java-8-oracle
+ matrix:
+ - GHCVER=7.8 ESVER=1.3.6 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=7.8 ESVER=1.4.1 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=7.10 ESVER=1.5.2 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=7.10 ESVER=1.6.0 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=7.10 ESVER=1.7.6 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=8.0 ESVER=1.7.6 STACK_YAML=stack-8.0.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=8.0 ESVER=5.0.2 STACK_YAML=stack-8.0.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
+ - GHCVER=8.2 ESVER=1.7.6 STACK_YAML=stack.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
+ - GHCVER=8.2 ESVER=5.0.2 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
+ - GHCVER=8.2 ESVER=5.5.0 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
+# allow_failures:
+# - GHCVER=8.2 ESVER=6.1.3 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
 
 install:
   # stack
@@ -30,7 +30,6 @@ install:
 - travis_retry curl -L https://github.com/commercialhaskell/stack/releases/download/v1.6.1/stack-1.6.1-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
 - export PATH=~/.local/bin:$PATH
 - stack --no-terminal --version
-
 # elasticsearch
 - wget --no-check-certificate $DLINK-$ESVER.tar.gz
 - tar xzf elasticsearch-$ESVER.tar.gz
@@ -43,7 +42,7 @@ script:
 - stack update --no-terminal
 - stack build -j2 --fast --no-terminal
 - travis_wait 45 sleep 1800 &
-- stack test --fast --no-terminal bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:$ESFLAG
+- stack test --fast --no-terminal bloodhound:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:$ESFLAG
 
 cache:
   timeout: 2000
diff --git a/Makefile b/Makefile
index 45d1517..37c8840 100644
--- a/Makefile
+++ b/Makefile
@@ -1,15 +1,45 @@
+stack = STACK_YAML='stack.yaml' stack
+
 build:
 	stack build
 
+build-validate:
+	stack build --fast --ghc-options '-Wall -Werror'
+
+ghci:
+	stack ghci
+
+test: echo-warn
+	stack test
+
+test-rerun: echo-warn
+	stack test --test-arguments "-r"
+
+test-ghci:
+	stack ghci bloodhound:test:bloodhound-tests
+
+ghcid:
+	ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
+
+ghcid-validate:
+	ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-Werror -fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
+
+weeder:
+	weeder . --build
+
+# hlint --default > .hlint.yaml
+hlint:
+	hlint .
+
+hlint-watch:
+	sos src/ -c "hlint ." -p "src/(.*)\.hs"
+
 mod-build:
 	stack build --ghc-options '+RTS -A128M -RTS'
 
 echo-warn:
 	echo "Make certain you have an elasticsearch instance on localhost:9200 !"
-test: echo-warn
-	stack test
-
 7.8-build:
 	STACK_YAML="stack-7.8.yaml" stack build
@@ -23,16 +53,22 @@ test: echo-warn
 	STACK_YAML="stack-7.10.yaml" stack test
 
 7.10-test-ES1:
-	STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
+	STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
 
 7.10-test-ES5:
-	STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
+	STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
 
 8.0-test-ES1:
-	STACK_YAML="stack.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
+	STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
 
 8.0-test-ES5:
-	STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
+	STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
+
+8.2-test-ES1:
+	STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
+
+8.2-test-ES5:
+	STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
 
 8.0-build:
 	STACK_YAML="stack-8.0.yaml" stack build
@@ -44,8 +80,5 @@ module-touch:
 	touch src/Database/V1/Bloodhound/Types.hs
 	touch src/Database/V5/Bloodhound/Types.hs
 
-ghci:
-	stack ghci
-
 upload:
 	stack upload --no-signature .
diff --git a/bloodhound.cabal b/bloodhound.cabal
index 803bf44..3b6025e 100644
--- a/bloodhound.cabal
+++ b/bloodhound.cabal
@@ -7,7 +7,7 @@ license:             BSD3
 license-file:        LICENSE
 author:              Chris Allen
 maintainer:          cma@bitemyapp.com
-copyright:           2015, Chris Allen
+copyright:           2018 Chris Allen
 category:            Database, Search
 build-type:          Simple
 cabal-version:       >=1.10
@@ -33,16 +33,35 @@ Flag ES5
 
 library
   ghc-options:       -Wall
-  exposed-modules:   Database.V1.Bloodhound
-                     Database.V1.Bloodhound.Client
-                     Database.V1.Bloodhound.Types
-                     Database.V1.Bloodhound.Types.Class
-                     Database.V1.Bloodhound.Types.Internal
-                     Database.V5.Bloodhound
+  exposed-modules:   Database.V5.Bloodhound
                      Database.V5.Bloodhound.Client
                      Database.V5.Bloodhound.Types
                      Database.V5.Bloodhound.Types.Class
-                     Database.V5.Bloodhound.Types.Internal
+                     Database.V5.Bloodhound.Internal.Aggregation
+                     Database.V5.Bloodhound.Internal.Analysis
+                     Database.V5.Bloodhound.Internal.Client
+                     Database.V5.Bloodhound.Internal.Highlight
+                     Database.V5.Bloodhound.Internal.Newtypes
+                     Database.V5.Bloodhound.Internal.Query
+                     Database.V5.Bloodhound.Internal.Sort
+                     Database.V5.Bloodhound.Internal.StringlyTyped
+                     Database.V5.Bloodhound.Internal.Suggest
+
+                     Database.V1.Bloodhound
+                     Database.V1.Bloodhound.Client
+                     Database.V1.Bloodhound.Types
+                     Database.V1.Bloodhound.Types.Class
+                     Database.V1.Bloodhound.Internal.Aggregation
+                     Database.V1.Bloodhound.Internal.Client
+                     Database.V1.Bloodhound.Internal.Highlight
+                     Database.V1.Bloodhound.Internal.Newtypes
+                     Database.V1.Bloodhound.Internal.Query
+                     Database.V1.Bloodhound.Internal.Sort
+                     Database.V1.Bloodhound.Internal.StringlyTyped
+                     Database.V1.Bloodhound.Internal.Suggest
+
+  other-modules:     Bloodhound.Import
+                     Database.Bloodhound.Common.Script
   hs-source-dirs:    src
   build-depends:     base          >= 4.3     && <5,
                      bytestring    >= 0.10.0  && <0.11,
@@ -58,15 +77,13 @@ library
                      http-types    >= 0.8     && <0.13,
                      vector        >= 0.10.9  && <0.13,
                      scientific    >= 0.3.0.0 && <0.4.0.0,
-                     exceptions,
-                     data-default-class,
                      blaze-builder,
-                     unordered-containers,
-                     mtl-compat,
-                     hashable
+                     exceptions,
+                     hashable,
+                     unordered-containers
   default-language:  Haskell2010
 
-test-suite tests
+test-suite bloodhound-tests
   ghc-options: -Wall -fno-warn-orphans
   type: exitcode-stdio-1.0
   main-is: tests.hs
@@ -74,7 +91,22 @@ test-suite tests
     hs-source-dirs: tests/V1
   else
     hs-source-dirs: tests/V5
-
+  other-modules: Test.Aggregation
+                 Test.ApproxEq
+                 Test.BulkAPI
+                 Test.Common
+                 Test.Documents
+                 Test.Generators
+                 Test.Highlights
+                 Test.Import
+                 Test.Indices
+                 Test.JSON
+                 Test.Query
+                 Test.Snapshots
+                 Test.Sorting
+                 Test.SourceFiltering
+                 Test.Suggest
+                 Test.Templates
   build-depends: base,
                  bloodhound,
                  bytestring,
@@ -90,11 +122,14 @@ test-suite tests
                  vector,
                  unordered-containers >= 0.2.5.0 && <0.3,
                  mtl,
+                 pretty-simple,
+                 quickcheck-arbitrary-template,
                  quickcheck-properties,
-                 generics-sop >=0.2 && <0.4,
                  errors,
                  exceptions,
                  temporary,
                  unix-compat,
-                 network-uri
+                 network-uri,
+                 microlens,
+                 microlens-aeson
   default-language: Haskell2010
diff --git a/changelog.md b/changelog.md
index e626836..ef62ca0 100644
--- a/changelog.md
+++ b/changelog.md
@@ -1,3 +1,17 @@
+0.16.0.0
+========
+- @bitemyapp
+  - Reorganized modules internally, ripped out Generic,
+    rewrote part of the test suite
+- @andrewthad
+  - Added support for autogenerated elasticsearch ids in the bulk API
+  - Added support for token filters
+  - Added action for searching multiple indices
+- @lunaris
+  - Added support for script fields and function score queries
+- @bermanjosh
+  - Added support for direct generators
+
 0.15.0.2
 ========
 - @michaelxavier
diff --git a/examples/Tweet.hs b/examples/Tweet.hs
index 865aa20..e7af76c 100644
--- a/examples/Tweet.hs
+++ b/examples/Tweet.hs
@@ -5,7 +5,6 @@ module Main
   ) where
 
--------------------------------------------------------------------------------
 import           Control.Monad.IO.Class (liftIO)
 import           Data.Aeson             (FromJSON (..), defaultOptions,
                                          genericParseJSON, genericToJSON,
@@ -18,7 +17,6 @@ import qualified Data.Vector as V
 import           Database.V5.Bloodhound
 import           GHC.Generics           (Generic)
 import           Network.HTTP.Client    (defaultManagerSettings)
--------------------------------------------------------------------------------
 
 data TweetMapping = TweetMapping deriving (Eq, Show)
 
@@ -29,7 +27,7 @@ instance ToJSON TweetMapping where
         [ "properties" .=
           object ["location" .= object ["type" .= ("geo_point" :: Text)]]
         ]
--------------------------------------------------------------------------------
+
 data Tweet = Tweet
   { user     :: Text
   , postDate :: UTCTime
@@ -39,7 +37,6 @@ data Tweet = Tweet
   }
   deriving (Eq, Generic, Show)
 
--------------------------------------------------------------------------------
 exampleTweet :: Tweet
 exampleTweet = Tweet
diff --git a/examples/package.yaml b/examples/package.yaml
index 2b962d1..c95a073 100644
--- a/examples/package.yaml
+++ b/examples/package.yaml
@@ -17,7 +17,6 @@ dependencies:
 - http-client
 - vector
 - semigroups
-- transformers
 ghc-options:
 - -Wall
 - -threaded
diff --git a/src/Bloodhound/Import.hs b/src/Bloodhound/Import.hs
new file mode 100644
index 0000000..cee26d2
--- /dev/null
+++ b/src/Bloodhound/Import.hs
@@ -0,0 +1,81 @@
+module Bloodhound.Import
+  ( module X
+  , LByteString
+  , Method
+  , omitNulls
+  , parseNEJSON
+  , parseReadText
+  , readMay
+  , showText
+  , deleteSeveral
+  , oPath
+  ) where
+
+import           Control.Applicative       as X (Alternative (..), optional)
+import           Control.Exception         as X (Exception)
+import           Control.Monad             as X (MonadPlus (..), forM, (<=<))
+import           Control.Monad.Catch       as X (MonadCatch, MonadMask,
+                                                 MonadThrow)
+import           Control.Monad.Except      as X (MonadError)
+import           Control.Monad.Fix         as X (MonadFix)
+import           Control.Monad.IO.Class    as X (MonadIO (..))
+import           Control.Monad.Reader      as X (MonadReader (..),
+                                                 MonadTrans (..), ReaderT (..))
+import           Control.Monad.State       as X (MonadState)
+import           Control.Monad.Writer      as X (MonadWriter)
+import           Data.Aeson                as X
+import           Data.Aeson.Types          as X (Pair, Parser, emptyObject,
+                                                 parseEither, parseMaybe,
+                                                 typeMismatch)
+import           Data.Bifunctor            as X (first)
+import           Data.Char                 as X (isNumber)
+import           Data.Hashable             as X (Hashable)
+import qualified Data.HashMap.Strict       as HM
+import           Data.List                 as X (foldl', intercalate, nub)
+import           Data.List.NonEmpty        as X (NonEmpty (..), toList)
+import           Data.Maybe                as X (catMaybes, fromMaybe,
+                                                 isNothing, maybeToList)
+import           Data.Scientific           as X (Scientific)
+import           Data.Semigroup            as X (Semigroup (..))
+import           Data.Text                 as X (Text)
+import           Data.Time.Calendar        as X (Day (..), showGregorian)
+import           Data.Time.Clock           as X (NominalDiffTime, UTCTime)
+import           Data.Time.Clock.POSIX     as X
+
+import qualified Data.ByteString.Lazy      as BL
+import qualified Data.Text                 as T
+import qualified Data.Traversable          as DT
+import qualified Data.Vector               as V
+import qualified Network.HTTP.Types.Method as NHTM
+
+type LByteString = BL.ByteString
+
+type Method = NHTM.Method
+
+readMay :: Read a => String -> Maybe a
+readMay s = case reads s of
+              (a, ""):_ -> Just a
+              _         -> Nothing
+
+parseReadText :: Read a => Text -> Parser a
+parseReadText = maybe mzero return . readMay . T.unpack
+
+showText :: Show a => a -> Text
+showText = T.pack . show
+
+omitNulls :: [(Text, Value)] -> Value
+omitNulls = object . filter notNull where
+  notNull (_, Null)    = False
+  notNull (_, Array a) = (not . V.null) a
+  notNull _            = True
+
+parseNEJSON :: (FromJSON a) => [Value] -> Parser (NonEmpty a)
+parseNEJSON []     = fail "Expected non-empty list"
+parseNEJSON (x:xs) = DT.mapM parseJSON (x :| xs)
+
+deleteSeveral :: (Eq k, Hashable k) => [k] -> HM.HashMap k v -> HM.HashMap k v
+deleteSeveral ks hm = foldr HM.delete hm ks
+
+oPath :: ToJSON a => NonEmpty Text -> a -> Value
+oPath (k :| [])    v = object [k .= v]
+oPath (k :| (h:t)) v = object [k .= oPath (h :| t) v]
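For reference, a minimal sketch of how the JSON helpers above behave (GHCi-style; the result comments are what the expressions evaluate to, assuming the module context above):

    -- omitNulls drops null-valued pairs before building the object.
    omitNulls [ "user" .= String "bitemyapp", "bio" .= Null ]
    -- => Object (fromList [("user", String "bitemyapp")])

    -- oPath nests a value under a path of keys.
    oPath ("settings" :| ["index", "number_of_replicas"]) (2 :: Int)
    -- => {"settings":{"index":{"number_of_replicas":2}}}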
diff --git a/src/Database/Bloodhound/Common/Script.hs b/src/Database/Bloodhound/Common/Script.hs
new file mode 100644
index 0000000..c4ffd95
--- /dev/null
+++ b/src/Database/Bloodhound/Common/Script.hs
@@ -0,0 +1,220 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings          #-}
+
+module Database.Bloodhound.Common.Script where
+
+import           Bloodhound.Import
+
+import qualified Data.HashMap.Strict as HM
+
+import           Database.V5.Bloodhound.Internal.Newtypes
+
+newtype ScriptFields =
+  ScriptFields (HM.HashMap ScriptFieldName ScriptFieldValue)
+  deriving (Eq, Show)
+
+type ScriptFieldName = Text
+type ScriptFieldValue = Value
+
+data Script =
+  Script { scriptLanguage :: Maybe ScriptLanguage
+         , scriptInline   :: Maybe ScriptInline
+         , scriptStored   :: Maybe ScriptId
+         , scriptParams   :: Maybe ScriptParams
+         } deriving (Eq, Show)
+
+newtype ScriptLanguage =
+  ScriptLanguage Text deriving (Eq, Show, FromJSON, ToJSON)
+
+newtype ScriptInline =
+  ScriptInline Text deriving (Eq, Show, FromJSON, ToJSON)
+
+newtype ScriptId =
+  ScriptId Text deriving (Eq, Show, FromJSON, ToJSON)
+
+newtype ScriptParams =
+  ScriptParams (HM.HashMap ScriptParamName ScriptParamValue)
+  deriving (Eq, Show)
+
+type ScriptParamName = Text
+type ScriptParamValue = Value
+
+data BoostMode =
+    BoostModeMultiply
+  | BoostModeReplace
+  | BoostModeSum
+  | BoostModeAvg
+  | BoostModeMax
+  | BoostModeMin deriving (Eq, Show)
+
+data ScoreMode =
+    ScoreModeMultiply
+  | ScoreModeSum
+  | ScoreModeAvg
+  | ScoreModeFirst
+  | ScoreModeMax
+  | ScoreModeMin deriving (Eq, Show)
+
+data FunctionScoreFunction =
+    FunctionScoreFunctionScript Script
+  | FunctionScoreFunctionRandom Seed
+  | FunctionScoreFunctionFieldValueFactor FieldValueFactor
+  deriving (Eq, Show)
+
+newtype Weight =
+  Weight Float deriving (Eq, Show, FromJSON, ToJSON)
+
+newtype Seed =
+  Seed Float deriving (Eq, Show, FromJSON, ToJSON)
+
+data FieldValueFactor =
+  FieldValueFactor { fieldValueFactorField    :: FieldName
+                   , fieldValueFactor         :: Maybe Factor
+                   , fieldValueFactorModifier :: Maybe FactorModifier
+                   , fieldValueFactorMissing  :: Maybe FactorMissingFieldValue
+                   } deriving (Eq, Show)
+
+newtype Factor =
+  Factor Float deriving (Eq, Show, FromJSON, ToJSON)
+
+data FactorModifier =
+    FactorModifierNone
+  | FactorModifierLog
+  | FactorModifierLog1p
+  | FactorModifierLog2p
+  | FactorModifierLn
+  | FactorModifierLn1p
+  | FactorModifierLn2p
+  | FactorModifierSquare
+  | FactorModifierSqrt
+  | FactorModifierReciprocal deriving (Eq, Show)
+
+newtype FactorMissingFieldValue =
+  FactorMissingFieldValue Float deriving (Eq, Show, FromJSON, ToJSON)
+
+instance ToJSON BoostMode where
+  toJSON BoostModeMultiply = "multiply"
+  toJSON BoostModeReplace  = "replace"
+  toJSON BoostModeSum      = "sum"
+  toJSON BoostModeAvg      = "avg"
+  toJSON BoostModeMax      = "max"
+  toJSON BoostModeMin      = "min"
+
+instance FromJSON BoostMode where
+  parseJSON = withText "BoostMode" parse
+    where parse "multiply" = pure BoostModeMultiply
+          parse "replace"  = pure BoostModeReplace
+          parse "sum"      = pure BoostModeSum
+          parse "avg"      = pure BoostModeAvg
+          parse "max"      = pure BoostModeMax
+          parse "min"      = pure BoostModeMin
+          parse bm         = fail ("Unexpected BoostMode: " <> show bm)
+
+instance ToJSON ScoreMode where
+  toJSON ScoreModeMultiply = "multiply"
+  toJSON ScoreModeSum      = "sum"
+  toJSON ScoreModeFirst    = "first"
+  toJSON ScoreModeAvg      = "avg"
+  toJSON ScoreModeMax      = "max"
+  toJSON ScoreModeMin      = "min"
+
+instance FromJSON ScoreMode where
+  parseJSON = withText "ScoreMode" parse
+    where parse "multiply" = pure ScoreModeMultiply
+          parse "sum"      = pure ScoreModeSum
+          parse "first"    = pure ScoreModeFirst
+          parse "avg"      = pure ScoreModeAvg
+          parse "max"      = pure ScoreModeMax
+          parse "min"      = pure ScoreModeMin
+          parse sm         = fail ("Unexpected ScoreMode: " <> show sm)
+
+functionScoreFunctionPair :: FunctionScoreFunction -> (Text, Value)
+functionScoreFunctionPair (FunctionScoreFunctionScript functionScoreScript) =
+  ("script_score", toJSON functionScoreScript)
+functionScoreFunctionPair (FunctionScoreFunctionRandom seed) =
+  ("random_score", omitNulls [ "seed" .= seed ])
+functionScoreFunctionPair (FunctionScoreFunctionFieldValueFactor fvf) =
+  ("field_value_factor", toJSON fvf)
+
+parseFunctionScoreFunction :: Object -> Parser FunctionScoreFunction
+parseFunctionScoreFunction o =
+      singleScript `taggedWith` "script_score"
+  <|> singleRandom `taggedWith` "random_score"
+  <|> singleFieldValueFactor `taggedWith` "field_value_factor"
+  where taggedWith parser k = parser =<< o .: k
+        singleScript = pure . FunctionScoreFunctionScript
+        singleRandom o' = FunctionScoreFunctionRandom <$> o' .: "seed"
+        singleFieldValueFactor = pure . FunctionScoreFunctionFieldValueFactor
+
+instance ToJSON ScriptFields where
+  toJSON (ScriptFields x) = Object x
+
+instance FromJSON ScriptFields where
+  parseJSON (Object o) = pure (ScriptFields o)
+  parseJSON _          = fail "error parsing ScriptFields"
+
+instance ToJSON Script where
+  toJSON (Script lang inline stored params) =
+    object [ "script" .= omitNulls base ]
+    where base = [ "lang"   .= lang
+                 , "inline" .= inline
+                 , "stored" .= stored
+                 , "params" .= params ]
+
+instance FromJSON Script where
+  parseJSON = withObject "Script" parse
+    where parse o = o .: "script" >>= \o' ->
+            Script
+            <$> o' .:? "lang"
+            <*> o' .:? "inline"
+            <*> o' .:? "stored"
+            <*> o' .:? "params"
+
+instance ToJSON ScriptParams where
+  toJSON (ScriptParams x) = Object x
+
+instance FromJSON ScriptParams where
+  parseJSON (Object o) = pure (ScriptParams o)
+  parseJSON _          = fail "error parsing ScriptParams"
+
+instance ToJSON FieldValueFactor where
+  toJSON (FieldValueFactor field factor modifier missing) =
+    omitNulls base
+    where base = [ "field"    .= field
+                 , "factor"   .= factor
+                 , "modifier" .= modifier
+                 , "missing"  .= missing ]
+
+instance FromJSON FieldValueFactor where
+  parseJSON = withObject "FieldValueFactor" parse
+    where parse o = FieldValueFactor
+                    <$> o .: "field"
+                    <*> o .:? "factor"
+                    <*> o .:? "modifier"
+                    <*> o .:? "missing"
+
+instance ToJSON FactorModifier where
+  toJSON FactorModifierNone       = "none"
+  toJSON FactorModifierLog        = "log"
+  toJSON FactorModifierLog1p      = "log1p"
+  toJSON FactorModifierLog2p      = "log2p"
+  toJSON FactorModifierLn         = "ln"
+  toJSON FactorModifierLn1p       = "ln1p"
+  toJSON FactorModifierLn2p       = "ln2p"
+  toJSON FactorModifierSquare     = "square"
+  toJSON FactorModifierSqrt       = "sqrt"
+  toJSON FactorModifierReciprocal = "reciprocal"
+
+instance FromJSON FactorModifier where
+  parseJSON = withText "FactorModifier" parse
+    where parse "none"       = pure FactorModifierNone
+          parse "log"        = pure FactorModifierLog
+          parse "log1p"      = pure FactorModifierLog1p
+          parse "log2p"      = pure FactorModifierLog2p
+          parse "ln"         = pure FactorModifierLn
+          parse "ln1p"       = pure FactorModifierLn1p
+          parse "ln2p"       = pure FactorModifierLn2p
+          parse "square"     = pure FactorModifierSquare
+          parse "sqrt"       = pure FactorModifierSqrt
+          parse "reciprocal" = pure FactorModifierReciprocal
+          parse fm           = fail ("Unexpected FactorModifier: " <> show fm)
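A hedged sketch of the wire format these instances imply: an inline script with one parameter (the language, script body, and parameter are invented):

    demoScript :: Script
    demoScript =
      Script (Just (ScriptLanguage "groovy"))
             (Just (ScriptInline "doc['age'].value * factor"))
             Nothing
             (Just (ScriptParams (HM.fromList [("factor", Number 2)])))

    -- encode demoScript
    -- => {"script":{"lang":"groovy","inline":"doc['age'].value * factor","params":{"factor":2}}}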
"missing" + +instance ToJSON FactorModifier where + toJSON FactorModifierNone = "none" + toJSON FactorModifierLog = "log" + toJSON FactorModifierLog1p = "log1p" + toJSON FactorModifierLog2p = "log2p" + toJSON FactorModifierLn = "ln" + toJSON FactorModifierLn1p = "ln1p" + toJSON FactorModifierLn2p = "ln2p" + toJSON FactorModifierSquare = "square" + toJSON FactorModifierSqrt = "sqrt" + toJSON FactorModifierReciprocal = "reciprocal" + +instance FromJSON FactorModifier where + parseJSON = withText "FactorModifier" parse + where parse "none" = pure FactorModifierNone + parse "log" = pure FactorModifierLog + parse "log1p" = pure FactorModifierLog1p + parse "log2p" = pure FactorModifierLog2p + parse "ln" = pure FactorModifierLn + parse "ln1p" = pure FactorModifierLn1p + parse "ln2p" = pure FactorModifierLn2p + parse "square" = pure FactorModifierSquare + parse "sqrt" = pure FactorModifierSqrt + parse "reciprocal" = pure FactorModifierReciprocal + parse fm = fail ("Unexpected FactorModifier: " <> show fm) diff --git a/src/Database/V1/Bloodhound/Client.hs b/src/Database/V1/Bloodhound/Client.hs index 56e05af..25f6cfd 100644 --- a/src/Database/V1/Bloodhound/Client.hs +++ b/src/Database/V1/Bloodhound/Client.hs @@ -223,7 +223,8 @@ getStatus :: MonadBH m => m (Maybe Status) getStatus = do response <- get =<< url return $ decode (responseBody response) - where url = joinPath [] + where + url = joinPath [] -- | 'getSnapshotRepos' gets the definitions of a subset of the -- defined snapshot repos. @@ -252,7 +253,7 @@ instance FromJSON GSRs where parseJSON = withObject "Collection of GenericSnapshotRepo" parse where parse = fmap GSRs . mapM (uncurry go) . HM.toList - go rawName = withObject "GenericSnapshotRepo" $ \o -> do + go rawName = withObject "GenericSnapshotRepo" $ \o -> GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type" <*> o .: "settings" @@ -454,16 +455,18 @@ deleteIndex (IndexName indexName) = updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply updateIndexSettings updates (IndexName indexName) = bindM2 put url (return body) - where url = joinPath [indexName, "_settings"] - body = Just (encode jsonBody) - jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) + where + url = joinPath [indexName, "_settings"] + body = Just (encode jsonBody) + jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName -> m (Either EsError IndexSettingsSummary) -getIndexSettings (IndexName indexName) = do +getIndexSettings (IndexName indexName) = parseEsResponse =<< get =<< url - where url = joinPath [indexName, "_settings"] + where + url = joinPath [indexName, "_settings"] -- | 'optimizeIndex' will optimize a single index, list of indexes or @@ -586,7 +589,7 @@ listIndices = url = joinPath ["_cat/indices?format=json"] parse body = maybe (throwM (EsProtocolException body)) return $ do vals <- decode body - forM vals $ \val -> do + forM vals $ \val -> case val of Object obj -> do indexVal <- HM.lookup "index" obj @@ -718,7 +721,8 @@ encodeBulkOperations stream = collapsed where collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n") mash :: Builder -> V.Vector L.ByteString -> Builder -mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x)) +mash = + V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x) mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value mkBulkStreamValue operation indexName mappingName docId = diff --git 
diff --git a/src/Database/V1/Bloodhound/Internal/Aggregation.hs b/src/Database/V1/Bloodhound/Internal/Aggregation.hs
new file mode 100644
index 0000000..760a302
--- /dev/null
+++ b/src/Database/V1/Bloodhound/Internal/Aggregation.hs
@@ -0,0 +1,416 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards   #-}
+
+module Database.V1.Bloodhound.Internal.Aggregation where
+
+import           Bloodhound.Import
+
+import qualified Data.HashMap.Strict as HM
+import qualified Data.Map.Strict as M
+import qualified Data.Text as T
+
+import           Database.V1.Bloodhound.Internal.Client
+import           Database.V1.Bloodhound.Internal.Highlight (HitHighlight)
+import           Database.V1.Bloodhound.Internal.Newtypes
+import           Database.V1.Bloodhound.Internal.Query
+import           Database.V1.Bloodhound.Internal.Sort
+
+
+type Aggregations = M.Map Text Aggregation
+
+emptyAggregations :: Aggregations
+emptyAggregations = M.empty
+
+mkAggregations :: Text -> Aggregation -> Aggregations
+mkAggregations name aggregation = M.insert name aggregation emptyAggregations
+
+data Aggregation = TermsAgg TermsAggregation
+                 | CardinalityAgg CardinalityAggregation
+                 | DateHistogramAgg DateHistogramAggregation
+                 | ValueCountAgg ValueCountAggregation
+                 | FilterAgg FilterAggregation
+                 | DateRangeAgg DateRangeAggregation
+                 | MissingAgg MissingAggregation
+                 | TopHitsAgg TopHitsAggregation
+  deriving (Eq, Show)
+
+
+instance ToJSON Aggregation where
+  toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
+    omitNulls ["terms" .= omitNulls [ toJSON' term,
+                                      "include" .= include,
+                                      "exclude" .= exclude,
+                                      "order" .= order,
+                                      "min_doc_count" .= minDocCount,
+                                      "size" .= size,
+                                      "shard_size" .= shardSize,
+                                      "collect_mode" .= collectMode,
+                                      "execution_hint" .= executionHint
+                                    ],
+               "aggs" .= termAggs ]
+    where
+      toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y }
+
+  toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) =
+    object ["cardinality" .= omitNulls [ "field" .= field,
+                                         "precisionThreshold" .= precisionThreshold
+                                       ]
+           ]
+
+  toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) =
+    omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
+                                               "interval" .= interval,
+                                               "format" .= format,
+                                               "pre_zone" .= preZone,
+                                               "post_zone" .= postZone,
+                                               "pre_offset" .= preOffset,
+                                               "post_offset" .= postOffset
+                                             ],
+               "aggs" .= dateHistoAggs ]
+  toJSON (ValueCountAgg a) = object ["value_count" .= v]
+    where v = case a of
+                (FieldValueCount (FieldName n)) -> object ["field" .= n]
+                (ScriptValueCount (Script s))   -> object ["script" .= s]
+  toJSON (FilterAgg (FilterAggregation filt ags)) =
+    omitNulls [ "filter" .= filt
+              , "aggs" .= ags]
+  toJSON (DateRangeAgg a) = object [ "date_range" .= a
+                                   ]
+  toJSON (MissingAgg (MissingAggregation{..})) =
+    object ["missing" .= object ["field" .= maField]]
+
+  toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) =
+    omitNulls ["top_hits" .= omitNulls [ "size" .= msize
+                                       , "from" .= mfrom
+                                       , "sort" .= msort
+                                       ]
+              ]
+
+data TopHitsAggregation = TopHitsAggregation
+  { taFrom :: Maybe From
+  , taSize :: Maybe Size
+  , taSort :: Maybe Sort
+  } deriving (Eq, Show)
+
+data MissingAggregation = MissingAggregation
+  { maField :: Text
+  } deriving (Eq, Show)
+
+data TermsAggregation = TermsAggregation
+  { term              :: Either Text Text
+  , termInclude       :: Maybe TermInclusion
+  , termExclude       :: Maybe TermInclusion
+  , termOrder         :: Maybe TermOrder
+  , termMinDocCount   :: Maybe Int
+  , termSize          :: Maybe Int
+  , termShardSize     :: Maybe Int
+  , termCollectMode   :: Maybe CollectionMode
+  , termExecutionHint :: Maybe ExecutionHint
+  , termAggs          :: Maybe Aggregations
+  } deriving (Eq, Show)
+
+data CardinalityAggregation = CardinalityAggregation
+  { cardinalityField   :: FieldName,
+    precisionThreshold :: Maybe Int
+  } deriving (Eq, Show)
+
+data DateHistogramAggregation = DateHistogramAggregation
+  { dateField      :: FieldName
+  , dateInterval   :: Interval
+  , dateFormat     :: Maybe Text
+    -- pre and post deprecated in 1.5
+  , datePreZone    :: Maybe Text
+  , datePostZone   :: Maybe Text
+  , datePreOffset  :: Maybe Text
+  , datePostOffset :: Maybe Text
+  , dateAggs       :: Maybe Aggregations
+  } deriving (Eq, Show)
+
+
+data DateRangeAggregation = DateRangeAggregation
+  { draField  :: FieldName
+  , draFormat :: Maybe Text
+  , draRanges :: NonEmpty DateRangeAggRange
+  } deriving (Eq, Show)
+
+data DateRangeAggRange =
+    DateRangeFrom DateMathExpr
+  | DateRangeTo DateMathExpr
+  | DateRangeFromAndTo DateMathExpr DateMathExpr deriving (Eq, Show)
+
+-- | See for more information.
+data ValueCountAggregation =
+    FieldValueCount FieldName
+  | ScriptValueCount Script deriving (Eq, Show)
+
+-- | Single-bucket filter aggregations. See for more information.
+data FilterAggregation = FilterAggregation
+  { faFilter :: Filter
+  , faAggs   :: Maybe Aggregations} deriving (Eq, Show)
+
+mkTermsAggregation :: Text -> TermsAggregation
+mkTermsAggregation t = TermsAggregation (Left t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
+
+mkTermsScriptAggregation :: Text -> TermsAggregation
+mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
+
+mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
+mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing
+
+mkCardinalityAggregation :: FieldName -> CardinalityAggregation
+mkCardinalityAggregation t = CardinalityAggregation t Nothing
+
+data TermInclusion = TermInclusion Text
+                   | TermPattern Text Text deriving (Eq, Show)
+
+instance ToJSON TermInclusion where
+  toJSON (TermInclusion x) = toJSON x
+  toJSON (TermPattern pattern flags) =
+    omitNulls [ "pattern" .= pattern
+              , "flags" .= flags]
+
+data TermOrder = TermOrder
+  { termSortField :: Text
+  , termSortOrder :: SortOrder } deriving (Eq, Show)
+
+instance ToJSON TermOrder where
+  toJSON (TermOrder termSortField termSortOrder) =
+    object [termSortField .= termSortOrder]
+
+
+data ExecutionHint = Ordinals
+                   | GlobalOrdinals
+                   | GlobalOrdinalsHash
+                   | GlobalOrdinalsLowCardinality
+                   | Map deriving (Eq, Show)
+
+instance ToJSON ExecutionHint where
+  toJSON Ordinals                     = "ordinals"
+  toJSON GlobalOrdinals               = "global_ordinals"
+  toJSON GlobalOrdinalsHash           = "global_ordinals_hash"
+  toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality"
+  toJSON Map                          = "map"
+
+
+-- | See for more information.
+data DateMathExpr =
+  DateMathExpr DateMathAnchor [DateMathModifier]
+  deriving (Eq, Show)
+
+instance ToJSON DateMathExpr where
+  toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods))
+    where fmtA DMNow = "now"
+          fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||"
+          fmtMod (AddTime n u) = "+" <> showText n <> fmtU u
+          fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u
+          fmtMod (RoundDownTo u) = "/" <> fmtU u
+          fmtU DMYear   = "y"
+          fmtU DMMonth  = "M"
+          fmtU DMWeek   = "w"
+          fmtU DMDay    = "d"
+          fmtU DMHour   = "h"
+          fmtU DMMinute = "m"
+          fmtU DMSecond = "s"
+
+-- | Starting point for a date range. This along with the 'DateMathModifiers'
+-- gets you the date ES will start from.
+data DateMathAnchor =
+    DMNow
+  | DMDate Day
+  deriving (Eq, Show)
+
+data DateMathModifier =
+    AddTime Int DateMathUnit
+  | SubtractTime Int DateMathUnit
+  | RoundDownTo DateMathUnit
+  deriving (Eq, Show)
+
+data DateMathUnit =
+    DMYear
+  | DMMonth
+  | DMWeek
+  | DMDay
+  | DMHour
+  | DMMinute
+  | DMSecond
+  deriving (Eq, Show)
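The ToJSON instance above renders these as Elasticsearch date-math strings; a small sketch:

    -- "Two weeks ago, rounded down to the day", per the rendering
    -- rules in the ToJSON instance above.
    twoWeeksAgo :: DateMathExpr
    twoWeeksAgo = DateMathExpr DMNow [SubtractTime 2 DMWeek, RoundDownTo DMDay]
    -- toJSON twoWeeksAgo == String "now-2w/d"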
+data CollectionMode = BreadthFirst
+                    | DepthFirst deriving (Eq, Show)
+
+type AggregationResults = M.Map Text Value
+
+class BucketAggregation a where
+  key :: a -> BucketValue
+  docCount :: a -> Int
+  aggs :: a -> Maybe AggregationResults
+
+data BucketValue = TextValue Text
+                 | ScientificValue Scientific
+                 | BoolValue Bool deriving (Show)
+
+data Bucket a = Bucket { buckets :: [a]} deriving (Show)
+
+data TermsResult = TermsResult
+  { termKey       :: BucketValue
+  , termsDocCount :: Int
+  , termsAggs     :: Maybe AggregationResults } deriving (Show)
+
+data DateHistogramResult = DateHistogramResult
+  { dateKey           :: Int
+  , dateKeyStr        :: Maybe Text
+  , dateDocCount      :: Int
+  , dateHistogramAggs :: Maybe AggregationResults } deriving (Show)
+
+data DateRangeResult = DateRangeResult
+  { dateRangeKey          :: Text
+  , dateRangeFrom         :: Maybe UTCTime
+  , dateRangeFromAsString :: Maybe Text
+  , dateRangeTo           :: Maybe UTCTime
+  , dateRangeToAsString   :: Maybe Text
+  , dateRangeDocCount     :: Int
+  , dateRangeAggs         :: Maybe AggregationResults } deriving (Show, Eq)
+
+toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
+toTerms = toAggResult
+
+toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
+toDateHistogram = toAggResult
+
+toMissing :: Text -> AggregationResults -> Maybe MissingResult
+toMissing = toAggResult
+
+toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a)
+toTopHits = toAggResult
+
+toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a
+toAggResult t a = M.lookup t a >>= deserialize
+  where deserialize = parseMaybe parseJSON
+
+instance BucketAggregation TermsResult where
+  key = termKey
+  docCount = termsDocCount
+  aggs = termsAggs
+
+instance BucketAggregation DateHistogramResult where
+  key = TextValue . showText . dateKey
+  docCount = dateDocCount
+  aggs = dateHistogramAggs
+
+instance BucketAggregation DateRangeResult where
+  key = TextValue . dateRangeKey
+  docCount = dateRangeDocCount
+  aggs = dateRangeAggs
+
+instance (FromJSON a) => FromJSON (Bucket a) where
+  parseJSON (Object v) = Bucket <$>
+                         v .: "buckets"
+  parseJSON _ = mempty
+
+instance FromJSON BucketValue where
+  parseJSON (String t) = return $ TextValue t
+  parseJSON (Number s) = return $ ScientificValue s
+  parseJSON (Bool b)   = return $ BoolValue b
+  parseJSON _          = mempty
+
+instance FromJSON MissingResult where
+  parseJSON = withObject "MissingResult" parse
+    where parse v = MissingResult <$> v .: "doc_count"
+
+instance FromJSON TermsResult where
+  parseJSON (Object v) = TermsResult <$>
+                         v .: "key" <*>
+                         v .: "doc_count" <*>
+                         (pure $ getNamedSubAgg v ["key", "doc_count"])
+  parseJSON _ = mempty
+
+instance FromJSON DateHistogramResult where
+  parseJSON (Object v) = DateHistogramResult <$>
+                         v .: "key" <*>
+                         v .:? "key_as_string" <*>
+                         v .: "doc_count" <*>
+                         (pure $ getNamedSubAgg v [ "key"
+                                                  , "doc_count"
+                                                  , "key_as_string"
+                                                  ]
+                         )
+  parseJSON _ = mempty
+
+instance FromJSON DateRangeResult where
+  parseJSON = withObject "DateRangeResult" parse
+    where parse v = DateRangeResult <$>
+                    v .: "key" <*>
+                    (fmap posixMS <$> v .:? "from") <*>
+                    v .:? "from_as_string" <*>
+                    (fmap posixMS <$> v .:? "to") <*>
+                    v .:? "to_as_string" <*>
+                    v .: "doc_count" <*>
+                    (pure $ getNamedSubAgg v [ "key"
+                                             , "from"
+                                             , "from_as_string"
+                                             , "to"
+                                             , "to_as_string"
+                                             , "doc_count"
+                                             ]
+                    )
+
+instance (FromJSON a) => FromJSON (TopHitResult a) where
+  parseJSON (Object v) = TopHitResult <$>
+                         v .: "hits"
+  parseJSON _ = fail "Failure in FromJSON (TopHitResult a)"
+
+data MissingResult = MissingResult
+  { missingDocCount :: Int } deriving (Show)
+
+data TopHitResult a = TopHitResult
+  { tarHits :: (SearchHits a)
+  } deriving Show
+
+data SearchHits a =
+  SearchHits { hitsTotal :: Int
+             , maxScore  :: Score
+             , hits      :: [Hit a] } deriving (Eq, Show)
+
+instance Semigroup (SearchHits a) where
+  (SearchHits ta ma ha) <> (SearchHits tb mb hb) =
+    SearchHits (ta + tb) (max ma mb) (ha <> hb)
+
+instance Monoid (SearchHits a) where
+  mempty = SearchHits 0 Nothing mempty
+  mappend = (<>)
+
+data Hit a =
+  Hit { hitIndex     :: IndexName
+      , hitType      :: MappingName
+      , hitDocId     :: DocId
+      , hitScore     :: Score
+      , hitSource    :: Maybe a
+      , hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
+
+-- Try to get an AggregationResults when we don't know the
+-- field name. We filter out the known keys to try to minimize the noise.
+getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults
+getNamedSubAgg o knownKeys = maggRes
+  where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o
+        maggRes
+          | HM.null unknownKeys = Nothing
+          | otherwise           = Just . M.fromList $ HM.toList unknownKeys
+
+instance ToJSON CollectionMode where
+  toJSON BreadthFirst = "breadth_first"
+  toJSON DepthFirst   = "depth_first"
+
+instance ToJSON DateRangeAggregation where
+  toJSON DateRangeAggregation {..} =
+    omitNulls [ "field"  .= draField
+              , "format" .= draFormat
+              , "ranges" .= toList draRanges
+              ]
+
+instance (FromJSON a) => FromJSON (SearchHits a) where
+  parseJSON (Object v) = SearchHits <$>
+                         v .: "total" <*>
+                         v .: "max_score" <*>
+                         v .: "hits"
+  parseJSON _ = empty
+
+instance ToJSON DateRangeAggRange where
+  toJSON (DateRangeFrom e)        = object [ "from" .= e ]
+  toJSON (DateRangeTo e)          = object [ "to" .= e ]
+  toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ]
+
+instance (FromJSON a) => FromJSON (Hit a) where
+  parseJSON (Object v) = Hit <$>
+                         v .: "_index" <*>
+                         v .: "_type" <*>
+                         v .: "_id" <*>
+                         v .: "_score" <*>
+                         v .:? "_source" <*>
+                         v .:? "highlight"
+  parseJSON _ = empty
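Putting the aggregation pieces together, a minimal sketch using the smart constructors above (the aggregation and field names are invented):

    userAgg :: Aggregations
    userAgg = mkAggregations "users" (TermsAgg (mkTermsAggregation "user"))
    -- encode userAgg => {"users":{"terms":{"field":"user"}}}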
diff --git a/src/Database/V1/Bloodhound/Internal/Client.hs b/src/Database/V1/Bloodhound/Internal/Client.hs
new file mode 100644
index 0000000..b8543f0
--- /dev/null
+++ b/src/Database/V1/Bloodhound/Internal/Client.hs
@@ -0,0 +1,2341 @@
+{-# LANGUAGE FlexibleInstances          #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE MultiParamTypeClasses      #-}
+{-# LANGUAGE OverloadedStrings          #-}
+{-# LANGUAGE RecordWildCards            #-}
+{-# LANGUAGE UndecidableInstances       #-}
+
+module Database.V1.Bloodhound.Internal.Client where
+
+import           Bloodhound.Import
+
+import           Control.Applicative as A
+import qualified Data.HashMap.Strict as HM
+import           Data.Text (Text)
+import qualified Data.Text as T
+import qualified Data.Traversable as DT
+import qualified Data.Vector as V
+import qualified Data.Version as Vers
+import           GHC.Enum
+import           Network.HTTP.Client
+import qualified Text.ParserCombinators.ReadP as RP
+import           Text.Read (Read (..))
+import qualified Text.Read as TR
+
+import           Database.V1.Bloodhound.Internal.Newtypes
+import           Database.V1.Bloodhound.Internal.Query
+import           Database.V1.Bloodhound.Internal.StringlyTyped
+
+
+{-| Common environment for Elasticsearch calls. Connections will be
+    pipelined according to the provided HTTP connection manager.
+-}
+data BHEnv = BHEnv { bhServer      :: Server
+                   , bhManager     :: Manager
+                   , bhRequestHook :: Request -> IO Request
+                   -- ^ Low-level hook that is run before every request is sent. Used to implement custom authentication strategies. Defaults to 'return' with 'mkBHEnv'.
+                   }
+
+instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
+  getBHEnv = ask
+
+{-| 'Server' is used with the client functions to point at the ES instance
+-}
+newtype Server = Server Text deriving (Eq, Show, FromJSON)
+
+{-| All API calls to Elasticsearch operate within
+    MonadBH
+    . The idea is that it can be easily embedded in your
+    own monad transformer stack. A default instance for a ReaderT and
+    alias 'BH' is provided for the simple case.
+-}
+class (Functor m, A.Applicative m, MonadIO m) => MonadBH m where
+  getBHEnv :: m BHEnv
+
+-- $setup
+-- >>> :set -XOverloadedStrings
+-- >>> import Data.Aeson
+-- >>> import Database.V1.Bloodhound
+-- >>> let testServer = (Server "http://localhost:9200")
+-- >>> let testIndex = IndexName "twitter"
+-- >>> let testMapping = MappingName "tweet"
+-- >>> let defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
+
+-- defaultIndexSettings is exported by Database.Bloodhound as well
+-- no trailing slashes in servers, library handles building the path.
+
+-- | Create a 'BHEnv' with all optional fields defaulted. HTTP hook
+-- will be a noop. You can use the exported fields to customize it further, e.g.:
+--
+-- >> (mkBHEnv myServer myManager) { bhRequestHook = customHook }
+mkBHEnv :: Server -> Manager -> BHEnv
+mkBHEnv s m = BHEnv s m return
+
+newtype BH m a = BH {
+  unBH :: ReaderT BHEnv m a
+  } deriving ( Functor
+             , A.Applicative
+             , Monad
+             , MonadIO
+             , MonadState s
+             , MonadWriter w
+             , MonadError e
+             , Alternative
+             , MonadPlus
+             , MonadFix
+             , MonadThrow
+             , MonadCatch
+             , MonadMask)
+
+instance MonadTrans BH where
+  lift = BH . lift
+
+instance (MonadReader r m) => MonadReader r (BH m) where
+  ask = lift ask
+  local f (BH (ReaderT m)) = BH $ ReaderT $ \r ->
+    local f (m r)
+
+instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where
+  getBHEnv = BH getBHEnv
+
+runBH :: BHEnv -> BH m a -> m a
+runBH e f = runReaderT (unBH f) e
+
+{-| 'Version' is embedded in 'Status' -}
+data Version = Version { number          :: VersionNumber
+                       , build_hash      :: BuildHash
+                       , build_timestamp :: UTCTime
+                       , build_snapshot  :: Bool
+                       , lucene_version  :: VersionNumber } deriving (Eq, Show)
+
+instance ToJSON Version where
+  toJSON Version {..} = object ["number" .= number
+                               ,"build_hash" .= build_hash
+                               ,"build_timestamp" .= build_timestamp
+                               ,"build_snapshot" .= build_snapshot
+                               ,"lucene_version" .= lucene_version]
+
+instance FromJSON Version where
+  parseJSON = withObject "Version" parse
+    where parse o = Version
+                    <$> o .: "number"
+                    <*> o .: "build_hash"
+                    <*> o .: "build_timestamp"
+                    <*> o .: "build_snapshot"
+                    <*> o .: "lucene_version"
+
+-- | Traditional software versioning number
+newtype VersionNumber = VersionNumber
+  { versionNumber :: Vers.Version
+  } deriving (Eq, Show, Ord)
+
+{-| 'Status' is a data type for describing the JSON body returned by
+    Elasticsearch when you query its status. This was deprecated in 1.2.0.
+
+
+-}
+
+data Status = Status { ok      :: Maybe Bool
+                     , status  :: Int
+                     , name    :: Text
+                     , version :: Version
+                     , tagline :: Text } deriving (Eq, Show)
+
+instance FromJSON Status where
+  parseJSON (Object v) = Status <$>
+                         v .:? "ok" <*>
+                         (v .:? "status" .!= 200) <*>
+                         v .: "name" <*>
+                         v .: "version" <*>
+                         v .: "tagline"
+  parseJSON _ = empty
+
+{-| 'IndexSettings' is used to configure the shards and replicas when you create
+    an Elasticsearch Index.
+
+
+-}
+
+data IndexSettings =
+  IndexSettings { indexShards   :: ShardCount
+                , indexReplicas :: ReplicaCount } deriving (Eq, Show)
+
+{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -}
+defaultIndexSettings :: IndexSettings
+defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
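A minimal sketch of wiring these pieces together (the server URL and manager settings are assumptions; 'getStatus' is the V1 client function shown earlier in this diff):

    main :: IO ()
    main = do
      manager <- newManager defaultManagerSettings
      let env = mkBHEnv (Server "http://localhost:9200") manager
      status <- runBH env getStatus
      print status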
+
+
+{-| 'IndexOptimizationSettings' is used to configure index optimization. See
+
+    for more info.
+-}
+data IndexOptimizationSettings =
+  IndexOptimizationSettings { maxNumSegments :: Maybe Int
+                            -- ^ Number of segments to optimize to. 1 will fully optimize the index. If omitted, the default behavior is to only optimize if the server deems it necessary.
+                            , onlyExpungeDeletes :: Bool
+                            -- ^ Should the optimize process only expunge segments with deletes in them? If the purpose of the optimization is to free disk space, this should be set to True.
+                            , flushAfterOptimize :: Bool
+                            -- ^ Should a flush be performed after the optimize.
+                            } deriving (Eq, Show)
+
+
+{-| 'defaultIndexOptimizationSettings' implements the default settings that
+    ElasticSearch uses for index optimization. 'maxNumSegments' is Nothing,
+    'onlyExpungeDeletes' is False, and 'flushAfterOptimize' is True.
+-}
+defaultIndexOptimizationSettings :: IndexOptimizationSettings
+defaultIndexOptimizationSettings = IndexOptimizationSettings Nothing False True
+
+{-| 'UpdatableIndexSetting' is a setting which may be updated after an index is created.
+
+
+-}
+data UpdatableIndexSetting = NumberOfReplicas ReplicaCount
+                           -- ^ The number of replicas each shard has.
+                           | AutoExpandReplicas ReplicaBounds
+                           | BlocksReadOnly Bool
+                           -- ^ Set to True to have the index read only. False to allow writes and metadata changes.
+                           | BlocksRead Bool
+                           -- ^ Set to True to disable read operations against the index.
+                           | BlocksWrite Bool
+                           -- ^ Set to True to disable write operations against the index.
+                           | BlocksMetaData Bool
+                           -- ^ Set to True to disable metadata operations against the index.
+                           | RefreshInterval NominalDiffTime
+                           -- ^ The async refresh interval of a shard
+                           | IndexConcurrency Int
+                           | FailOnMergeFailure Bool
+                           | TranslogFlushThresholdOps Int
+                           -- ^ When to flush on operations.
+                           | TranslogFlushThresholdSize Bytes
+                           -- ^ When to flush based on translog (bytes) size.
+                           | TranslogFlushThresholdPeriod NominalDiffTime
+                           -- ^ When to flush based on a period of not flushing.
+                           | TranslogDisableFlush Bool
+                           -- ^ Disables flushing. Note, should be set for a short interval and then enabled.
+                           | CacheFilterMaxSize (Maybe Bytes)
+                           -- ^ The maximum size of filter cache (per segment in shard).
+                           | CacheFilterExpire (Maybe NominalDiffTime)
+                           -- ^ The expire after access time for filter cache.
+                           | GatewaySnapshotInterval NominalDiffTime
+                           -- ^ The gateway snapshot interval (only applies to shared gateways).
+                           | RoutingAllocationInclude (NonEmpty NodeAttrFilter)
+                           -- ^ A node matching any rule will be allowed to host shards from the index.
+                           | RoutingAllocationExclude (NonEmpty NodeAttrFilter)
+                           -- ^ A node matching any rule will NOT be allowed to host shards from the index.
+                           | RoutingAllocationRequire (NonEmpty NodeAttrFilter)
+                           -- ^ Only nodes matching all rules will be allowed to host shards from the index.
+                           | RoutingAllocationEnable AllocationPolicy
+                           -- ^ Enables shard allocation for a specific index.
+                           | RoutingAllocationShardsPerNode ShardCount
+                           -- ^ Controls the total number of shards (replicas and primaries) allowed to be allocated on a single node.
+                           | RecoveryInitialShards InitialShardCount
+                           -- ^ When using local gateway a particular shard is recovered only if there can be allocated quorum shards in the cluster.
+                           | GCDeletes NominalDiffTime
+                           | TTLDisablePurge Bool
+                           -- ^ Disables temporarily the purge of expired docs.
+                           | TranslogFSType FSType
+                           | IndexCompoundFormat CompoundFormat
+                           | IndexCompoundOnFlush Bool
+                           | WarmerEnabled Bool
+                           deriving (Eq, Show)
+
+data AllocationPolicy = AllocAll
+                      -- ^ Allows shard allocation for all shards.
+                      | AllocPrimaries
+                      -- ^ Allows shard allocation only for primary shards.
+                      | AllocNewPrimaries
+                      -- ^ Allows shard allocation only for primary shards for new indices.
+                      | AllocNone
+                      -- ^ No shard allocation is allowed
+                      deriving (Eq, Show)
+
+data ReplicaBounds = ReplicasBounded Int Int
+                   | ReplicasLowerBounded Int
+                   | ReplicasUnbounded
+                   deriving (Eq, Show)
+
+-- | A measure of bytes used for various configurations. You may want
+-- to use smart constructors like 'gigabytes' for larger values.
+--
+-- >>> gigabytes 9
+-- Bytes 9000000000
+--
+-- >>> megabytes 9
+-- Bytes 9000000
+--
+-- >>> kilobytes 9
+-- Bytes 9000
+newtype Bytes = Bytes Int deriving (Eq, Show, Ord, ToJSON, FromJSON)
+
+gigabytes :: Int -> Bytes
+gigabytes n = megabytes (1000 * n)
+
+
+megabytes :: Int -> Bytes
+megabytes n = kilobytes (1000 * n)
+
+
+kilobytes :: Int -> Bytes
+kilobytes n = Bytes (1000 * n)
+
+data Interval = Year
+              | Quarter
+              | Month
+              | Week
+              | Day
+              | Hour
+              | Minute
+              | Second
+              | FractionalInterval Float TimeInterval deriving (Eq, Show)
+
+data TimeInterval = Weeks
+                  | Days
+                  | Hours
+                  | Minutes
+                  | Seconds deriving Eq
+
+instance Show TimeInterval where
+  show Weeks   = "w"
+  show Days    = "d"
+  show Hours   = "h"
+  show Minutes = "m"
+  show Seconds = "s"
+
+instance Read TimeInterval where
+  readPrec = f =<< TR.get
+    where
+      f 'w' = return Weeks
+      f 'd' = return Days
+      f 'h' = return Hours
+      f 'm' = return Minutes
+      f 's' = return Seconds
+      f _   = fail "TimeInterval expected one of w, d, h, m, s"
+
+-- | Typically a 7 character hex string.
+newtype BuildHash = BuildHash { buildHash :: Text }
+  deriving (Eq, Ord, Show, FromJSON, ToJSON)
+
+data NodeAttrFilter = NodeAttrFilter
+  { nodeAttrFilterName   :: NodeAttrName
+  , nodeAttrFilterValues :: NonEmpty Text }
+  deriving (Eq, Ord, Show)
+
+newtype NodeAttrName = NodeAttrName Text deriving (Eq, Ord, Show)
+
+data InitialShardCount = QuorumShards
+                       | QuorumMinus1Shards
+                       | FullShards
+                       | FullMinus1Shards
+                       | ExplicitShards Int
+                       deriving (Eq, Show)
+
+instance FromJSON InitialShardCount where
+  parseJSON v = withText "InitialShardCount" parseText v
+            <|> ExplicitShards <$> parseJSON v
+    where parseText "quorum"   = pure QuorumShards
+          parseText "quorum-1" = pure QuorumMinus1Shards
+          parseText "full"     = pure FullShards
+          parseText "full-1"   = pure FullMinus1Shards
+          parseText _          = mzero
+
+instance ToJSON InitialShardCount where
+  toJSON QuorumShards       = String "quorum"
+  toJSON QuorumMinus1Shards = String "quorum-1"
+  toJSON FullShards         = String "full"
+  toJSON FullMinus1Shards   = String "full-1"
+  toJSON (ExplicitShards x) = toJSON x
+
+data FSType = FSSimple
+            | FSBuffered deriving (Eq, Show)
+
+instance ToJSON FSType where
+  toJSON FSSimple   = "simple"
+  toJSON FSBuffered = "buffered"
+
+instance FromJSON FSType where
+  parseJSON = withText "FSType" parse
+    where parse "simple"   = pure FSSimple
+          parse "buffered" = pure FSBuffered
+          parse t          = fail ("Invalid FSType: " <> show t)
+
+data CompoundFormat = CompoundFileFormat Bool
+                    | MergeSegmentVsTotalIndex Double
+                    -- ^ percentage between 0 and 1 where 0 is false, 1 is true
+                    deriving (Eq, Show)
+
+attrFilterJSON :: NonEmpty NodeAttrFilter -> Value
+attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs)
+                           | NodeAttrFilter (NodeAttrName n) vs <- toList fs]
+
+parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter)
+parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse
+  where parse o = case HM.toList o of
+                    []   -> fail "Expected non-empty list of NodeAttrFilters"
+                    x:xs -> DT.mapM (uncurry parse') (x :| xs)
+        parse' n = withText "Text" $ \t ->
+          case T.splitOn "," t of
+            fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs))
+            []     -> fail "Expected non-empty list of filter values"
+
+newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime }
+
+data IndexSettingsSummary = IndexSettingsSummary
+  { sSummaryIndexName     :: IndexName
+  , sSummaryFixedSettings :: IndexSettings
+  , sSummaryUpdateable    :: [UpdatableIndexSetting]}
+  deriving (Eq, Show)
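For reference, a sketch of the wire format 'attrFilterJSON' above produces; filter values are comma-joined under the attribute name (the names here are invented):

    attrFilterJSON (NodeAttrFilter (NodeAttrName "rack") ("r1" :| ["r2"]) :| [])
    -- => {"rack":"r1,r2"}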
+
+parseSettings :: Object -> Parser [UpdatableIndexSetting]
+parseSettings o = do
+  o' <- o .: "index"
+  -- slice the index object into singleton hashmaps and try to parse each
+  parses <- forM (HM.toList o') $ \(k, v) -> do
+    -- blocks are now nested into the "index" key, which is not how they're serialized
+    let atRoot = Object (HM.singleton k v)
+    let atIndex = Object (HM.singleton "index" atRoot)
+    optional (parseJSON atRoot <|> parseJSON atIndex)
+  return (catMaybes parses)
+
+{-| 'OpenCloseIndex' is a sum type for opening and closing indices.
+
+
+-}
+data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Show)
+
+data FieldType = GeoPointType
+               | GeoShapeType
+               | FloatType
+               | IntegerType
+               | LongType
+               | ShortType
+               | ByteType deriving (Eq, Show)
+
+data FieldDefinition =
+  FieldDefinition { fieldType :: FieldType } deriving (Eq, Show)
+
+{-| An 'IndexTemplate' defines a template that will automatically be
+    applied to new indices created. The templates include both
+    'IndexSettings' and mappings, and a simple 'TemplatePattern' that
+    controls if the template will be applied to the index created.
+    Specify mappings as follows: @[toJSON TweetMapping, ...]@
+
+    https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html
+-}
+data IndexTemplate =
+  IndexTemplate { templatePattern  :: TemplatePattern
+                , templateSettings :: Maybe IndexSettings
+                , templateMappings :: [Value]
+                }
+
+data MappingField =
+  MappingField { mappingFieldName :: FieldName
+               , fieldDefinition  :: FieldDefinition } deriving (Eq, Show)
+
+{-| Support for type reification of 'Mapping's is currently incomplete, for
+    now the mapping API verbiage expects a 'ToJSON'able blob.
+
+    Indexes have mappings, mappings are schemas for the documents contained
+    in the index. I'd recommend having only one mapping per index, always
+    having a mapping, and keeping different kinds of documents separated
+    if possible.
+-}
+data Mapping = Mapping { typeName      :: TypeName
+                       , mappingFields :: [MappingField] } deriving (Eq, Show)
+
+{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
+    operation index, create, delete, and update. 'BulkIndex' behaves like an
+    "upsert", 'BulkCreate' will fail if a document already exists at the DocId.
+
+
+-}
+data BulkOperation =
+    BulkIndex  IndexName MappingName DocId Value
+  | BulkCreate IndexName MappingName DocId Value
+  | BulkDelete IndexName MappingName DocId
+  | BulkUpdate IndexName MappingName DocId Value deriving (Eq, Show)
+
+{-| 'EsResult' describes the standard wrapper JSON document that you see in
+    successful Elasticsearch lookups or lookups that couldn't find the document.
+-}
+data EsResult a = EsResult { _index      :: Text
+                           , _type       :: Text
+                           , _id         :: Text
+                           , foundResult :: Maybe (EsResultFound a)} deriving (Eq, Show)
+
+instance (FromJSON a) => FromJSON (EsResult a) where
+  parseJSON jsonVal@(Object v) = do
+    found <- v .:? "found" .!= False
+    fr <- if found
+          then parseJSON jsonVal
+          else return Nothing
+    EsResult <$> v .: "_index" <*>
+                 v .: "_type" <*>
+                 v .: "_id" <*>
+                 pure fr
+  parseJSON _ = empty
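A hedged sketch of building 'BulkOperation's (index, mapping, ids, and payload are invented); the bulk encoder shown earlier in this diff turns a stream of these into the newline-delimited JSON the _bulk endpoint expects:

    bulkOps :: [BulkOperation]
    bulkOps =
      [ BulkIndex  (IndexName "twitter") (MappingName "tweet") (DocId "1")
                   (object ["user" .= String "bitemyapp"])
      , BulkDelete (IndexName "twitter") (MappingName "tweet") (DocId "2")
      ]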
+
+{-| 'EsResultFound' contains the document and its metadata inside of an
+    'EsResult' when the document was successfully found.
+-}
+data EsResultFound a = EsResultFound { _version :: DocVersion
+                                     , _source  :: a } deriving (Eq, Show)
+
+instance (FromJSON a) => FromJSON (EsResultFound a) where
+  parseJSON (Object v) = EsResultFound <$>
+                         v .: "_version" <*>
+                         v .: "_source"
+  parseJSON _ = empty
+
+{-| 'EsError' is the generic type that will be returned when there was a
+    problem. If you can't parse the expected response, it's a good idea to
+    try parsing this.
+-}
+data EsError = EsError { errorStatus  :: Int
+                       , errorMessage :: Text } deriving (Eq, Show)
+
+{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response
+returned by the ElasticSearch server. If you encounter this error, please
+verify that your domain data types and FromJSON instances are working properly
+(for example, the 'a' of '[Hit a]' in 'SearchResult.searchHits.hits'). If you're
+sure that your mappings are correct, then this error may be an indication of an
+incompatibility between Bloodhound and ElasticSearch. Please open a bug report
+and be sure to include the exception body.
+-}
+data EsProtocolException = EsProtocolException { esProtoExBody :: LByteString }
+  deriving (Eq, Show)
+
+instance Exception EsProtocolException
+
+data IndexAlias = IndexAlias { srcIndex   :: IndexName
+                             , indexAlias :: IndexAliasName } deriving (Eq, Show)
+
+{-| 'DocVersion' is an integer version number for a document between 1
+and 9.2e+18 used for <>.
+-}
+newtype DocVersion = DocVersion {
+    docVersionNumber :: Int
+  } deriving (Eq, Show, Ord, ToJSON)
+
+instance FromJSON DocVersion where
+  parseJSON v = do
+    i <- parseJSON v
+    maybe (fail "DocVersion out of range") return $ mkDocVersion i
+
+-- | Smart constructor for in-range doc version
+mkDocVersion :: Int -> Maybe DocVersion
+mkDocVersion i
+  | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) =
+    Just $ DocVersion i
+  | otherwise = Nothing
+
+data IndexAliasAction = AddAlias IndexAlias IndexAliasCreate
+                      | RemoveAlias IndexAlias deriving (Show, Eq)
+
+data IndexAliasCreate = IndexAliasCreate { aliasCreateRouting :: Maybe AliasRouting
+                                         , aliasCreateFilter  :: Maybe Filter}
+                        deriving (Show, Eq)
+
+data AliasRouting = AllAliasRouting RoutingValue
+                  | GranularAliasRouting (Maybe SearchAliasRouting) (Maybe IndexAliasRouting)
+                  deriving (Show, Eq)
+
+newtype SearchAliasRouting = SearchAliasRouting (NonEmpty RoutingValue) deriving (Show, Eq)
+
+newtype IndexAliasRouting = IndexAliasRouting RoutingValue deriving (Show, Eq, ToJSON, FromJSON)
+
+newtype RoutingValue = RoutingValue { routingValue :: Text } deriving (Show, Eq, ToJSON, FromJSON)
+
+newtype IndexAliasesSummary = IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } deriving (Show, Eq)
+
+{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -}
+data IndexAliasSummary = IndexAliasSummary { indexAliasSummaryAlias  :: IndexAlias
+                                           , indexAliasSummaryCreate :: IndexAliasCreate} deriving (Show, Eq)
+
+{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its
+own version numbers instead of ones from ES.
+-}
+newtype ExternalDocVersion = ExternalDocVersion DocVersion
+  deriving (Eq, Show, Ord, Bounded, Enum, ToJSON)
+
+{-| 'VersionControl' is specified when indexing documents as an
+optimistic concurrency control.
+-}
+data VersionControl = NoVersionControl
+                    -- ^ Don't send a version. This is a pure overwrite.
+                    | InternalVersion DocVersion
+                    -- ^ Use the default ES versioning scheme. Only
+                    -- index the document if the version is the same
+                    -- as the one specified. Only applicable to
Only applicable to + -- updates, as you should be getting Version from + -- a search result. + | ExternalGT ExternalDocVersion + -- ^ Use your own version numbering. Only index + -- the document if the version is strictly higher + -- OR the document doesn't exist. The given + -- version will be used as the new version number + -- for the stored document. N.B. All updates must + -- increment this number, meaning there is some + -- global, external ordering of updates. + | ExternalGTE ExternalDocVersion + -- ^ Use your own version numbering. Only index + -- the document if the version is equal or higher + -- than the stored version. Will succeed if there + -- is no existing document. The given version will + -- be used as the new version number for the + -- stored document. Use with care, as this could + -- result in data loss. + | ForceVersion ExternalDocVersion + -- ^ The document will always be indexed and the + -- given version will be the new version. This is + -- typically used for correcting errors. Use with + -- care, as this could result in data loss. + deriving (Show, Eq, Ord) + +{-| 'DocumentParent' is used to specify a parent document. +-} +newtype DocumentParent = DocumentParent DocId + deriving (Eq, Show) + +{-| 'IndexDocumentSettings' are special settings supplied when indexing +a document. For the best backwards compatiblity when new fields are +added, you should probably prefer to start with 'defaultIndexDocumentSettings' +-} +data IndexDocumentSettings = + IndexDocumentSettings { idsVersionControl :: VersionControl + , idsParent :: Maybe DocumentParent + } deriving (Eq, Show) + +{-| Reasonable default settings. Chooses no version control and no parent. +-} +defaultIndexDocumentSettings :: IndexDocumentSettings +defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing + +{-| 'IndexSelection' is used for APIs which take a single index, a list of + indexes, or the special @_all@ index. +-} +--TODO: this does not fully support . It wouldn't be too hard to implement but you'd have to add the optional parameters (ignore_unavailable, allow_no_indices, expand_wildcards) to any APIs using it. Also would be a breaking API. +data IndexSelection = IndexList (NonEmpty IndexName) + | AllIndexes deriving (Eq, Show) + +{-| 'NodeSelection' is used for most cluster APIs. See for more details. +-} +data NodeSelection = LocalNode + -- ^ Whatever node receives this request + | NodeList (NonEmpty NodeSelector) + | AllNodes deriving (Eq, Show) + + +-- | An exact match or pattern to identify a node. Note that All of +-- these options support wildcarding, so your node name, server, attr +-- name can all contain * characters to be a fuzzy match. +data NodeSelector = NodeByName NodeName + | NodeByFullNodeId FullNodeId + | NodeByHost Server + -- ^ e.g. 10.0.0.1 or even 10.0.0.* + | NodeByAttribute NodeAttrName Text + -- ^ NodeAttrName can be a pattern, e.g. rack*. The value can too. + deriving (Eq, Show) + +-- | A human-readable node name that is supplied by the user in the +-- node config or automatically generated by ElasticSearch. +newtype NodeName = NodeName { nodeName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +-- | Unique, automatically-generated name assigned to nodes that are +-- usually returned in node-oriented APIs. +newtype FullNodeId = FullNodeId { fullNodeId :: Text } + deriving (Eq, Ord, Show, FromJSON) + +-- | Username type used for HTTP Basic authentication. See 'basicAuthHook'. 
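+--
+--   A minimal sketch of wiring credentials in (this assumes 'mkBHEnv' and
+--   the 'bhRequestHook' field from the client module are in scope):
+--
+-- > env = (mkBHEnv server manager)
+-- >         { bhRequestHook = basicAuthHook (EsUsername "user") (EsPassword "pass") }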
+newtype EsUsername = EsUsername { esUsername :: Text } deriving (Show, Eq) + +-- | Password type used for HTTP Basic authentication. See 'basicAuthHook'. +newtype EsPassword = EsPassword { esPassword :: Text } deriving (Show, Eq) + +data SnapshotRepoSelection = SnapshotRepoList (NonEmpty SnapshotRepoPattern) + | AllSnapshotRepos deriving (Eq, Show) + + +-- | Either specifies an exact repo name or one with globs in it, +-- e.g. @RepoPattern "foo*"@ __NOTE__: Patterns are not supported on ES < 1.7 +data SnapshotRepoPattern = ExactRepo SnapshotRepoName + | RepoPattern Text + deriving (Eq, Show) + +-- | The unique name of a snapshot repository. +newtype SnapshotRepoName = SnapshotRepoName { snapshotRepoName :: Text } + deriving (Eq, Ord, Show, ToJSON, FromJSON) + + +-- | A generic representation of a snapshot repo. This is what gets +-- sent to and parsed from the server. For repo types enabled by +-- plugins that aren't exported by this library, consider making a +-- custom type which implements 'SnapshotRepo'. If it is a common repo +-- type, consider submitting a pull request to have it included in the +-- library proper +data GenericSnapshotRepo = GenericSnapshotRepo { + gSnapshotRepoName :: SnapshotRepoName + , gSnapshotRepoType :: SnapshotRepoType + , gSnapshotRepoSettings :: GenericSnapshotRepoSettings + } deriving (Eq, Show) + +-- | Law: fromGSnapshotRepo (toGSnapshotRepo r) == Right r +class SnapshotRepo r where + toGSnapshotRepo :: r -> GenericSnapshotRepo + fromGSnapshotRepo :: GenericSnapshotRepo -> Either SnapshotRepoConversionError r + +data SnapshotRepoConversionError = RepoTypeMismatch SnapshotRepoType SnapshotRepoType + -- ^ Expected type and actual type + | OtherRepoConversionError Text + deriving (Show, Eq) + +instance SnapshotRepo GenericSnapshotRepo where + toGSnapshotRepo = id + fromGSnapshotRepo = Right + + +newtype SnapshotRepoType = SnapshotRepoType { snapshotRepoType :: Text } + deriving (Eq, Ord, Show, ToJSON, FromJSON) + + +-- | Opaque representation of snapshot repo settings. Instances of +-- 'SnapshotRepo' will produce this. +newtype GenericSnapshotRepoSettings = GenericSnapshotRepoSettings { gSnapshotRepoSettingsObject :: Object } + deriving (Eq, Show, ToJSON) + + + -- Regardless of whether you send strongly typed json, my version of + -- ES sends back stringly typed json in the settings, e.g. booleans + -- as strings, so we'll try to convert them. +instance FromJSON GenericSnapshotRepoSettings where + parseJSON = fmap (GenericSnapshotRepoSettings . fmap unStringlyTypeJSON). parseJSON + +-- | The result of running 'verifySnapshotRepo'. 
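+--
+-- For example, a verification response like
+-- @{"nodes": {"node-id-1": {"name": "node-1"}}}@ (node id and name here are
+-- hypothetical) parses to one 'SnapshotNodeVerification' per entry under
+-- @nodes@, as the 'FromJSON' instance below shows.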
+newtype SnapshotVerification = SnapshotVerification { snapshotNodeVerifications :: [SnapshotNodeVerification] } + deriving (Eq, Show) + + +instance FromJSON SnapshotVerification where + parseJSON = withObject "SnapshotVerification" parse + where + parse o = do + o2 <- o .: "nodes" + SnapshotVerification <$> mapM (uncurry parse') (HM.toList o2) + parse' rawFullId = withObject "SnapshotNodeVerification" $ \o -> + SnapshotNodeVerification (FullNodeId rawFullId) <$> o .: "name" + + +-- | A node that has verified a snapshot +data SnapshotNodeVerification = SnapshotNodeVerification { + snvFullId :: FullNodeId + , snvNodeName :: NodeName + } deriving (Eq, Show) + +newtype ClusterName = ClusterName { clusterName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +data NodesInfo = NodesInfo { + nodesInfo :: [NodeInfo] + , nodesClusterName :: ClusterName + } deriving (Eq, Show) + +instance FromJSON NodesInfo where + parseJSON = withObject "NodesInfo" parse + where + parse o = do + nodes <- o .: "nodes" + infos <- forM (HM.toList nodes) $ \(fullNID, v) -> do + node <- parseJSON v + parseNodeInfo (FullNodeId fullNID) node + cn <- o .: "cluster_name" + return (NodesInfo infos cn) + +data NodesStats = NodesStats { + nodesStats :: [NodeStats] + , nodesStatsClusterName :: ClusterName + } deriving (Eq, Show) + +data NodeStats = NodeStats { + nodeStatsName :: NodeName + , nodeStatsFullId :: FullNodeId + , nodeStatsBreakersStats :: Maybe NodeBreakersStats + , nodeStatsHTTP :: NodeHTTPStats + , nodeStatsTransport :: NodeTransportStats + , nodeStatsFS :: NodeFSStats + , nodeStatsNetwork :: NodeNetworkStats + , nodeStatsThreadPool :: NodeThreadPoolsStats + , nodeStatsJVM :: NodeJVMStats + , nodeStatsProcess :: NodeProcessStats + , nodeStatsOS :: NodeOSStats + , nodeStatsIndices :: NodeIndicesStats + } deriving (Eq, Show) + +data NodeBreakersStats = NodeBreakersStats { + nodeStatsParentBreaker :: NodeBreakerStats + , nodeStatsRequestBreaker :: NodeBreakerStats + , nodeStatsFieldDataBreaker :: NodeBreakerStats + } deriving (Eq, Show) + +data NodeBreakerStats = NodeBreakerStats { + nodeBreakersTripped :: Int + , nodeBreakersOverhead :: Double + , nodeBreakersEstSize :: Bytes + , nodeBreakersLimitSize :: Bytes + } deriving (Eq, Show) + +data NodeHTTPStats = NodeHTTPStats { + nodeHTTPTotalOpened :: Int + , nodeHTTPCurrentOpen :: Int + } deriving (Eq, Show) + +data NodeTransportStats = NodeTransportStats { + nodeTransportTXSize :: Bytes + , nodeTransportCount :: Int + , nodeTransportRXSize :: Bytes + , nodeTransportRXCount :: Int + , nodeTransportServerOpen :: Int + } deriving (Eq, Show) + +data NodeFSStats = NodeFSStats { + nodeFSDataPaths :: [NodeDataPathStats] + , nodeFSTotal :: NodeFSTotalStats + , nodeFSTimestamp :: UTCTime + } deriving (Eq, Show) + +data NodeDataPathStats = NodeDataPathStats { + nodeDataPathDiskServiceTime :: Maybe Double + , nodeDataPathDiskQueue :: Maybe Double + , nodeDataPathIOSize :: Maybe Bytes + , nodeDataPathWriteSize :: Maybe Bytes + , nodeDataPathReadSize :: Maybe Bytes + , nodeDataPathIOOps :: Maybe Int + , nodeDataPathWrites :: Maybe Int + , nodeDataPathReads :: Maybe Int + , nodeDataPathAvailable :: Bytes + , nodeDataPathFree :: Bytes + , nodeDataPathTotal :: Bytes + , nodeDataPathType :: Maybe Text + , nodeDataPathDevice :: Text + , nodeDataPathMount :: Text + , nodeDataPathPath :: Text + } deriving (Eq, Show) + +data NodeFSTotalStats = NodeFSTotalStats { + nodeFSTotalDiskServiceTime :: Maybe Double + , nodeFSTotalDiskQueue :: Maybe Double + , nodeFSTotalIOSize :: Maybe Bytes + , 
nodeFSTotalWriteSize :: Maybe Bytes + , nodeFSTotalReadSize :: Maybe Bytes + , nodeFSTotalIOOps :: Maybe Int + , nodeFSTotalWrites :: Maybe Int + , nodeFSTotalReads :: Maybe Int + , nodeFSTotalAvailable :: Bytes + , nodeFSTotalFree :: Bytes + , nodeFSTotalTotal :: Bytes + } deriving (Eq, Show) + +data NodeNetworkStats = NodeNetworkStats { + nodeNetTCPOutRSTs :: Int + , nodeNetTCPInErrs :: Int + , nodeNetTCPAttemptFails :: Int + , nodeNetTCPEstabResets :: Int + , nodeNetTCPRetransSegs :: Int + , nodeNetTCPOutSegs :: Int + , nodeNetTCPInSegs :: Int + , nodeNetTCPCurrEstab :: Int + , nodeNetTCPPassiveOpens :: Int + , nodeNetTCPActiveOpens :: Int + } deriving (Eq, Show) + +data NodeThreadPoolsStats = NodeThreadPoolsStats { + nodeThreadPoolsStatsSnapshot :: NodeThreadPoolStats + , nodeThreadPoolsStatsBulk :: NodeThreadPoolStats + , nodeThreadPoolsStatsMerge :: NodeThreadPoolStats + , nodeThreadPoolsStatsGet :: NodeThreadPoolStats + , nodeThreadPoolsStatsManagement :: NodeThreadPoolStats + , nodeThreadPoolsStatsFetchShardStore :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsOptimize :: NodeThreadPoolStats + , nodeThreadPoolsStatsFlush :: NodeThreadPoolStats + , nodeThreadPoolsStatsSearch :: NodeThreadPoolStats + , nodeThreadPoolsStatsWarmer :: NodeThreadPoolStats + , nodeThreadPoolsStatsGeneric :: NodeThreadPoolStats + , nodeThreadPoolsStatsSuggest :: NodeThreadPoolStats + , nodeThreadPoolsStatsRefresh :: NodeThreadPoolStats + , nodeThreadPoolsStatsIndex :: NodeThreadPoolStats + , nodeThreadPoolsStatsListener :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsFetchShardStarted :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsPercolate :: NodeThreadPoolStats + } deriving (Eq, Show) + +data NodeThreadPoolStats = NodeThreadPoolStats { + nodeThreadPoolCompleted :: Int + , nodeThreadPoolLargest :: Int + , nodeThreadPoolRejected :: Int + , nodeThreadPoolActive :: Int + , nodeThreadPoolQueue :: Int + , nodeThreadPoolThreads :: Int + } deriving (Eq, Show) + +data NodeJVMStats = NodeJVMStats { + nodeJVMStatsMappedBufferPool :: JVMBufferPoolStats + , nodeJVMStatsDirectBufferPool :: JVMBufferPoolStats + , nodeJVMStatsGCOldCollector :: JVMGCStats + , nodeJVMStatsGCYoungCollector :: JVMGCStats + , nodeJVMStatsPeakThreadsCount :: Int + , nodeJVMStatsThreadsCount :: Int + , nodeJVMStatsOldPool :: JVMPoolStats + , nodeJVMStatsSurvivorPool :: JVMPoolStats + , nodeJVMStatsYoungPool :: JVMPoolStats + , nodeJVMStatsNonHeapCommitted :: Bytes + , nodeJVMStatsNonHeapUsed :: Bytes + , nodeJVMStatsHeapMax :: Bytes + , nodeJVMStatsHeapCommitted :: Bytes + , nodeJVMStatsHeapUsedPercent :: Int + , nodeJVMStatsHeapUsed :: Bytes + , nodeJVMStatsUptime :: NominalDiffTime + , nodeJVMStatsTimestamp :: UTCTime + } deriving (Eq, Show) + +data JVMBufferPoolStats = JVMBufferPoolStats { + jvmBufferPoolStatsTotalCapacity :: Bytes + , jvmBufferPoolStatsUsed :: Bytes + , jvmBufferPoolStatsCount :: Int + } deriving (Eq, Show) + +data JVMGCStats = JVMGCStats { + jvmGCStatsCollectionTime :: NominalDiffTime + , jvmGCStatsCollectionCount :: Int + } deriving (Eq, Show) + +data JVMPoolStats = JVMPoolStats { + jvmPoolStatsPeakMax :: Bytes + , jvmPoolStatsPeakUsed :: Bytes + , jvmPoolStatsMax :: Bytes + , jvmPoolStatsUsed :: Bytes + } deriving (Eq, Show) + +data NodeProcessStats = NodeProcessStats { + nodeProcessMemTotalVirtual :: Bytes + , nodeProcessMemShare :: Bytes + , nodeProcessMemResident :: Bytes + , nodeProcessCPUTotal :: NominalDiffTime + , nodeProcessCPUUser :: NominalDiffTime + , nodeProcessCPUSys :: NominalDiffTime + , 
nodeProcessCPUPercent :: Int + , nodeProcessOpenFDs :: Int + , nodeProcessTimestamp :: UTCTime + } deriving (Eq, Show) + +data NodeOSStats = NodeOSStats { + nodeOSSwapFree :: Bytes + , nodeOSSwapUsed :: Bytes + , nodeOSMemActualUsed :: Bytes + , nodeOSMemActualFree :: Bytes + , nodeOSMemUsedPercent :: Int + , nodeOSMemFreePercent :: Int + , nodeOSMemUsed :: Bytes + , nodeOSMemFree :: Bytes + , nodeOSCPUStolen :: Int + , nodeOSCPUUsage :: Int + , nodeOSCPUIdle :: Int + , nodeOSCPUUser :: Int + , nodeOSCPUSys :: Int + , nodeOSLoad :: Maybe LoadAvgs + , nodeOSUptime :: NominalDiffTime + , nodeOSTimestamp :: UTCTime + } deriving (Eq, Show) + +data LoadAvgs = LoadAvgs { + loadAvg1Min :: Double + , loadAvg5Min :: Double + , loadAvg15Min :: Double + } deriving (Eq, Show) + +data NodeIndicesStats = NodeIndicesStats { + nodeIndicesStatsRecoveryThrottleTime :: Maybe NominalDiffTime + , nodeIndicesStatsRecoveryCurrentAsTarget :: Maybe Int + , nodeIndicesStatsRecoveryCurrentAsSource :: Maybe Int + , nodeIndicesStatsQueryCacheMisses :: Maybe Int + , nodeIndicesStatsQueryCacheHits :: Maybe Int + , nodeIndicesStatsQueryCacheEvictions :: Maybe Int + , nodeIndicesStatsQueryCacheSize :: Maybe Bytes + , nodeIndicesStatsSuggestCurrent :: Int + , nodeIndicesStatsSuggestTime :: NominalDiffTime + , nodeIndicesStatsSuggestTotal :: Int + , nodeIndicesStatsTranslogSize :: Bytes + , nodeIndicesStatsTranslogOps :: Int + , nodeIndicesStatsSegFixedBitSetMemory :: Maybe Bytes + , nodeIndicesStatsSegVersionMapMemory :: Bytes + , nodeIndicesStatsSegIndexWriterMaxMemory :: Maybe Bytes + , nodeIndicesStatsSegIndexWriterMemory :: Bytes + , nodeIndicesStatsSegMemory :: Bytes + , nodeIndicesStatsSegCount :: Int + , nodeIndicesStatsCompletionSize :: Bytes + , nodeIndicesStatsPercolateQueries :: Int + , nodeIndicesStatsPercolateMemory :: Bytes + , nodeIndicesStatsPercolateCurrent :: Int + , nodeIndicesStatsPercolateTime :: NominalDiffTime + , nodeIndicesStatsPercolateTotal :: Int + , nodeIndicesStatsFieldDataEvictions :: Int + , nodeIndicesStatsFieldDataMemory :: Bytes + , nodeIndicesStatsIDCacheMemory :: Bytes + , nodeIndicesStatsFilterCacheEvictions :: Int + , nodeIndicesStatsFilterCacheMemory :: Bytes + , nodeIndicesStatsWarmerTotalTime :: NominalDiffTime + , nodeIndicesStatsWarmerTotal :: Int + , nodeIndicesStatsWarmerCurrent :: Int + , nodeIndicesStatsFlushTotalTime :: NominalDiffTime + , nodeIndicesStatsFlushTotal :: Int + , nodeIndicesStatsRefreshTotalTime :: NominalDiffTime + , nodeIndicesStatsRefreshTotal :: Int + , nodeIndicesStatsMergesTotalSize :: Bytes + , nodeIndicesStatsMergesTotalDocs :: Int + , nodeIndicesStatsMergesTotalTime :: NominalDiffTime + , nodeIndicesStatsMergesTotal :: Int + , nodeIndicesStatsMergesCurrentSize :: Bytes + , nodeIndicesStatsMergesCurrentDocs :: Int + , nodeIndicesStatsMergesCurrent :: Int + , nodeIndicesStatsSearchFetchCurrent :: Int + , nodeIndicesStatsSearchFetchTime :: NominalDiffTime + , nodeIndicesStatsSearchFetchTotal :: Int + , nodeIndicesStatsSearchQueryCurrent :: Int + , nodeIndicesStatsSearchQueryTime :: NominalDiffTime + , nodeIndicesStatsSearchQueryTotal :: Int + , nodeIndicesStatsSearchOpenContexts :: Int + , nodeIndicesStatsGetCurrent :: Int + , nodeIndicesStatsGetMissingTime :: NominalDiffTime + , nodeIndicesStatsGetMissingTotal :: Int + , nodeIndicesStatsGetExistsTime :: NominalDiffTime + , nodeIndicesStatsGetExistsTotal :: Int + , nodeIndicesStatsGetTime :: NominalDiffTime + , nodeIndicesStatsGetTotal :: Int + , nodeIndicesStatsIndexingThrottleTime :: Maybe 
NominalDiffTime + , nodeIndicesStatsIndexingIsThrottled :: Maybe Bool + , nodeIndicesStatsIndexingNoopUpdateTotal :: Maybe Int + , nodeIndicesStatsIndexingDeleteCurrent :: Int + , nodeIndicesStatsIndexingDeleteTime :: NominalDiffTime + , nodeIndicesStatsIndexingDeleteTotal :: Int + , nodeIndicesStatsIndexingIndexCurrent :: Int + , nodeIndicesStatsIndexingIndexTime :: NominalDiffTime + , nodeIndicesStatsIndexingTotal :: Int + , nodeIndicesStatsStoreThrottleTime :: NominalDiffTime + , nodeIndicesStatsStoreSize :: Bytes + , nodeIndicesStatsDocsDeleted :: Int + , nodeIndicesStatsDocsCount :: Int + } deriving (Eq, Show) + +data NodeInfo = NodeInfo { + nodeInfoHTTPAddress :: EsAddress + , nodeInfoBuild :: BuildHash + , nodeInfoESVersion :: VersionNumber + , nodeInfoIP :: Server + , nodeInfoHost :: Server + , nodeInfoTransportAddress :: EsAddress + , nodeInfoName :: NodeName + , nodeInfoFullId :: FullNodeId + , nodeInfoPlugins :: [NodePluginInfo] + , nodeInfoHTTP :: NodeHTTPInfo + , nodeInfoTransport :: NodeTransportInfo + , nodeInfoNetwork :: NodeNetworkInfo + , nodeInfoThreadPool :: NodeThreadPoolsInfo + , nodeInfoJVM :: NodeJVMInfo + , nodeInfoProcess :: NodeProcessInfo + , nodeInfoOS :: NodeOSInfo + , nodeInfoSettings :: Object + -- ^ The members of the settings objects are not consistent, + -- dependent on plugins, etc. + } deriving (Eq, Show) + +data NodePluginInfo = NodePluginInfo { + nodePluginSite :: Bool + -- ^ Is this a site plugin? + , nodePluginJVM :: Bool + -- ^ Is this plugin running on the JVM + , nodePluginDescription :: Text + , nodePluginVersion :: MaybeNA VersionNumber + , nodePluginName :: PluginName + } deriving (Eq, Show) + +data NodeHTTPInfo = NodeHTTPInfo { + nodeHTTPMaxContentLength :: Bytes + , nodeHTTPTransportAddress :: BoundTransportAddress + } deriving (Eq, Show) + +data NodeTransportInfo = NodeTransportInfo { + nodeTransportProfiles :: [BoundTransportAddress] + , nodeTransportAddress :: BoundTransportAddress + } deriving (Eq, Show) + +data BoundTransportAddress = BoundTransportAddress { + publishAddress :: EsAddress + , boundAddress :: EsAddress + } deriving (Eq, Show) + +data NodeNetworkInfo = NodeNetworkInfo { + nodeNetworkPrimaryInterface :: NodeNetworkInterface + , nodeNetworkRefreshInterval :: NominalDiffTime + } deriving (Eq, Show) + +newtype MacAddress = MacAddress { macAddress :: Text } + deriving (Eq, Ord, Show, FromJSON) + +newtype NetworkInterfaceName = NetworkInterfaceName { networkInterfaceName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +data NodeNetworkInterface = NodeNetworkInterface { + nodeNetIfaceMacAddress :: MacAddress + , nodeNetIfaceName :: NetworkInterfaceName + , nodeNetIfaceAddress :: Server + } deriving (Eq, Show) + +data NodeThreadPoolsInfo = NodeThreadPoolsInfo { + nodeThreadPoolsRefresh :: NodeThreadPoolInfo + , nodeThreadPoolsManagement :: NodeThreadPoolInfo + , nodeThreadPoolsPercolate :: NodeThreadPoolInfo + , nodeThreadPoolsListener :: Maybe NodeThreadPoolInfo + , nodeThreadPoolsFetchShardStarted :: Maybe NodeThreadPoolInfo + , nodeThreadPoolsSearch :: NodeThreadPoolInfo + , nodeThreadPoolsFlush :: NodeThreadPoolInfo + , nodeThreadPoolsWarmer :: NodeThreadPoolInfo + , nodeThreadPoolsOptimize :: NodeThreadPoolInfo + , nodeThreadPoolsBulk :: NodeThreadPoolInfo + , nodeThreadPoolsSuggest :: NodeThreadPoolInfo + , nodeThreadPoolsMerge :: NodeThreadPoolInfo + , nodeThreadPoolsSnapshot :: NodeThreadPoolInfo + , nodeThreadPoolsGet :: NodeThreadPoolInfo + , nodeThreadPoolsFetchShardStore :: Maybe NodeThreadPoolInfo + , 
nodeThreadPoolsIndex :: NodeThreadPoolInfo
+  , nodeThreadPoolsGeneric :: NodeThreadPoolInfo
+  } deriving (Eq, Show)
+
+data NodeThreadPoolInfo = NodeThreadPoolInfo {
+    nodeThreadPoolQueueSize :: ThreadPoolSize
+  , nodeThreadPoolKeepalive :: Maybe NominalDiffTime
+  , nodeThreadPoolMin :: Maybe Int
+  , nodeThreadPoolMax :: Maybe Int
+  , nodeThreadPoolType :: ThreadPoolType
+  } deriving (Eq, Show)
+
+data ThreadPoolSize = ThreadPoolBounded Int
+                    | ThreadPoolUnbounded
+                    deriving (Eq, Show)
+
+data ThreadPoolType = ThreadPoolScaling
+                    | ThreadPoolFixed
+                    | ThreadPoolCached
+                    deriving (Eq, Show)
+
+data NodeJVMInfo = NodeJVMInfo {
+    nodeJVMInfoMemoryPools :: [JVMMemoryPool]
+  , nodeJVMInfoMemoryPoolsGCCollectors :: [JVMGCCollector]
+  , nodeJVMInfoMemoryInfo :: JVMMemoryInfo
+  , nodeJVMInfoStartTime :: UTCTime
+  , nodeJVMInfoVMVendor :: Text
+  , nodeJVMVMVersion :: VersionNumber
+  -- ^ The JVM doesn't seem to follow normal version conventions
+  , nodeJVMVMName :: Text
+  , nodeJVMVersion :: VersionNumber
+  , nodeJVMPID :: PID
+  } deriving (Eq, Show)
+
+-- | Handles quirks in the way JVM versions are rendered (1.7.0_101 -> 1.7.0.101)
+newtype JVMVersion = JVMVersion { unJVMVersion :: VersionNumber }
+
+data JVMMemoryInfo = JVMMemoryInfo {
+    jvmMemoryInfoDirectMax :: Bytes
+  , jvmMemoryInfoNonHeapMax :: Bytes
+  , jvmMemoryInfoNonHeapInit :: Bytes
+  , jvmMemoryInfoHeapMax :: Bytes
+  , jvmMemoryInfoHeapInit :: Bytes
+  } deriving (Eq, Show)
+
+newtype JVMMemoryPool = JVMMemoryPool {
+    jvmMemoryPool :: Text
+  } deriving (Eq, Show, FromJSON)
+
+newtype JVMGCCollector = JVMGCCollector {
+    jvmGCCollector :: Text
+  } deriving (Eq, Show, FromJSON)
+
+newtype PID = PID {
+    pid :: Int
+  } deriving (Eq, Show, FromJSON)
+
+data NodeOSInfo = NodeOSInfo {
+    nodeOSSwap :: Bytes
+  , nodeOSMem :: Bytes
+  , nodeOSCPUInfo :: CPUInfo
+  , nodeOSAvailableProcessors :: Int
+  , nodeOSRefreshInterval :: NominalDiffTime
+  } deriving (Eq, Show)
+
+data CPUInfo = CPUInfo {
+    cpuCacheSize :: Bytes
+  , cpuCoresPerSocket :: Int
+  , cpuTotalSockets :: Int
+  , cpuTotalCores :: Int
+  , cpuMHZ :: Int
+  , cpuModel :: Text
+  , cpuVendor :: Text
+  } deriving (Eq, Show)
+
+data NodeProcessInfo = NodeProcessInfo {
+    nodeProcessMLockAll :: Bool
+  -- ^ Whether memory locking (mlockall) is enabled for the process
+  , nodeProcessMaxFileDescriptors :: Int
+  , nodeProcessId :: PID
+  , nodeProcessRefreshInterval :: NominalDiffTime
+  } deriving (Eq, Show)
+
+-- | A quirky address format used throughout ElasticSearch. An example
+-- would be inet[/1.1.1.1:9200]. inet may be a placeholder for a
+-- fully qualified domain name.
+newtype EsAddress = EsAddress { esAddress :: Text }
+  deriving (Eq, Ord, Show, FromJSON)
+
+newtype PluginName = PluginName { pluginName :: Text }
+  deriving (Eq, Ord, Show, FromJSON)
+
+data ShardResult =
+  ShardResult { shardTotal :: Int
+              , shardsSuccessful :: Int
+              , shardsFailed :: Int } deriving (Eq, Show)
+
+data SnapshotState = SnapshotInit
+                   | SnapshotStarted
+                   | SnapshotSuccess
+                   | SnapshotFailed
+                   | SnapshotAborted
+                   | SnapshotMissing
+                   | SnapshotWaiting
+                   deriving (Show, Eq)
+
+instance FromJSON SnapshotState where
+  parseJSON = withText "SnapshotState" parse
+    where
+      parse "INIT" = return SnapshotInit
+      parse "STARTED" = return SnapshotStarted
+      parse "SUCCESS" = return SnapshotSuccess
+      parse "FAILED" = return SnapshotFailed
+      parse "ABORTED" = return SnapshotAborted
+      parse "MISSING" = return SnapshotMissing
+      parse "WAITING" = return SnapshotWaiting
+      parse t = fail ("Invalid snapshot state " <> T.unpack t)
+
+data SnapshotRestoreSettings = SnapshotRestoreSettings {
+    snapRestoreWaitForCompletion :: Bool
+  -- ^ Should the API call return immediately after initializing
+  -- the restore or wait until completed? Note that if this is
+  -- enabled, it could wait a long time, so you should adjust your
+  -- 'ManagerSettings' accordingly to set long timeouts or
+  -- explicitly handle timeouts.
+  , snapRestoreIndices :: Maybe IndexSelection
+  -- ^ Nothing will restore all indices in the snapshot. Just [] is
+  -- permissible and will essentially be a no-op restore.
+  , snapRestoreIgnoreUnavailable :: Bool
+  -- ^ If set to True, any indices that do not exist will be ignored
+  -- during the restore rather than failing it.
+  , snapRestoreIncludeGlobalState :: Bool
+  -- ^ If set to False, will ignore any global state in the snapshot
+  -- and will not restore it.
+  , snapRestoreRenamePattern :: Maybe RestoreRenamePattern
+  -- ^ A regex pattern for matching indices. Used with
+  -- 'snapRestoreRenameReplacement', the restore can reference the
+  -- matched index and create a new index name upon restore.
+  , snapRestoreRenameReplacement :: Maybe (NonEmpty RestoreRenameToken)
+  -- ^ Expression of how index renames should be constructed.
+  , snapRestorePartial :: Bool
+  -- ^ If some indices fail to restore, should the process proceed?
+  , snapRestoreIncludeAliases :: Bool
+  -- ^ Should the restore also restore the aliases captured in the
+  -- snapshot?
+  , snapRestoreIndexSettingsOverrides :: Maybe RestoreIndexSettings
+  -- ^ Settings to apply during the restore process. __NOTE:__ This
+  -- option is not supported in ES < 1.5 and should be set to
+  -- Nothing in that case.
+  , snapRestoreIgnoreIndexSettings :: Maybe (NonEmpty Text)
+  -- ^ This type could be more rich but it isn't clear which
+  -- settings are allowed to be ignored during restore, so we're
+  -- going with including this feature in a basic form rather than
+  -- omitting it. One example here would be
+  -- "index.refresh_interval". Any setting specified here will
+  -- revert back to the server default during the restore process.
+  } deriving (Eq, Show)
+
+-- | Regex-style pattern, e.g. @"index_(.+)"@, to match index names
+newtype RestoreRenamePattern = RestoreRenamePattern { rrPattern :: Text }
+  deriving (Show, Eq, Ord, ToJSON)
+
+
+-- | A single token in an index renaming scheme for a restore. These
+-- are concatenated into a string before being sent to
+-- ElasticSearch. Check out the Java regex Matcher documentation to
+-- find out more if you're into that sort of thing.
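+--
+-- For example (names illustrative): with a 'RestoreRenamePattern' of
+-- @"index_(.+)"@ and replacement tokens
+-- @RRTLit "restored_" :| [RRSubGroup g]@, where @Just g = mkRRGroupRefNum 1@,
+-- an index named @index_foo@ would be restored as @restored_foo@.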
+data RestoreRenameToken = RRTLit Text
+                        -- ^ Just a literal string of characters
+                        | RRSubWholeMatch
+                        -- ^ Equivalent to $0. The entire matched pattern, not any subgroup
+                        | RRSubGroup RRGroupRefNum
+                        -- ^ A specific reference to a group number
+                        deriving (Show, Eq)
+
+
+-- | A group number for regex matching. Only values from 1-9 are
+-- supported. Construct with 'mkRRGroupRefNum'.
+newtype RRGroupRefNum = RRGroupRefNum { rrGroupRefNum :: Int }
+  deriving (Show, Eq, Ord)
+
+instance Bounded RRGroupRefNum where
+  minBound = RRGroupRefNum 1
+  maxBound = RRGroupRefNum 9
+
+
+-- | Only allows valid group number references (1-9).
+mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum
+mkRRGroupRefNum i
+  | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) =
+    Just $ RRGroupRefNum i
+  | otherwise = Nothing
+
+-- | Index settings that can be overridden. The docs only mention you
+-- can update number of replicas, but there may be more. You
+-- definitely cannot override shard count.
+data RestoreIndexSettings = RestoreIndexSettings {
+    restoreOverrideReplicas :: Maybe ReplicaCount
+  } deriving (Show, Eq)
+
+instance ToJSON RestoreIndexSettings where
+  toJSON RestoreIndexSettings {..} = object prs
+    where
+      prs = catMaybes [("index.number_of_replicas" .=) <$> restoreOverrideReplicas]
+
+data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings {
+    repoUpdateVerify :: Bool
+  -- ^ After creation/update, synchronously check that nodes can
+  -- write to this repo. Defaults to True. You may use False if you
+  -- need a faster response and plan on verifying manually later
+  -- with 'verifySnapshotRepo'.
+  } deriving (Eq, Show)
+
+-- | Reasonable defaults for repo creation/update
+--
+-- * repoUpdateVerify True
+defaultSnapshotRepoUpdateSettings :: SnapshotRepoUpdateSettings
+defaultSnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings True
+
+-- | A filesystem-based snapshot repo that ships with
+-- ElasticSearch. This is an instance of 'SnapshotRepo' so it can be
+-- used with 'updateSnapshotRepo'.
+data FsSnapshotRepo = FsSnapshotRepo {
+    fsrName :: SnapshotRepoName
+  , fsrLocation :: FilePath
+  , fsrCompressMetadata :: Bool
+  , fsrChunkSize :: Maybe Bytes
+  -- ^ Size by which to split large files during snapshotting.
+  , fsrMaxRestoreBytesPerSec :: Maybe Bytes
+  -- ^ Throttle node restore rate. If not supplied, defaults to 40mb/sec
+  , fsrMaxSnapshotBytesPerSec :: Maybe Bytes
+  -- ^ Throttle node snapshot rate. If not supplied, defaults to 40mb/sec
+  } deriving (Eq, Show)
+
+instance SnapshotRepo FsSnapshotRepo where
+  toGSnapshotRepo FsSnapshotRepo {..} =
+    GenericSnapshotRepo fsrName fsRepoType (GenericSnapshotRepoSettings settings)
+    where
+      Object settings = object $ [ "location" .= fsrLocation
+                                 , "compress" .= fsrCompressMetadata
+                                 ] ++ optionalPairs
+      optionalPairs = catMaybes [ ("chunk_size" .=) <$> fsrChunkSize
+                                , ("max_restore_bytes_per_sec" .=) <$> fsrMaxRestoreBytesPerSec
+                                , ("max_snapshot_bytes_per_sec" .=) <$> fsrMaxSnapshotBytesPerSec
+                                ]
+  fromGSnapshotRepo GenericSnapshotRepo {..}
+    | gSnapshotRepoType == fsRepoType = do
+        let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings
+        parseRepo $
+          FsSnapshotRepo gSnapshotRepoName <$> o .: "location"
+                                           <*> o .:? "compress" .!= False
+                                           <*> o .:? "chunk_size"
+                                           <*> o .:? "max_restore_bytes_per_sec"
+                                           <*> o .:? "max_snapshot_bytes_per_sec"
+    | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType)
+
+data SnapshotCreateSettings = SnapshotCreateSettings {
+    snapWaitForCompletion :: Bool
+  -- ^ Should the API call return immediately after initializing
+  -- the snapshot or wait until completed? Note that if this is
+  -- enabled it could wait a long time, so you should adjust your
+  -- 'ManagerSettings' accordingly to set long timeouts or
+  -- explicitly handle timeouts.
+  , snapIndices :: Maybe IndexSelection
+  -- ^ Nothing will snapshot all indices. Just [] is permissible and
+  -- will essentially be a no-op snapshot.
+  , snapIgnoreUnavailable :: Bool
+  -- ^ If set to True, any matched indices that don't exist will be
+  -- ignored. Otherwise, missing indices will cause the snapshot to fail.
+  , snapIncludeGlobalState :: Bool
+  , snapPartial :: Bool
+  -- ^ If some indices failed to snapshot (e.g. if not all primary
+  -- shards are available), should the process proceed?
+  } deriving (Eq, Show)
+
+-- | Reasonable defaults for snapshot creation
+--
+-- * snapWaitForCompletion False
+-- * snapIndices Nothing
+-- * snapIgnoreUnavailable False
+-- * snapIncludeGlobalState True
+-- * snapPartial False
+defaultSnapshotCreateSettings :: SnapshotCreateSettings
+defaultSnapshotCreateSettings = SnapshotCreateSettings {
+    snapWaitForCompletion = False
+  , snapIndices = Nothing
+  , snapIgnoreUnavailable = False
+  , snapIncludeGlobalState = True
+  , snapPartial = False
+  }
+
+
+data SnapshotSelection = SnapshotList (NonEmpty SnapshotPattern)
+                       | AllSnapshots deriving (Eq, Show)
+
+
+-- | Either specifies an exact snapshot name or one with globs in it,
+-- e.g. @SnapPattern "foo*"@. __NOTE__: Patterns are not supported on
+-- ES < 1.7.
+data SnapshotPattern = ExactSnap SnapshotName
+                     | SnapPattern Text
+                     deriving (Eq, Show)
+
+
+-- | General information about the state of a snapshot. Has some
+-- redundancies with 'SnapshotStatus'.
+data SnapshotInfo = SnapshotInfo {
+    snapInfoShards :: ShardResult
+  , snapInfoFailures :: [SnapshotShardFailure]
+  , snapInfoDuration :: NominalDiffTime
+  , snapInfoEndTime :: UTCTime
+  , snapInfoStartTime :: UTCTime
+  , snapInfoState :: SnapshotState
+  , snapInfoIndices :: [IndexName]
+  , snapInfoName :: SnapshotName
+  } deriving (Eq, Show)
+
+
+instance FromJSON SnapshotInfo where
+  parseJSON = withObject "SnapshotInfo" parse
+    where
+      parse o = SnapshotInfo <$> o .: "shards"
+                             <*> o .: "failures"
+                             <*> (unMS <$> o .: "duration_in_millis")
+                             <*> (posixMS <$> o .: "end_time_in_millis")
+                             <*> (posixMS <$> o .: "start_time_in_millis")
+                             <*> o .: "state"
+                             <*> o .: "indices"
+                             <*> o .: "snapshot"
+
+data SnapshotShardFailure = SnapshotShardFailure {
+    snapShardFailureIndex :: IndexName
+  , snapShardFailureNodeId :: Maybe NodeName -- I'm not 100% sure this isn't actually 'FullNodeId'
+  , snapShardFailureReason :: Text
+  , snapShardFailureShardId :: ShardId
+  } deriving (Eq, Show)
+
+
+instance FromJSON SnapshotShardFailure where
+  parseJSON = withObject "SnapshotShardFailure" parse
+    where
+      parse o = SnapshotShardFailure <$> o .: "index"
+                                     <*> o .:? "node_id"
+                                     <*> o .: "reason"
+                                     <*> o .: "shard_id"
+
+parseRepo :: Parser a -> Either SnapshotRepoConversionError a
+parseRepo parser = case parseEither (const parser) () of
+  Left e -> Left (OtherRepoConversionError (T.pack e))
+  Right a -> Right a
+
+parseNodeStats :: FullNodeId -> Object -> Parser NodeStats
+parseNodeStats fnid o =
+  NodeStats <$> o .: "name"
+            <*> pure fnid
+            <*> o .:? "breakers"
+            <*> o .: "http"
+            <*> o .: "transport"
+            <*> o .: "fs"
+            <*> o .: "network"
+            <*> o .: "thread_pool"
+            <*> o .: "jvm"
+            <*> o .: "process"
+            <*> o .: "os"
+            <*> o .: "indices"
+
+parseNodeInfo :: FullNodeId -> Object -> Parser NodeInfo
+parseNodeInfo nid o =
+  NodeInfo <$> o .: "http_address"
+           <*> o .: "build"
+           <*> o .: "version"
+           <*> o .: "ip"
+           <*> o .: "host"
+           <*> o .: "transport_address"
+           <*> o .: "name"
+           <*> pure nid
+           <*> o .: "plugins"
+           <*> o .: "http"
+           <*> o .: "transport"
+           <*> o .: "network"
+           <*> o .: "thread_pool"
+           <*> o .: "jvm"
+           <*> o .: "process"
+           <*> o .: "os"
+           <*> o .: "settings"
+
+-- | Reasonable defaults for snapshot restores
+--
+-- * snapRestoreWaitForCompletion False
+-- * snapRestoreIndices Nothing
+-- * snapRestoreIgnoreUnavailable False
+-- * snapRestoreIncludeGlobalState True
+-- * snapRestoreRenamePattern Nothing
+-- * snapRestoreRenameReplacement Nothing
+-- * snapRestorePartial False
+-- * snapRestoreIncludeAliases True
+-- * snapRestoreIndexSettingsOverrides Nothing
+-- * snapRestoreIgnoreIndexSettings Nothing
+defaultSnapshotRestoreSettings :: SnapshotRestoreSettings
+defaultSnapshotRestoreSettings = SnapshotRestoreSettings {
+    snapRestoreWaitForCompletion = False
+  , snapRestoreIndices = Nothing
+  , snapRestoreIgnoreUnavailable = False
+  , snapRestoreIncludeGlobalState = True
+  , snapRestoreRenamePattern = Nothing
+  , snapRestoreRenameReplacement = Nothing
+  , snapRestorePartial = False
+  , snapRestoreIncludeAliases = True
+  , snapRestoreIndexSettingsOverrides = Nothing
+  , snapRestoreIgnoreIndexSettings = Nothing
+  }
+
+fsRepoType :: SnapshotRepoType
+fsRepoType = SnapshotRepoType "fs"
+
+{-| 'Reply' is a type synonym for a 'Network.HTTP.Client.Response' over a lazy ByteString. -}
+type Reply = Network.HTTP.Client.Response LByteString
+
+instance FromJSON IndexSettingsSummary where
+  parseJSON = withObject "IndexSettingsSummary" parse
+    where parse o = case HM.toList o of
+                      [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn)
+                                                <$> parseJSON v
+                                                <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings")
+                      _ -> fail "Expected single-key object with index name"
+          redundant (NumberOfReplicas _) = True
+          redundant _ = False
+
+instance ToJSON VersionNumber where
+  toJSON = toJSON . Vers.showVersion . versionNumber
+
+instance FromJSON VersionNumber where
+  parseJSON = withText "VersionNumber" (parse . T.unpack)
+    where
+      parse s = case filter (null . snd) (RP.readP_to_S Vers.parseVersion s) of
+                  [(v, _)] -> pure (VersionNumber v)
+                  [] -> fail ("Invalid version string " ++ s)
+                  xs -> fail ("Ambiguous version string " ++ s ++ " (" ++ intercalate ", " (Vers.showVersion .
fst <$> xs) ++ ")") + +instance ToJSON Interval where + toJSON Year = "year" + toJSON Quarter = "quarter" + toJSON Month = "month" + toJSON Week = "week" + toJSON Day = "day" + toJSON Hour = "hour" + toJSON Minute = "minute" + toJSON Second = "second" + toJSON (FractionalInterval fraction interval) = toJSON $ show fraction ++ show interval + +parseStringInterval :: (Monad m) => String -> m NominalDiffTime +parseStringInterval s = case span isNumber s of + ("", _) -> fail "Invalid interval" + (nS, unitS) -> case (readMay nS, readMay unitS) of + (Just n, Just unit) -> return (fromInteger (n * unitNDT unit)) + (Nothing, _) -> fail "Invalid interval number" + (_, Nothing) -> fail "Invalid interval unit" + where + unitNDT Seconds = 1 + unitNDT Minutes = 60 + unitNDT Hours = 60 * 60 + unitNDT Days = 24 * 60 * 60 + unitNDT Weeks = 7 * 24 * 60 * 60 + +instance ToJSON IndexSettings where + toJSON (IndexSettings s r) = object ["settings" .= + object ["index" .= + object ["number_of_shards" .= s, "number_of_replicas" .= r] + ] + ] + +instance FromJSON IndexSettings where + parseJSON = withObject "IndexSettings" parse + where parse o = do s <- o .: "settings" + i <- s .: "index" + IndexSettings <$> i .: "number_of_shards" + <*> i .: "number_of_replicas" + +instance Bounded DocVersion where + minBound = DocVersion 1 + maxBound = DocVersion 9200000000000000000 -- 9.2e+18 + +instance Enum DocVersion where + succ x + | x /= maxBound = DocVersion (succ $ docVersionNumber x) + | otherwise = succError "DocVersion" + pred x + | x /= minBound = DocVersion (pred $ docVersionNumber x) + | otherwise = predError "DocVersion" + toEnum i = + fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i + fromEnum = docVersionNumber + enumFrom = boundedEnumFrom + enumFromThen = boundedEnumFromThen + +instance FromJSON ShardResult where + parseJSON (Object v) = ShardResult <$> + v .: "total" <*> + v .: "successful" <*> + v .: "failed" + parseJSON _ = empty + +instance FromJSON NodeBreakerStats where + parseJSON = withObject "NodeBreakerStats" parse + where + parse o = NodeBreakerStats <$> o .: "tripped" + <*> o .: "overhead" + <*> o .: "estimated_size_in_bytes" + <*> o .: "limit_size_in_bytes" + +instance FromJSON NodeHTTPStats where + parseJSON = withObject "NodeHTTPStats" parse + where + parse o = NodeHTTPStats <$> o .: "total_opened" + <*> o .: "current_open" + +instance FromJSON NodeTransportStats where + parseJSON = withObject "NodeTransportStats" parse + where + parse o = NodeTransportStats <$> o .: "tx_size_in_bytes" + <*> o .: "tx_count" + <*> o .: "rx_size_in_bytes" + <*> o .: "rx_count" + <*> o .: "server_open" + +instance FromJSON NodeFSStats where + parseJSON = withObject "NodeFSStats" parse + where + parse o = NodeFSStats <$> o .: "data" + <*> o .: "total" + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON NodeDataPathStats where + parseJSON = withObject "NodeDataPathStats" parse + where + parse o = + NodeDataPathStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") + <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") + <*> o .:? "disk_io_size_in_bytes" + <*> o .:? "disk_write_size_in_bytes" + <*> o .:? "disk_read_size_in_bytes" + <*> o .:? "disk_io_op" + <*> o .:? "disk_writes" + <*> o .:? "disk_reads" + <*> o .: "available_in_bytes" + <*> o .: "free_in_bytes" + <*> o .: "total_in_bytes" + <*> o .:? 
"type" + <*> o .: "dev" + <*> o .: "mount" + <*> o .: "path" + +instance FromJSON NodeNetworkStats where + parseJSON = withObject "NodeNetworkStats" parse + where + parse o = do + tcp <- o .: "tcp" + NodeNetworkStats <$> tcp .: "out_rsts" + <*> tcp .: "in_errs" + <*> tcp .: "attempt_fails" + <*> tcp .: "estab_resets" + <*> tcp .: "retrans_segs" + <*> tcp .: "out_segs" + <*> tcp .: "in_segs" + <*> tcp .: "curr_estab" + <*> tcp .: "passive_opens" + <*> tcp .: "active_opens" + +instance FromJSON NodeThreadPoolsStats where + parseJSON = withObject "NodeThreadPoolsStats" parse + where + parse o = NodeThreadPoolsStats <$> o .: "snapshot" + <*> o .: "bulk" + <*> o .: "merge" + <*> o .: "get" + <*> o .: "management" + <*> o .:? "fetch_shard_store" + <*> o .: "optimize" + <*> o .: "flush" + <*> o .: "search" + <*> o .: "warmer" + <*> o .: "generic" + <*> o .: "suggest" + <*> o .: "refresh" + <*> o .: "index" + <*> o .:? "listener" + <*> o .:? "fetch_shard_started" + <*> o .: "percolate" +instance FromJSON NodeThreadPoolStats where + parseJSON = withObject "NodeThreadPoolStats" parse + where + parse o = NodeThreadPoolStats <$> o .: "completed" + <*> o .: "largest" + <*> o .: "rejected" + <*> o .: "active" + <*> o .: "queue" + <*> o .: "threads" + +instance FromJSON NodeJVMStats where + parseJSON = withObject "NodeJVMStats" parse + where + parse o = do + bufferPools <- o .: "buffer_pools" + mapped <- bufferPools .: "mapped" + direct <- bufferPools .: "direct" + gc <- o .: "gc" + collectors <- gc .: "collectors" + oldC <- collectors .: "old" + youngC <- collectors .: "young" + threads <- o .: "threads" + mem <- o .: "mem" + pools <- mem .: "pools" + oldM <- pools .: "old" + survivorM <- pools .: "survivor" + youngM <- pools .: "young" + NodeJVMStats <$> pure mapped + <*> pure direct + <*> pure oldC + <*> pure youngC + <*> threads .: "peak_count" + <*> threads .: "count" + <*> pure oldM + <*> pure survivorM + <*> pure youngM + <*> mem .: "non_heap_committed_in_bytes" + <*> mem .: "non_heap_used_in_bytes" + <*> mem .: "heap_max_in_bytes" + <*> mem .: "heap_committed_in_bytes" + <*> mem .: "heap_used_percent" + <*> mem .: "heap_used_in_bytes" + <*> (unMS <$> o .: "uptime_in_millis") + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON JVMBufferPoolStats where + parseJSON = withObject "JVMBufferPoolStats" parse + where + parse o = JVMBufferPoolStats <$> o .: "total_capacity_in_bytes" + <*> o .: "used_in_bytes" + <*> o .: "count" + +instance FromJSON JVMGCStats where + parseJSON = withObject "JVMGCStats" parse + where + parse o = JVMGCStats <$> (unMS <$> o .: "collection_time_in_millis") + <*> o .: "collection_count" + +instance FromJSON JVMPoolStats where + parseJSON = withObject "JVMPoolStats" parse + where + parse o = JVMPoolStats <$> o .: "peak_max_in_bytes" + <*> o .: "peak_used_in_bytes" + <*> o .: "max_in_bytes" + <*> o .: "used_in_bytes" + +instance FromJSON NodeProcessStats where + parseJSON = withObject "NodeProcessStats" parse + where + parse o = do + mem <- o .: "mem" + cpu <- o .: "cpu" + NodeProcessStats <$> mem .: "total_virtual_in_bytes" + <*> mem .: "share_in_bytes" + <*> mem .: "resident_in_bytes" + <*> (unMS <$> cpu .: "total_in_millis") + <*> (unMS <$> cpu .: "user_in_millis") + <*> (unMS <$> cpu .: "sys_in_millis") + <*> cpu .: "percent" + <*> o .: "open_file_descriptors" + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON NodeOSStats where + parseJSON = withObject "NodeOSStats" parse + where + parse o = do + swap <- o .: "swap" + mem <- o .: "mem" + cpu <- o .: "cpu" + load 
<- o .:? "load_average" + NodeOSStats <$> swap .: "free_in_bytes" + <*> swap .: "used_in_bytes" + <*> mem .: "actual_used_in_bytes" + <*> mem .: "actual_free_in_bytes" + <*> mem .: "used_percent" + <*> mem .: "free_percent" + <*> mem .: "used_in_bytes" + <*> mem .: "free_in_bytes" + <*> cpu .: "stolen" + <*> cpu .: "usage" + <*> cpu .: "idle" + <*> cpu .: "user" + <*> cpu .: "sys" + <*> pure load + <*> (unMS <$> o .: "uptime_in_millis") + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON LoadAvgs where + parseJSON = withArray "LoadAvgs" parse + where + parse v = case V.toList v of + [one, five, fifteen] -> LoadAvgs <$> parseJSON one + <*> parseJSON five + <*> parseJSON fifteen + _ -> fail "Expecting a triple of Doubles" + +instance FromJSON NodeIndicesStats where + parseJSON = withObject "NodeIndicesStats" parse + where + parse o = do + let (.::) mv k = case mv of + Just v -> Just <$> v .: k + Nothing -> pure Nothing + mRecovery <- o .:? "recovery" + mQueryCache <- o .:? "query_cache" + suggest <- o .: "suggest" + translog <- o .: "translog" + segments <- o .: "segments" + completion <- o .: "completion" + percolate <- o .: "percolate" + fielddata <- o .: "fielddata" + idCache <- o .: "id_cache" + filterCache <- o .: "filter_cache" + warmer <- o .: "warmer" + flush <- o .: "flush" + refresh <- o .: "refresh" + merges <- o .: "merges" + search <- o .: "search" + getStats <- o .: "get" + indexing <- o .: "indexing" + store <- o .: "store" + docs <- o .: "docs" + NodeIndicesStats <$> (fmap unMS <$> mRecovery .:: "throttle_time_in_millis") + <*> mRecovery .:: "current_as_target" + <*> mRecovery .:: "current_as_source" + <*> mQueryCache .:: "miss_count" + <*> mQueryCache .:: "hit_count" + <*> mQueryCache .:: "evictions" + <*> mQueryCache .:: "memory_size_in_bytes" + <*> suggest .: "current" + <*> (unMS <$> suggest .: "time_in_millis") + <*> suggest .: "total" + <*> translog .: "size_in_bytes" + <*> translog .: "operations" + <*> segments .:? "fixed_bit_set_memory_in_bytes" + <*> segments .: "version_map_memory_in_bytes" + <*> segments .:? 
"index_writer_max_memory_in_bytes" + <*> segments .: "index_writer_memory_in_bytes" + <*> segments .: "memory_in_bytes" + <*> segments .: "count" + <*> completion .: "size_in_bytes" + <*> percolate .: "queries" + <*> percolate .: "memory_size_in_bytes" + <*> percolate .: "current" + <*> (unMS <$> percolate .: "time_in_millis") + <*> percolate .: "total" + <*> fielddata .: "evictions" + <*> fielddata .: "memory_size_in_bytes" + <*> idCache .: "memory_size_in_bytes" + <*> filterCache .: "evictions" + <*> filterCache .: "memory_size_in_bytes" + <*> (unMS <$> warmer .: "total_time_in_millis") + <*> warmer .: "total" + <*> warmer .: "current" + <*> (unMS <$> flush .: "total_time_in_millis") + <*> flush .: "total" + <*> (unMS <$> refresh .: "total_time_in_millis") + <*> refresh .: "total" + <*> merges .: "total_size_in_bytes" + <*> merges .: "total_docs" + <*> (unMS <$> merges .: "total_time_in_millis") + <*> merges .: "total" + <*> merges .: "current_size_in_bytes" + <*> merges .: "current_docs" + <*> merges .: "current" + <*> search .: "fetch_current" + <*> (unMS <$> search .: "fetch_time_in_millis") + <*> search .: "fetch_total" + <*> search .: "query_current" + <*> (unMS <$> search .: "query_time_in_millis") + <*> search .: "query_total" + <*> search .: "open_contexts" + <*> getStats .: "current" + <*> (unMS <$> getStats .: "missing_time_in_millis") + <*> getStats .: "missing_total" + <*> (unMS <$> getStats .: "exists_time_in_millis") + <*> getStats .: "exists_total" + <*> (unMS <$> getStats .: "time_in_millis") + <*> getStats .: "total" + <*> (fmap unMS <$> indexing .:? "throttle_time_in_millis") + <*> indexing .:? "is_throttled" + <*> indexing .:? "noop_update_total" + <*> indexing .: "delete_current" + <*> (unMS <$> indexing .: "delete_time_in_millis") + <*> indexing .: "delete_total" + <*> indexing .: "index_current" + <*> (unMS <$> indexing .: "index_time_in_millis") + <*> indexing .: "index_total" + <*> (unMS <$> store .: "throttle_time_in_millis") + <*> store .: "size_in_bytes" + <*> docs .: "deleted" + <*> docs .: "count" + +instance FromJSON NodeBreakersStats where + parseJSON = withObject "NodeBreakersStats" parse + where + parse o = NodeBreakersStats <$> o .: "parent" + <*> o .: "request" + <*> o .: "fielddata" + +instance FromJSON NodePluginInfo where + parseJSON = withObject "NodePluginInfo" parse + where + parse o = NodePluginInfo <$> o .: "site" + <*> o .: "jvm" + <*> o .: "description" + <*> o .: "version" + <*> o .: "name" + +instance FromJSON NodeHTTPInfo where + parseJSON = withObject "NodeHTTPInfo" parse + where + parse o = NodeHTTPInfo <$> o .: "max_content_length_in_bytes" + <*> parseJSON (Object o) + +instance FromJSON NodeTransportInfo where + parseJSON = withObject "NodeTransportInfo" parse + where + parse o = NodeTransportInfo <$> (maybe (return mempty) parseProfiles =<< o .:? 
"profiles") + <*> parseJSON (Object o) + parseProfiles (Object o) | HM.null o = return [] + parseProfiles v@(Array _) = parseJSON v + parseProfiles Null = return [] + parseProfiles _ = fail "Could not parse profiles" + +instance FromJSON NodeNetworkInfo where + parseJSON = withObject "NodeNetworkInfo" parse + where + parse o = NodeNetworkInfo <$> o .: "primary_interface" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON NodeNetworkInterface where + parseJSON = withObject "NodeNetworkInterface" parse + where + parse o = NodeNetworkInterface <$> o .: "mac_address" + <*> o .: "name" + <*> o .: "address" + +instance FromJSON NodeJVMInfo where + parseJSON = withObject "NodeJVMInfo" parse + where + parse o = NodeJVMInfo <$> o .: "memory_pools" + <*> o .: "gc_collectors" + <*> o .: "mem" + <*> (posixMS <$> o .: "start_time_in_millis") + <*> o .: "vm_vendor" + <*> o .: "vm_version" + <*> o .: "vm_name" + <*> (unJVMVersion <$> o .: "version") + <*> o .: "pid" + +instance FromJSON NodeThreadPoolsInfo where + parseJSON = withObject "NodeThreadPoolsInfo" parse + where + parse o = NodeThreadPoolsInfo <$> o .: "refresh" + <*> o .: "management" + <*> o .: "percolate" + <*> o .:? "listener" + <*> o .:? "fetch_shard_started" + <*> o .: "search" + <*> o .: "flush" + <*> o .: "warmer" + <*> o .: "optimize" + <*> o .: "bulk" + <*> o .: "suggest" + <*> o .: "merge" + <*> o .: "snapshot" + <*> o .: "get" + <*> o .:? "fetch_shard_store" + <*> o .: "index" + <*> o .: "generic" + +instance FromJSON NodeProcessInfo where + parseJSON = withObject "NodeProcessInfo" parse + where + parse o = NodeProcessInfo <$> o .: "mlockall" + <*> o .: "max_file_descriptors" + <*> o .: "id" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON NodeOSInfo where + parseJSON = withObject "NodeOSInfo" parse + where + parse o = do + swap <- o .: "swap" + mem <- o .: "mem" + NodeOSInfo <$> swap .: "total_in_bytes" + <*> mem .: "total_in_bytes" + <*> o .: "cpu" + <*> o .: "available_processors" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON ReplicaBounds where + parseJSON v = withText "ReplicaBounds" parseText v + <|> withBool "ReplicaBounds" parseBool v + where parseText t = case T.splitOn "-" t of + [a, "all"] -> ReplicasLowerBounded <$> parseReadText a + [a, b] -> ReplicasBounded <$> parseReadText a + <*> parseReadText b + _ -> fail ("Could not parse ReplicaBounds: " <> show t) + parseBool False = pure ReplicasUnbounded + parseBool _ = fail "ReplicasUnbounded cannot be represented with True" + +instance FromJSON NominalDiffTimeJSON where + parseJSON = withText "NominalDiffTime" parse + where parse t = case T.takeEnd 1 t of + "s" -> NominalDiffTimeJSON . 
fromInteger <$> parseReadText (T.dropEnd 1 t)
+                          _ -> fail "Invalid or missing NominalDiffTime unit (expected s)"
+
+instance FromJSON AllocationPolicy where
+  parseJSON = withText "AllocationPolicy" parse
+    where parse "all" = pure AllocAll
+          parse "primaries" = pure AllocPrimaries
+          parse "new_primaries" = pure AllocNewPrimaries
+          parse "none" = pure AllocNone
+          parse t = fail ("Invalid AllocationPolicy: " <> show t)
+
+instance ToJSON CompoundFormat where
+  toJSON (CompoundFileFormat x) = Bool x
+  toJSON (MergeSegmentVsTotalIndex x) = toJSON x
+
+instance FromJSON CompoundFormat where
+  parseJSON v = CompoundFileFormat <$> parseJSON v
+            <|> MergeSegmentVsTotalIndex <$> parseJSON v
+
+instance FromJSON NodeFSTotalStats where
+  parseJSON = withObject "NodeFSTotalStats" parse
+    where
+      parse o = NodeFSTotalStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time")
+                                 <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue")
+                                 <*> o .:? "disk_io_size_in_bytes"
+                                 <*> o .:? "disk_write_size_in_bytes"
+                                 <*> o .:? "disk_read_size_in_bytes"
+                                 <*> o .:? "disk_io_op"
+                                 <*> o .:? "disk_writes"
+                                 <*> o .:? "disk_reads"
+                                 <*> o .: "available_in_bytes"
+                                 <*> o .: "free_in_bytes"
+                                 <*> o .: "total_in_bytes"
+
+instance FromJSON BoundTransportAddress where
+  parseJSON = withObject "BoundTransportAddress" parse
+    where
+      parse o = BoundTransportAddress <$> o .: "publish_address"
+                                      <*> o .: "bound_address"
+
+instance FromJSON JVMMemoryInfo where
+  parseJSON = withObject "JVMMemoryInfo" parse
+    where
+      parse o = JVMMemoryInfo <$> o .: "direct_max_in_bytes"
+                              <*> o .: "non_heap_max_in_bytes"
+                              <*> o .: "non_heap_init_in_bytes"
+                              <*> o .: "heap_max_in_bytes"
+                              <*> o .: "heap_init_in_bytes"
+
+instance FromJSON JVMVersion where
+  parseJSON (String t) =
+    JVMVersion <$> parseJSON (String (T.replace "_" "." t))
+  parseJSON v = JVMVersion <$> parseJSON v
+
+instance FromJSON NodeThreadPoolInfo where
+  parseJSON = withObject "NodeThreadPoolInfo" parse
+    where
+      parse o = do
+        ka <- maybe (return Nothing) (fmap Just . parseStringInterval) =<< o .:? "keep_alive"
+        NodeThreadPoolInfo <$> (parseJSON . unStringlyTypeJSON =<< o .: "queue_size")
+                           <*> pure ka
+                           <*> o .:? "min"
+                           <*> o .:? "max"
+                           <*> o .: "type"
+
+instance FromJSON CPUInfo where
+  parseJSON = withObject "CPUInfo" parse
+    where
+      parse o = CPUInfo <$> o .: "cache_size_in_bytes"
+                        <*> o .: "cores_per_socket"
+                        <*> o .: "total_sockets"
+                        <*> o .: "total_cores"
+                        <*> o .: "mhz"
+                        <*> o .: "model"
+                        <*> o .: "vendor"
+
+instance FromJSON ThreadPoolSize where
+  parseJSON v = parseAsNumber v <|> parseAsString v
+    where
+      parseAsNumber = parseAsInt <=< parseJSON
+      parseAsInt (-1) = return ThreadPoolUnbounded
+      parseAsInt n
+        | n >= 0 = return (ThreadPoolBounded n)
+        | otherwise = fail "Thread pool size must be >= -1."
+      parseAsString = withText "ThreadPoolSize" $ \t ->
+        case first (readMay .
T.unpack) (T.span isNumber t) of + (Just n, "k") -> return (ThreadPoolBounded (n * 1000)) + (Just n, "") -> return (ThreadPoolBounded n) + _ -> fail ("Invalid thread pool size " <> T.unpack t) + +instance FromJSON ThreadPoolType where + parseJSON = withText "ThreadPoolType" parse + where + parse "scaling" = return ThreadPoolScaling + parse "fixed" = return ThreadPoolFixed + parse "cached" = return ThreadPoolCached + parse e = fail ("Unexpected thread pool type" <> T.unpack e) + +data Source = + NoSource + | SourcePatterns PatternOrPatterns + | SourceIncludeExclude Include Exclude + deriving (Show, Eq) + +instance ToJSON Source where + toJSON NoSource = toJSON False + toJSON (SourcePatterns patterns) = toJSON patterns + toJSON (SourceIncludeExclude incl excl) = object [ "include" .= incl, "exclude" .= excl ] + +data PatternOrPatterns = + PopPattern Pattern + | PopPatterns [Pattern] + deriving (Eq, Show) + +data Include = Include [Pattern] deriving (Eq, Show) +data Exclude = Exclude [Pattern] deriving (Eq, Show) + +newtype Pattern = Pattern Text deriving (Eq, Show) + +newtype ScrollId = ScrollId Text deriving (Eq, Show, Ord, ToJSON, FromJSON) + +instance ToJSON PatternOrPatterns where + toJSON (PopPattern pattern) = toJSON pattern + toJSON (PopPatterns patterns) = toJSON patterns + +instance ToJSON Include where + toJSON (Include patterns) = toJSON patterns + +instance ToJSON Exclude where + toJSON (Exclude patterns) = toJSON patterns + +instance ToJSON Pattern where + toJSON (Pattern pattern) = toJSON pattern + +instance FromJSON NodesStats where + parseJSON = withObject "NodesStats" parse + where + parse o = do + nodes <- o .: "nodes" + stats <- forM (HM.toList nodes) $ \(fullNID, v) -> do + node <- parseJSON v + parseNodeStats (FullNodeId fullNID) node + cn <- o .: "cluster_name" + return (NodesStats stats cn) + +instance ToJSON UpdatableIndexSetting where + toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x + toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x + toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) + toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x + toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x + toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x + toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x + toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) + toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x + toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x + toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) + toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) + toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) + toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) + toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) + toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x + toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", 
"total_shards_per_node"]) x + toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x + toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) + toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x + toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x + toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x + toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x + toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x + toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x + toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x + toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x + toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x + +instance FromJSON UpdatableIndexSetting where + parseJSON = withObject "UpdatableIndexSetting" parse + where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] + <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] + <|> refreshInterval `taggedAt` ["index", "refresh_interval"] + <|> indexConcurrency `taggedAt` ["index", "concurrency"] + <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] + <|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"] + <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] + <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] + <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] + <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] + <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] + <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] + <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] + <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] + <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] + <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] + <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] + <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] + <|> gcDeletes `taggedAt` ["index", "gc_deletes"] + <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] + <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] + <|> compoundFormat `taggedAt` ["index", "compound_format"] + <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] + <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] + <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] + <|> blocksRead `taggedAt` ["blocks", "read"] + <|> blocksWrite `taggedAt` ["blocks", "write"] + <|> blocksMetaData `taggedAt` ["blocks", "metadata"] + where taggedAt f ks = taggedAt' f (Object o) ks + taggedAt' f v [] = f =<< (parseJSON v <|> (parseJSON (unStringlyTypeJSON v))) + taggedAt' f v (k:ks) = withObject "Object" (\o -> do v' <- o .: k + taggedAt' f v' ks) v + numberOfReplicas = pure . NumberOfReplicas + autoExpandReplicas = pure . AutoExpandReplicas + refreshInterval = pure . RefreshInterval . ndtJSON + indexConcurrency = pure . IndexConcurrency + failOnMergeFailure = pure . FailOnMergeFailure + translogFlushThresholdOps = pure . TranslogFlushThresholdOps + translogFlushThresholdSize = pure . 
TranslogFlushThresholdSize + translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON + translogDisableFlush = pure . TranslogDisableFlush + cacheFilterMaxSize = pure . CacheFilterMaxSize + cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON + gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON + routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter + routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter + routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter + routingAllocationEnable = pure . RoutingAllocationEnable + routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode + recoveryInitialShards = pure . RecoveryInitialShards + gcDeletes = pure . GCDeletes . ndtJSON + ttlDisablePurge = pure . TTLDisablePurge + translogFSType = pure . TranslogFSType + compoundFormat = pure . IndexCompoundFormat + compoundOnFlush = pure . IndexCompoundOnFlush + warmerEnabled = pure . WarmerEnabled + blocksReadOnly = pure . BlocksReadOnly + blocksRead = pure . BlocksRead + blocksWrite = pure . BlocksWrite + blocksMetaData = pure . BlocksMetaData + +instance ToJSON ReplicaBounds where + toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b) + toJSON (ReplicasLowerBounded a) = String (showText a <> "-all") + toJSON ReplicasUnbounded = Bool False + +instance ToJSON NominalDiffTimeJSON where + toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") + +instance ToJSON AllocationPolicy where + toJSON AllocAll = String "all" + toJSON AllocPrimaries = String "primaries" + toJSON AllocNewPrimaries = String "new_primaries" + toJSON AllocNone = String "none" + +instance ToJSON IndexTemplate where + toJSON (IndexTemplate p s m) = merge + (object [ "template" .= p + , "mappings" .= foldl' merge (object []) m + ]) + (toJSON s) + where + merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2 + merge o Null = o + merge _ _ = undefined + +instance FromJSON IndexAliasesSummary where + parseJSON = withObject "IndexAliasesSummary" parse + where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) + go ixn = withObject "index aliases" $ \ia -> do + aliases <- ia .:? "aliases" .!= mempty + forM (HM.toList aliases) $ \(aName, v) -> do + let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) + IndexAliasSummary indexAlias <$> parseJSON v + +instance ToJSON IndexAliasAction where + toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] + where Object iaObj = toJSON ia + Object optsObj = toJSON opts + toJSON (RemoveAlias ia) = object ["remove" .= iaObj] + where Object iaObj = toJSON ia + +instance ToJSON IndexAlias where + toJSON IndexAlias {..} = object ["index" .= srcIndex + , "alias" .= indexAlias + ] + +instance FromJSON AliasRouting where + parseJSON = withObject "AliasRouting" parse + where parse o = parseAll o <|> parseGranular o + parseAll o = AllAliasRouting <$> o .: "routing" + parseGranular o = do + sr <- o .:? "search_routing" + ir <- o .:? "index_routing" + if isNothing sr && isNothing ir + then fail "Both search_routing and index_routing can't be blank" + else return (GranularAliasRouting sr ir) + +instance FromJSON IndexAliasCreate where + parseJSON v = withObject "IndexAliasCreate" parse v + where parse o = IndexAliasCreate <$> optional (parseJSON v) + <*> o .:? 
"filter" + +instance ToJSON IndexAliasCreate where + toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) + where filterObj = maybe mempty (HM.singleton "filter" . toJSON) aliasCreateFilter + Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting + +instance ToJSON AliasRouting where + toJSON (AllAliasRouting v) = object ["routing" .= v] + toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) + where prs = [("search_routing" .=) <$> srch + ,("index_routing" .=) <$> idx] + +instance ToJSON SearchAliasRouting where + toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) + +instance FromJSON SearchAliasRouting where + parseJSON = withText "SearchAliasRouting" parse + where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) + +instance FromJSON EsError where + parseJSON (Object v) = EsError <$> + v .: "status" <*> + (v .: "error" <|> (v .: "error" >>= (.: "reason"))) + parseJSON _ = empty diff --git a/src/Database/V1/Bloodhound/Internal/Highlight.hs b/src/Database/V1/Bloodhound/Internal/Highlight.hs new file mode 100644 index 0000000..0b3488b --- /dev/null +++ b/src/Database/V1/Bloodhound/Internal/Highlight.hs @@ -0,0 +1,144 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Database.V1.Bloodhound.Internal.Highlight where + + +import Bloodhound.Import + +import qualified Data.Map as M + +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Query + +type HitHighlight = M.Map Text [Text] + +data Highlights = Highlights { globalsettings :: Maybe HighlightSettings + , highlightFields :: [FieldHighlight] + } deriving (Show, Eq) + +instance ToJSON Highlights where + toJSON (Highlights global fields) = + omitNulls (("fields" .= fields) + : highlightSettingsPairs global) + +data HighlightSettings = Plain PlainHighlight + | Postings PostingsHighlight + | FastVector FastVectorHighlight + deriving (Show, Eq) + +data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings) + deriving (Show, Eq) + +data PlainHighlight = + PlainHighlight { plainCommon :: Maybe CommonHighlight + , plainNonPost :: Maybe NonPostings } deriving (Show, Eq) + + -- This requires that index_options are set to 'offset' in the mapping. +data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq) + +-- This requires that term_vector is set to 'with_positions_offsets' in the mapping. +data FastVectorHighlight = + FastVectorHighlight { fvCommon :: Maybe CommonHighlight + , fvNonPostSettings :: Maybe NonPostings + , boundaryChars :: Maybe Text + , boundaryMaxScan :: Maybe Int + , fragmentOffset :: Maybe Int + , matchedFields :: [Text] + , phraseLimit :: Maybe Int + } deriving (Show, Eq) + +data CommonHighlight = + CommonHighlight { order :: Maybe Text + , forceSource :: Maybe Bool + , tag :: Maybe HighlightTag + , encoder :: Maybe HighlightEncoder + , noMatchSize :: Maybe Int + , highlightQuery :: Maybe Query + , requireFieldMatch :: Maybe Bool + } deriving (Show, Eq) + +-- Settings that are only applicable to FastVector and Plain highlighters. +data NonPostings = + NonPostings { fragmentSize :: Maybe Int + , numberOfFragments :: Maybe Int} deriving (Show, Eq) + +data HighlightEncoder = DefaultEncoder + | HTMLEncoder + deriving (Show, Eq) + +-- NOTE: Should the tags use some kind of HTML type, rather than Text? 
+data HighlightTag = TagSchema Text + | CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh + deriving (Show, Eq) + +highlightSettingsPairs :: Maybe HighlightSettings -> [Pair] +highlightSettingsPairs Nothing = [] +highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh) +highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph) +highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh) + +plainHighPairs :: Maybe PlainHighlight -> [Pair] +plainHighPairs Nothing = [] +plainHighPairs (Just (PlainHighlight plCom plNonPost)) = + [ "type" .= String "plain"] + ++ commonHighlightPairs plCom + ++ nonPostingsToPairs plNonPost + +postHighPairs :: Maybe PostingsHighlight -> [Pair] +postHighPairs Nothing = [] +postHighPairs (Just (PostingsHighlight pCom)) = + ("type" .= String "postings") + : commonHighlightPairs pCom + +fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair] +fastVectorHighPairs Nothing = [] +fastVectorHighPairs (Just + (FastVectorHighlight fvCom fvNonPostSettings' fvBoundChars + fvBoundMaxScan fvFragOff fvMatchedFields + fvPhraseLim)) = + [ "type" .= String "fvh" + , "boundary_chars" .= fvBoundChars + , "boundary_max_scan" .= fvBoundMaxScan + , "fragment_offset" .= fvFragOff + , "matched_fields" .= fvMatchedFields + , "phrase_limit" .= fvPhraseLim] + ++ commonHighlightPairs fvCom + ++ nonPostingsToPairs fvNonPostSettings' + +commonHighlightPairs :: Maybe CommonHighlight -> [Pair] +commonHighlightPairs Nothing = [] +commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder + chNoMatchSize chHighlightQuery + chRequireFieldMatch)) = + [ "order" .= chScore + , "force_source" .= chForceSource + , "encoder" .= chEncoder + , "no_match_size" .= chNoMatchSize + , "highlight_query" .= chHighlightQuery + , "require_field_match" .= chRequireFieldMatch] + ++ highlightTagToPairs chTag + +nonPostingsToPairs :: Maybe NonPostings -> [Pair] +nonPostingsToPairs Nothing = [] +nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = + [ "fragment_size" .= npFragSize + , "number_of_fragments" .= npNumOfFrags] + +highlightTagToPairs :: Maybe HighlightTag -> [Pair] +highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"] +highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre + , "post_tags" .= post] +highlightTagToPairs Nothing = [] + +instance ToJSON FieldHighlight where + toJSON (FieldHighlight (FieldName fName) (Just fSettings)) = + object [ fName .= fSettings ] + toJSON (FieldHighlight (FieldName fName) Nothing) = + object [ fName .= emptyObject ] + +instance ToJSON HighlightSettings where + toJSON hs = omitNulls (highlightSettingsPairs (Just hs)) + +instance ToJSON HighlightEncoder where + toJSON DefaultEncoder = String "default" + toJSON HTMLEncoder = String "html" diff --git a/src/Database/V1/Bloodhound/Internal/Newtypes.hs b/src/Database/V1/Bloodhound/Internal/Newtypes.hs new file mode 100644 index 0000000..5f3afdd --- /dev/null +++ b/src/Database/V1/Bloodhound/Internal/Newtypes.hs @@ -0,0 +1,211 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} + +module Database.V1.Bloodhound.Internal.Newtypes where + +import Bloodhound.Import + +newtype From = From Int deriving (Eq, Show, ToJSON) +newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON) + + +{-| 'FieldName' is used all over the place wherever a specific field within + a document needs to be specified, usually in 'Query's or 'Filter's. 
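+
+    A minimal example (illustrative):
+
+    > mkMatchQuery (FieldName "user") (QueryString "bitemyapp")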
+-} +newtype FieldName = + FieldName Text + deriving (Eq, Show, ToJSON, FromJSON) + +newtype Boost = + Boost Double + deriving (Eq, Show, ToJSON, FromJSON) + +newtype BoostTerms = + BoostTerms Double + deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'ReplicaCount' is part of 'IndexSettings' -} +newtype ReplicaCount = + ReplicaCount Int + deriving (Eq, Show, ToJSON) + +{-| 'ShardCount' is part of 'IndexSettings' -} +newtype ShardCount = + ShardCount Int + deriving (Eq, Show, ToJSON) + + +{-| 'TemplateName' is used to describe which template to query/create/delete +-} +newtype TemplateName = TemplateName Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'TemplatePattern' represents a pattern which is matched against index names +-} +newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'MappingName' is part of mappings which are how ES describes and schematizes + the data in the indices. +-} +newtype MappingName = MappingName Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'DocId' is a generic wrapper value for expressing unique Document IDs. + Can be set by the user or created by ES itself. Often used in client + functions for poking at specific documents. +-} +newtype DocId = DocId Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'QueryString' is used to wrap query text bodies, be they human written or not. +-} +newtype QueryString = QueryString Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'Script' is often used in place of 'FieldName' to specify more +complex ways of extracting a value from a document. +-} +newtype Script = Script { scriptText :: Text } deriving (Eq, Show) + +{-| 'CacheName' is used in 'RegexpFilter' for describing the + 'CacheKey' keyed caching behavior. +-} +newtype CacheName = CacheName Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching. +-} +newtype CacheKey = + CacheKey Text deriving (Eq, Show, ToJSON, FromJSON) +newtype Existence = + Existence Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype NullValue = + NullValue Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype CutoffFrequency = + CutoffFrequency Double deriving (Eq, Show, ToJSON, FromJSON) +newtype Analyzer = + Analyzer Text deriving (Eq, Show, ToJSON, FromJSON) +newtype MaxExpansions = + MaxExpansions Int deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'Lenient', if set to true, will cause format based failures to be + ignored. I don't know what the bloody default is, Elasticsearch + documentation didn't say what it was. Let me know if you figure it out. +-} +newtype Lenient = + Lenient Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype Tiebreaker = + Tiebreaker Double deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'MinimumMatch' controls how many should clauses in the bool query should + match. Can be an absolute value (2) or a percentage (30%) or a + combination of both. +-} +newtype MinimumMatch = + MinimumMatch Int deriving (Eq, Show, ToJSON, FromJSON) +newtype DisableCoord = + DisableCoord Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype IgnoreTermFrequency = + IgnoreTermFrequency Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype MinimumTermFrequency = + MinimumTermFrequency Int deriving (Eq, Show, ToJSON, FromJSON) +newtype MaxQueryTerms = + MaxQueryTerms Int deriving (Eq, Show, ToJSON, FromJSON) +newtype Fuzziness = + Fuzziness Double deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. 
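+
+    e.g. (illustrative) in a 'FuzzyQuery', @PrefixLength 2@ keeps the first
+    two characters of the term out of fuzzy expansion:
+
+    > FuzzyQuery (FieldName "user") "bitemyap" (PrefixLength 2) (MaxExpansions 50) (Fuzziness 0.6) Nothing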
-} +newtype PrefixLength = + PrefixLength Int deriving (Eq, Show, ToJSON, FromJSON) +newtype TypeName = + TypeName Text deriving (Eq, Show, ToJSON, FromJSON) +newtype PercentMatch = + PercentMatch Double deriving (Eq, Show, ToJSON, FromJSON) +newtype StopWord = + StopWord Text deriving (Eq, Show, ToJSON, FromJSON) +newtype QueryPath = + QueryPath Text deriving (Eq, Show, ToJSON, FromJSON) + +{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly + heavy, because all terms in the index need to be examined, just in case + they match. Leading wildcards can be disabled by setting + 'AllowLeadingWildcard' to false. -} +newtype AllowLeadingWildcard = + AllowLeadingWildcard Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype LowercaseExpanded = + LowercaseExpanded Bool deriving (Eq, Show, ToJSON, FromJSON) +newtype EnablePositionIncrements = + EnablePositionIncrements Bool deriving (Eq, Show, ToJSON, FromJSON) + +{-| By default, wildcard terms in a query are not analyzed. + Setting 'AnalyzeWildcard' to true enables best-effort analysis. +-} +newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'GeneratePhraseQueries' defaults to false. +-} +newtype GeneratePhraseQueries = + GeneratePhraseQueries Bool deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'Locale' is used for string conversions - defaults to ROOT. +-} +newtype Locale = Locale Text deriving (Eq, Show, ToJSON, FromJSON) +newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, ToJSON, FromJSON) +newtype MinWordLength = MinWordLength Int deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact + phrase matches. Default is 0. +-} +newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, ToJSON, FromJSON) +newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, ToJSON, FromJSON) +newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, ToJSON, FromJSON) + +-- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ +newtype POSIXMS = POSIXMS { posixMS :: UTCTime } + +instance FromJSON POSIXMS where + parseJSON = withScientific "POSIXMS" (return . parse) + where parse n = let n' = truncate n :: Integer + in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) + +{-| 'IndexName' is used to describe which index to query/create/delete +-} +newtype IndexName = IndexName Text deriving (Eq, Show, ToJSON, FromJSON) + +newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Show, ToJSON) + +type Score = Maybe Double + +newtype ShardId = ShardId { shardId :: Int } + deriving (Eq, Show, FromJSON) + +-- | Milliseconds +newtype MS = MS NominalDiffTime + + +-- keeps the unexported constructor warnings at bay +unMS :: MS -> NominalDiffTime +unMS (MS t) = t + +instance FromJSON MS where + parseJSON = withScientific "MS" (return . MS . parse) + where + parse n = fromInteger ((truncate n) * 1000) + +newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a } + deriving (Show, Eq) + +instance FromJSON a => FromJSON (MaybeNA a) where + parseJSON (String "NA") = pure $ MaybeNA Nothing + parseJSON o = MaybeNA . Just <$> parseJSON o + +newtype SnapshotName = SnapshotName { snapshotName :: Text } + deriving (Show, Eq, Ord, ToJSON, FromJSON) + +instance FromJSON ShardCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ShardCount . 
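+        -- (sketch) ES sometimes renders these counts as JSON strings,
+        -- hence the parseAsString fallback below.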
parseJSON + parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) + + +instance FromJSON ReplicaCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ReplicaCount . parseJSON + parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) diff --git a/src/Database/V1/Bloodhound/Internal/Query.hs b/src/Database/V1/Bloodhound/Internal/Query.hs new file mode 100644 index 0000000..2e22695 --- /dev/null +++ b/src/Database/V1/Bloodhound/Internal/Query.hs @@ -0,0 +1,1689 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Database.V1.Bloodhound.Internal.Query where + + +import Bloodhound.Import + +import qualified Data.HashMap.Strict as HM +import qualified Data.Text as T + +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Types.Class + + +data GeoPoint = + GeoPoint { geoField :: FieldName + , latLon :: LatLon} deriving (Eq, Show) + +instance ToJSON GeoPoint where + toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = + object [ geoPointField .= geoPointLatLon ] + + +data LatLon = LatLon { lat :: Double + , lon :: Double } deriving (Eq, Show) + +instance ToJSON LatLon where + toJSON (LatLon lLat lLon) = + object ["lat" .= lLat + , "lon" .= lLon] + +instance FromJSON LatLon where + parseJSON = withObject "LatLon" parse + where parse o = LatLon <$> o .: "lat" + <*> o .: "lon" + +data DistanceUnit = Miles + | Yards + | Feet + | Inches + | Kilometers + | Meters + | Centimeters + | Millimeters + | NauticalMiles deriving (Eq, Show) + +instance ToJSON DistanceUnit where + toJSON Miles = String "mi" + toJSON Yards = String "yd" + toJSON Feet = String "ft" + toJSON Inches = String "in" + toJSON Kilometers = String "km" + toJSON Meters = String "m" + toJSON Centimeters = String "cm" + toJSON Millimeters = String "mm" + toJSON NauticalMiles = String "nmi" + +instance FromJSON DistanceUnit where + parseJSON = withText "DistanceUnit" parse + where parse "mi" = pure Miles + parse "yd" = pure Yards + parse "ft" = pure Feet + parse "in" = pure Inches + parse "km" = pure Kilometers + parse "m" = pure Meters + parse "cm" = pure Centimeters + parse "mm" = pure Millimeters + parse "nmi" = pure NauticalMiles + parse u = fail ("Unrecognized DistanceUnit: " <> show u) + +{-| 'Cache' is for telling ES whether it should cache a 'Filter' or not. + 'Query's cannot be cached. 
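+
+    e.g. (illustrative) @TermFilter (Term "user" "bitemyapp") True@ asks ES
+    to cache that term filter.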
+-} +type Cache = Bool -- caching on/off + + +data Filter = AndFilter [Filter] Cache + | OrFilter [Filter] Cache + | NotFilter Filter Cache + | IdentityFilter + | BoolFilter BoolMatch + | ExistsFilter FieldName -- always cached + | GeoBoundingBoxFilter GeoBoundingBoxConstraint + | GeoDistanceFilter GeoPoint Distance DistanceType OptimizeBbox Cache + | GeoDistanceRangeFilter GeoPoint DistanceRange + | GeoPolygonFilter FieldName [LatLon] + | IdsFilter MappingName [DocId] + | LimitFilter Int + | MissingFilter FieldName Existence NullValue + | PrefixFilter FieldName PrefixValue Cache + | QueryFilter Query Cache + | RangeFilter FieldName RangeValue RangeExecution Cache + | RegexpFilter FieldName Regexp RegexpFlags CacheName Cache CacheKey + | TermFilter Term Cache + deriving (Eq, Show) + +instance Semigroup Filter where + a <> b = AndFilter [a, b] defaultCache + +instance Monoid Filter where + mempty = IdentityFilter + mappend = (<>) + +instance Seminearring Filter where + a <||> b = OrFilter [a, b] defaultCache + +data BoolMatch = MustMatch Term Cache + | MustNotMatch Term Cache + | ShouldMatch [Term] Cache deriving (Eq, Show) + +data Term = Term { termField :: Text + , termValue :: Text } deriving (Eq, Show) + + +data OptimizeBbox = OptimizeGeoFilterType GeoFilterType + | NoOptimizeBbox deriving (Eq, Show) + + +data Distance = + Distance { coefficient :: Double + , unit :: DistanceUnit } deriving (Eq, Show) + +data DistanceRange = + DistanceRange { distanceFrom :: Distance + , distanceTo :: Distance } deriving (Eq, Show) + +-- "memory" or "indexed" +data GeoFilterType = GeoFilterMemory + | GeoFilterIndexed deriving (Eq, Show) + +data GeoBoundingBoxConstraint = + GeoBoundingBoxConstraint { geoBBField :: FieldName + , constraintBox :: GeoBoundingBox + , bbConstraintcache :: Cache + , geoType :: GeoFilterType + } deriving (Eq, Show) + + +data DistanceType = Arc + | SloppyArc -- doesn't exist <1.0 + | Plane deriving (Eq, Show) + +data GeoBoundingBox = + GeoBoundingBox { topLeft :: LatLon + , bottomRight :: LatLon } deriving (Eq, Show) + +{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component. 
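+
+    e.g. (illustrative):
+
+    > PrefixQuery (FieldName "user") "bitemy" Nothing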
+-} +type PrefixValue = Text + + +data RangeValue = RangeDateLte LessThanEqD + | RangeDateLt LessThanD + | RangeDateGte GreaterThanEqD + | RangeDateGt GreaterThanD + | RangeDateGtLt GreaterThanD LessThanD + | RangeDateGteLte GreaterThanEqD LessThanEqD + | RangeDateGteLt GreaterThanEqD LessThanD + | RangeDateGtLte GreaterThanD LessThanEqD + | RangeDoubleLte LessThanEq + | RangeDoubleLt LessThan + | RangeDoubleGte GreaterThanEq + | RangeDoubleGt GreaterThan + | RangeDoubleGtLt GreaterThan LessThan + | RangeDoubleGteLte GreaterThanEq LessThanEq + | RangeDoubleGteLt GreaterThanEq LessThan + | RangeDoubleGtLte GreaterThan LessThanEq + deriving (Eq, Show) + + +newtype LessThan = LessThan Double deriving (Eq, Show) +newtype LessThanEq = LessThanEq Double deriving (Eq, Show) +newtype GreaterThan = GreaterThan Double deriving (Eq, Show) +newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Show) + +newtype LessThanD = LessThanD UTCTime deriving (Eq, Show) +newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Show) +newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Show) +newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Show) + + +data Query = + TermQuery Term (Maybe Boost) + | TermsQuery Text (NonEmpty Text) + | QueryMatchQuery MatchQuery + | QueryMultiMatchQuery MultiMatchQuery + | QueryBoolQuery BoolQuery + | QueryBoostingQuery BoostingQuery + | QueryCommonTermsQuery CommonTermsQuery + | ConstantScoreFilter Filter Boost + | ConstantScoreQuery Query Boost + | QueryDisMaxQuery DisMaxQuery + | QueryFilteredQuery FilteredQuery + | QueryFuzzyLikeThisQuery FuzzyLikeThisQuery + | QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery + | QueryFuzzyQuery FuzzyQuery + | QueryHasChildQuery HasChildQuery + | QueryHasParentQuery HasParentQuery + | IdsQuery MappingName [DocId] + | QueryIndicesQuery IndicesQuery + | MatchAllQuery (Maybe Boost) + | QueryMoreLikeThisQuery MoreLikeThisQuery + | QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery + | QueryNestedQuery NestedQuery + | QueryPrefixQuery PrefixQuery + | QueryQueryStringQuery QueryStringQuery + | QuerySimpleQueryStringQuery SimpleQueryStringQuery + | QueryRangeQuery RangeQuery + | QueryRegexpQuery RegexpQuery + | QueryTemplateQueryInline TemplateQueryInline + deriving (Eq, Show) + +data RegexpQuery = + RegexpQuery { regexpQueryField :: FieldName + , regexpQuery :: Regexp + , regexpQueryFlags :: RegexpFlags + , regexpQueryBoost :: Maybe Boost + } deriving (Eq, Show) + +data RangeQuery = + RangeQuery { rangeQueryField :: FieldName + , rangeQueryRange :: RangeValue + , rangeQueryBoost :: Boost } deriving (Eq, Show) + +mkRangeQuery :: FieldName -> RangeValue -> RangeQuery +mkRangeQuery f r = RangeQuery f r (Boost 1.0) + +data SimpleQueryStringQuery = + SimpleQueryStringQuery + { simpleQueryStringQuery :: QueryString + , simpleQueryStringField :: Maybe FieldOrFields + , simpleQueryStringOperator :: Maybe BooleanOperator + , simpleQueryStringAnalyzer :: Maybe Analyzer + , simpleQueryStringFlags :: Maybe (NonEmpty SimpleQueryFlag) + , simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded + , simpleQueryStringLocale :: Maybe Locale + } deriving (Eq, Show) + +data SimpleQueryFlag = + SimpleQueryAll + | SimpleQueryNone + | SimpleQueryAnd + | SimpleQueryOr + | SimpleQueryPrefix + | SimpleQueryPhrase + | SimpleQueryPrecedence + | SimpleQueryEscape + | SimpleQueryWhitespace + | SimpleQueryFuzzy + | SimpleQueryNear + | SimpleQuerySlop deriving (Eq, Show) + +-- use_dis_max and tie_breaker when fields are plural? 
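+-- A typical construction (illustrative) goes through 'mkQueryStringQuery'
+-- below, which defaults every optional field to Nothing:
+--
+-- > QueryQueryStringQuery (mkQueryStringQuery (QueryString "bitemyapp"))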
+data QueryStringQuery = + QueryStringQuery + { queryStringQuery :: QueryString + , queryStringDefaultField :: Maybe FieldName + , queryStringOperator :: Maybe BooleanOperator + , queryStringAnalyzer :: Maybe Analyzer + , queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard + , queryStringLowercaseExpanded :: Maybe LowercaseExpanded + , queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements + , queryStringFuzzyMaxExpansions :: Maybe MaxExpansions + , queryStringFuzziness :: Maybe Fuzziness + , queryStringFuzzyPrefixLength :: Maybe PrefixLength + , queryStringPhraseSlop :: Maybe PhraseSlop + , queryStringBoost :: Maybe Boost + , queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard + , queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries + , queryStringMinimumShouldMatch :: Maybe MinimumMatch + , queryStringLenient :: Maybe Lenient + , queryStringLocale :: Maybe Locale + } deriving (Eq, Show) + +mkQueryStringQuery :: QueryString -> QueryStringQuery +mkQueryStringQuery qs = + QueryStringQuery qs Nothing Nothing + Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing + Nothing Nothing + +data FieldOrFields = FofField FieldName + | FofFields (NonEmpty FieldName) deriving (Eq, Show) + +data PrefixQuery = + PrefixQuery + { prefixQueryField :: FieldName + , prefixQueryPrefixValue :: Text + , prefixQueryBoost :: Maybe Boost } deriving (Eq, Show) + +data NestedQuery = + NestedQuery + { nestedQueryPath :: QueryPath + , nestedQueryScoreType :: ScoreType + , nestedQuery :: Query } deriving (Eq, Show) + +data MoreLikeThisFieldQuery = + MoreLikeThisFieldQuery + { moreLikeThisFieldText :: Text + , moreLikeThisFieldFields :: FieldName + -- default 0.3 (30%) + , moreLikeThisFieldPercentMatch :: Maybe PercentMatch + , moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency + , moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms + , moreLikeThisFieldStopWords :: Maybe (NonEmpty StopWord) + , moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency + , moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency + , moreLikeThisFieldMinWordLength :: Maybe MinWordLength + , moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength + , moreLikeThisFieldBoostTerms :: Maybe BoostTerms + , moreLikeThisFieldBoost :: Maybe Boost + , moreLikeThisFieldAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + +data MoreLikeThisQuery = + MoreLikeThisQuery + { moreLikeThisText :: Text + , moreLikeThisFields :: Maybe (NonEmpty FieldName) + -- default 0.3 (30%) + , moreLikeThisPercentMatch :: Maybe PercentMatch + , moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency + , moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms + , moreLikeThisStopWords :: Maybe (NonEmpty StopWord) + , moreLikeThisMinDocFrequency :: Maybe MinDocFrequency + , moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency + , moreLikeThisMinWordLength :: Maybe MinWordLength + , moreLikeThisMaxWordLength :: Maybe MaxWordLength + , moreLikeThisBoostTerms :: Maybe BoostTerms + , moreLikeThisBoost :: Maybe Boost + , moreLikeThisAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + +data IndicesQuery = + IndicesQuery + { indicesQueryIndices :: [IndexName] + , indicesQuery :: Query + -- default "all" + , indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show) + +data HasParentQuery = + HasParentQuery + { hasParentQueryType :: TypeName + , hasParentQuery :: Query + , hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Show) + +data HasChildQuery = + HasChildQuery + { 
hasChildQueryType :: TypeName + , hasChildQuery :: Query + , hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Show) + +data ScoreType = + ScoreTypeMax + | ScoreTypeSum + | ScoreTypeAvg + | ScoreTypeNone deriving (Eq, Show) + +data FuzzyQuery = + FuzzyQuery { fuzzyQueryField :: FieldName + , fuzzyQueryValue :: Text + , fuzzyQueryPrefixLength :: PrefixLength + , fuzzyQueryMaxExpansions :: MaxExpansions + , fuzzyQueryFuzziness :: Fuzziness + , fuzzyQueryBoost :: Maybe Boost + } deriving (Eq, Show) + +data FuzzyLikeFieldQuery = + FuzzyLikeFieldQuery + { fuzzyLikeField :: FieldName + -- anaphora is good for the soul. + , fuzzyLikeFieldText :: Text + , fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms + , fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency + , fuzzyLikeFieldFuzziness :: Fuzziness + , fuzzyLikeFieldPrefixLength :: PrefixLength + , fuzzyLikeFieldBoost :: Boost + , fuzzyLikeFieldAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + +data FuzzyLikeThisQuery = + FuzzyLikeThisQuery + { fuzzyLikeFields :: [FieldName] + , fuzzyLikeText :: Text + , fuzzyLikeMaxQueryTerms :: MaxQueryTerms + , fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency + , fuzzyLikeFuzziness :: Fuzziness + , fuzzyLikePrefixLength :: PrefixLength + , fuzzyLikeBoost :: Boost + , fuzzyLikeAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + +data FilteredQuery = + FilteredQuery + { filteredQuery :: Query + , filteredFilter :: Filter } deriving (Eq, Show) + +data DisMaxQuery = + DisMaxQuery { disMaxQueries :: [Query] + -- default 0.0 + , disMaxTiebreaker :: Tiebreaker + , disMaxBoost :: Maybe Boost + } deriving (Eq, Show) + +data MatchQuery = + MatchQuery { matchQueryField :: FieldName + , matchQueryQueryString :: QueryString + , matchQueryOperator :: BooleanOperator + , matchQueryZeroTerms :: ZeroTermsQuery + , matchQueryCutoffFrequency :: Maybe CutoffFrequency + , matchQueryMatchType :: Maybe MatchQueryType + , matchQueryAnalyzer :: Maybe Analyzer + , matchQueryMaxExpansions :: Maybe MaxExpansions + , matchQueryLenient :: Maybe Lenient + , matchQueryBoost :: Maybe Boost } deriving (Eq, Show) + +{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters, + enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery' +-} +mkMatchQuery :: FieldName -> QueryString -> MatchQuery +mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing + +data MatchQueryType = + MatchPhrase + | MatchPhrasePrefix deriving (Eq, Show) + +data MultiMatchQuery = + MultiMatchQuery { multiMatchQueryFields :: [FieldName] + , multiMatchQueryString :: QueryString + , multiMatchQueryOperator :: BooleanOperator + , multiMatchQueryZeroTerms :: ZeroTermsQuery + , multiMatchQueryTiebreaker :: Maybe Tiebreaker + , multiMatchQueryType :: Maybe MultiMatchQueryType + , multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency + , multiMatchQueryAnalyzer :: Maybe Analyzer + , multiMatchQueryMaxExpansions :: Maybe MaxExpansions + , multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Show) + +{-| 'mkMultiMatchQuery' is a convenience function that defaults the less common parameters, + enabling you to provide only the list of 'FieldName's and 'QueryString' to + make a 'MultiMatchQuery'. 
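+
+   For example (illustrative):
+
+   > mkMultiMatchQuery [FieldName "user", FieldName "message"] (QueryString "bitemyapp")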
+-} + +mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery +mkMultiMatchQuery matchFields query = + MultiMatchQuery matchFields query + Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing + +data MultiMatchQueryType = + MultiMatchBestFields + | MultiMatchMostFields + | MultiMatchCrossFields + | MultiMatchPhrase + | MultiMatchPhrasePrefix deriving (Eq, Show) + +data BoolQuery = + BoolQuery { boolQueryMustMatch :: [Query] + , boolQueryMustNotMatch :: [Query] + , boolQueryShouldMatch :: [Query] + , boolQueryMinimumShouldMatch :: Maybe MinimumMatch + , boolQueryBoost :: Maybe Boost + , boolQueryDisableCoord :: Maybe DisableCoord + } deriving (Eq, Show) + +mkBoolQuery :: [Query] -> [Query] -> [Query] -> BoolQuery +mkBoolQuery must mustNot should = + BoolQuery must mustNot should Nothing Nothing Nothing + +data BoostingQuery = + BoostingQuery { positiveQuery :: Query + , negativeQuery :: Query + , negativeBoost :: Boost } deriving (Eq, Show) + +data CommonTermsQuery = + CommonTermsQuery { commonField :: FieldName + , commonQuery :: QueryString + , commonCutoffFrequency :: CutoffFrequency + , commonLowFreqOperator :: BooleanOperator + , commonHighFreqOperator :: BooleanOperator + , commonMinimumShouldMatch :: Maybe CommonMinimumMatch + , commonBoost :: Maybe Boost + , commonAnalyzer :: Maybe Analyzer + , commonDisableCoord :: Maybe DisableCoord + } deriving (Eq, Show) + +data CommonMinimumMatch = + CommonMinimumMatchHighLow MinimumMatchHighLow + | CommonMinimumMatch MinimumMatch + deriving (Eq, Show) + +data MinimumMatchHighLow = + MinimumMatchHighLow { lowFreq :: MinimumMatch + , highFreq :: MinimumMatch } deriving (Eq, Show) + + +data TemplateQueryInline = + TemplateQueryInline { inline :: Query + , params :: TemplateQueryKeyValuePairs + } + deriving (Eq, Show) + +instance ToJSON TemplateQueryInline where + toJSON TemplateQueryInline{..} = object [ "query" .= inline + , "params" .= params + ] + +instance FromJSON TemplateQueryInline where + parseJSON = withObject "TemplateQueryInline" parse + where parse o = TemplateQueryInline + <$> o .: "query" + <*> o .: "params" + +{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible + JSON encoding baked in. Used all over the place. +-} +data BooleanOperator = And | Or deriving (Eq, Show) + +type TemplateQueryKey = Text +type TemplateQueryValue = Text + +newtype TemplateQueryKeyValuePairs = TemplateQueryKeyValuePairs (HM.HashMap TemplateQueryKey TemplateQueryValue) + deriving (Eq, Show) + +instance ToJSON TemplateQueryKeyValuePairs where + toJSON (TemplateQueryKeyValuePairs x) = Object $ HM.map toJSON x + +instance FromJSON TemplateQueryKeyValuePairs where + parseJSON (Object o) = pure . 
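+    -- Note (a sketch of the behavior below): getValue silently drops
+    -- non-String values, so {"k": 1} parses to an empty map rather than failing.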
TemplateQueryKeyValuePairs $ HM.mapMaybe getValue o + where getValue (String x) = Just x + getValue _ = Nothing + parseJSON _ = fail "error parsing TemplateQueryKeyValuePairs" + +newtype Regexp = Regexp Text deriving (Eq, Show, FromJSON) + +data RegexpFlags = AllRegexpFlags + | NoRegexpFlags + | SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Show) + +data RegexpFlag = AnyString + | Automaton + | Complement + | Empty + | Intersection + | Interval deriving (Eq, Show) + +data RangeExecution = RangeExecutionIndex + | RangeExecutionFielddata deriving (Eq, Show) + +data ZeroTermsQuery = ZeroTermsNone + | ZeroTermsAll deriving (Eq, Show) + +instance ToJSON Query where + toJSON (TermQuery (Term termQueryField termQueryValue) boost) = + object [ "term" .= + object [termQueryField .= object merged]] + where + base = [ "value" .= termQueryValue ] + boosted = maybe [] (return . ("boost" .=)) boost + merged = mappend base boosted + + toJSON (TermsQuery fieldName terms) = + object [ "terms" .= object conjoined ] + where conjoined = [fieldName .= terms] + + toJSON (IdsQuery idsQueryMappingName docIds) = + object [ "ids" .= object conjoined ] + where conjoined = [ "type" .= idsQueryMappingName + , "values" .= fmap toJSON docIds ] + + toJSON (QueryQueryStringQuery qQueryStringQuery) = + object [ "query_string" .= qQueryStringQuery ] + + toJSON (QueryMatchQuery matchQuery) = + object [ "match" .= matchQuery ] + + toJSON (QueryMultiMatchQuery multiMatchQuery) = + toJSON multiMatchQuery + + toJSON (QueryBoolQuery boolQuery) = + object [ "bool" .= boolQuery ] + + toJSON (QueryBoostingQuery boostingQuery) = + object [ "boosting" .= boostingQuery ] + + toJSON (QueryCommonTermsQuery commonTermsQuery) = + object [ "common" .= commonTermsQuery ] + + toJSON (ConstantScoreFilter csFilter boost) = + object ["constant_score" .= object ["filter" .= csFilter + , "boost" .= boost]] + + toJSON (ConstantScoreQuery query boost) = + object ["constant_score" .= object ["query" .= query + , "boost" .= boost]] + + toJSON (QueryDisMaxQuery disMaxQuery) = + object [ "dis_max" .= disMaxQuery ] + + toJSON (QueryFilteredQuery qFilteredQuery) = + object [ "filtered" .= qFilteredQuery ] + + toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) = + object [ "fuzzy_like_this" .= fuzzyQuery ] + + toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) = + object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ] + + toJSON (QueryFuzzyQuery fuzzyQuery) = + object [ "fuzzy" .= fuzzyQuery ] + + toJSON (QueryHasChildQuery childQuery) = + object [ "has_child" .= childQuery ] + + toJSON (QueryHasParentQuery parentQuery) = + object [ "has_parent" .= parentQuery ] + + toJSON (QueryIndicesQuery qIndicesQuery) = + object [ "indices" .= qIndicesQuery ] + + toJSON (MatchAllQuery boost) = + object [ "match_all" .= omitNulls [ "boost" .= boost ] ] + + toJSON (QueryMoreLikeThisQuery query) = + object [ "more_like_this" .= query ] + + toJSON (QueryMoreLikeThisFieldQuery query) = + object [ "more_like_this_field" .= query ] + + toJSON (QueryNestedQuery query) = + object [ "nested" .= query ] + + toJSON (QueryPrefixQuery query) = + object [ "prefix" .= query ] + + toJSON (QueryRangeQuery query) = + object [ "range" .= query ] + + toJSON (QueryRegexpQuery query) = + object [ "regexp" .= query ] + + toJSON (QuerySimpleQueryStringQuery query) = + object [ "simple_query_string" .= query ] + + toJSON (QueryTemplateQueryInline templateQuery) = + object [ "template" .= templateQuery ] + +instance FromJSON Query where + parseJSON v = withObject "Query" parse v + where parse o = 
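+          -- (sketch) Each alternative keys off the tag emitted by the
+          -- ToJSON instance above, e.g. {"term": {...}} dispatches to termQuery: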
termQuery `taggedWith` "term" + <|> termsQuery `taggedWith` "terms" + <|> idsQuery `taggedWith` "ids" + <|> queryQueryStringQuery `taggedWith` "query_string" + <|> queryMatchQuery `taggedWith` "match" + <|> queryMultiMatchQuery + <|> queryBoolQuery `taggedWith` "bool" + <|> queryBoostingQuery `taggedWith` "boosting" + <|> queryCommonTermsQuery `taggedWith` "common" + <|> constantScoreFilter `taggedWith` "constant_score" + <|> constantScoreQuery `taggedWith` "constant_score" + <|> queryDisMaxQuery `taggedWith` "dis_max" + <|> queryFilteredQuery `taggedWith` "filtered" + <|> queryFuzzyLikeThisQuery `taggedWith` "fuzzy_like_this" + <|> queryFuzzyLikeFieldQuery `taggedWith` "fuzzy_like_this_field" + <|> queryFuzzyQuery `taggedWith` "fuzzy" + <|> queryHasChildQuery `taggedWith` "has_child" + <|> queryHasParentQuery `taggedWith` "has_parent" + <|> queryIndicesQuery `taggedWith` "indices" + <|> matchAllQuery `taggedWith` "match_all" + <|> queryMoreLikeThisQuery `taggedWith` "more_like_this" + <|> queryMoreLikeThisFieldQuery `taggedWith` "more_like_this_field" + <|> queryNestedQuery `taggedWith` "nested" + <|> queryPrefixQuery `taggedWith` "prefix" + <|> queryRangeQuery `taggedWith` "range" + <|> queryRegexpQuery `taggedWith` "regexp" + <|> querySimpleQueryStringQuery `taggedWith` "simple_query_string" + <|> queryTemplateQueryInline `taggedWith` "template" + where taggedWith parser k = parser =<< o .: k + termQuery = fieldTagged $ \(FieldName fn) o -> + TermQuery <$> (Term fn <$> o .: "value") <*> o .:? "boost" + termsQuery o = case HM.toList o of + [(fn, vs)] -> do vals <- parseJSON vs + case vals of + x:xs -> return (TermsQuery fn (x :| xs)) + _ -> fail "Expected non empty list of values" + _ -> fail "Expected object with 1 field-named key" + idsQuery o = IdsQuery <$> o .: "type" + <*> o .: "values" + queryQueryStringQuery = pure . QueryQueryStringQuery + queryMatchQuery = pure . QueryMatchQuery + queryMultiMatchQuery = QueryMultiMatchQuery <$> parseJSON v + queryBoolQuery = pure . QueryBoolQuery + queryBoostingQuery = pure . QueryBoostingQuery + queryCommonTermsQuery = pure . QueryCommonTermsQuery + constantScoreFilter o = case HM.lookup "filter" o of + Just x -> ConstantScoreFilter <$> parseJSON x + <*> o .: "boost" + _ -> fail "Does not appear to be a ConstantScoreFilter" + constantScoreQuery o = case HM.lookup "query" o of + Just x -> ConstantScoreQuery <$> parseJSON x + <*> o .: "boost" + _ -> fail "Does not appear to be a ConstantScoreQuery" + queryDisMaxQuery = pure . QueryDisMaxQuery + queryFilteredQuery = pure . QueryFilteredQuery + queryFuzzyLikeThisQuery = pure . QueryFuzzyLikeThisQuery + queryFuzzyLikeFieldQuery = pure . QueryFuzzyLikeFieldQuery + queryFuzzyQuery = pure . QueryFuzzyQuery + queryHasChildQuery = pure . QueryHasChildQuery + queryHasParentQuery = pure . QueryHasParentQuery + queryIndicesQuery = pure . QueryIndicesQuery + matchAllQuery o = MatchAllQuery <$> o .:? "boost" + queryMoreLikeThisQuery = pure . QueryMoreLikeThisQuery + queryMoreLikeThisFieldQuery = pure . QueryMoreLikeThisFieldQuery + queryNestedQuery = pure . QueryNestedQuery + queryPrefixQuery = pure . QueryPrefixQuery + queryRangeQuery = pure . QueryRangeQuery + queryRegexpQuery = pure . QueryRegexpQuery + querySimpleQueryStringQuery = pure . QuerySimpleQueryStringQuery + queryTemplateQueryInline = pure . 
QueryTemplateQueryInline + +instance ToJSON SimpleQueryStringQuery where + toJSON SimpleQueryStringQuery {..} = + omitNulls (base ++ maybeAdd) + where base = [ "query" .= simpleQueryStringQuery ] + maybeAdd = [ "fields" .= simpleQueryStringField + , "default_operator" .= simpleQueryStringOperator + , "analyzer" .= simpleQueryStringAnalyzer + , "flags" .= simpleQueryStringFlags + , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded + , "locale" .= simpleQueryStringLocale ] + +instance FromJSON SimpleQueryStringQuery where + parseJSON = withObject "SimpleQueryStringQuery" parse + where parse o = SimpleQueryStringQuery <$> o .: "query" + <*> o .:? "fields" + <*> o .:? "default_operator" + <*> o .:? "analyzer" + <*> (parseFlags <$> o .:? "flags") + <*> o .:? "lowercase_expanded_terms" + <*> o .:? "locale" + parseFlags (Just (x:xs)) = Just (x :| xs) + parseFlags _ = Nothing + +instance ToJSON FieldOrFields where + toJSON (FofField fieldName) = + toJSON fieldName + toJSON (FofFields fieldNames) = + toJSON fieldNames + +instance FromJSON FieldOrFields where + parseJSON v = FofField <$> parseJSON v + <|> FofFields <$> (parseNEJSON =<< parseJSON v) + +instance ToJSON SimpleQueryFlag where + toJSON SimpleQueryAll = "ALL" + toJSON SimpleQueryNone = "NONE" + toJSON SimpleQueryAnd = "AND" + toJSON SimpleQueryOr = "OR" + toJSON SimpleQueryPrefix = "PREFIX" + toJSON SimpleQueryPhrase = "PHRASE" + toJSON SimpleQueryPrecedence = "PRECEDENCE" + toJSON SimpleQueryEscape = "ESCAPE" + toJSON SimpleQueryWhitespace = "WHITESPACE" + toJSON SimpleQueryFuzzy = "FUZZY" + toJSON SimpleQueryNear = "NEAR" + toJSON SimpleQuerySlop = "SLOP" + +instance FromJSON SimpleQueryFlag where + parseJSON = withText "SimpleQueryFlag" parse + where parse "ALL" = pure SimpleQueryAll + parse "NONE" = pure SimpleQueryNone + parse "AND" = pure SimpleQueryAnd + parse "OR" = pure SimpleQueryOr + parse "PREFIX" = pure SimpleQueryPrefix + parse "PHRASE" = pure SimpleQueryPhrase + parse "PRECEDENCE" = pure SimpleQueryPrecedence + parse "ESCAPE" = pure SimpleQueryEscape + parse "WHITESPACE" = pure SimpleQueryWhitespace + parse "FUZZY" = pure SimpleQueryFuzzy + parse "NEAR" = pure SimpleQueryNear + parse "SLOP" = pure SimpleQuerySlop + parse f = fail ("Unexpected SimpleQueryFlag: " <> show f) + +instance ToJSON RegexpQuery where + toJSON (RegexpQuery (FieldName rqQueryField) + (Regexp regexpQueryQuery) rqQueryFlags + rqQueryBoost) = + object [ rqQueryField .= omitNulls base ] + where base = [ "value" .= regexpQueryQuery + , "flags" .= rqQueryFlags + , "boost" .= rqQueryBoost ] + +instance FromJSON RegexpQuery where + parseJSON = withObject "RegexpQuery" parse + where parse = fieldTagged $ \fn o -> + RegexpQuery fn + <$> o .: "value" + <*> o .: "flags" + <*> o .:? 
"boost" + +instance ToJSON QueryStringQuery where + toJSON (QueryStringQuery qsQueryString + qsDefaultField qsOperator + qsAnalyzer qsAllowWildcard + qsLowercaseExpanded qsEnablePositionIncrements + qsFuzzyMaxExpansions qsFuzziness + qsFuzzyPrefixLength qsPhraseSlop + qsBoost qsAnalyzeWildcard + qsGeneratePhraseQueries qsMinimumShouldMatch + qsLenient qsLocale) = + omitNulls base + where + base = [ "query" .= qsQueryString + , "default_field" .= qsDefaultField + , "default_operator" .= qsOperator + , "analyzer" .= qsAnalyzer + , "allow_leading_wildcard" .= qsAllowWildcard + , "lowercase_expanded_terms" .= qsLowercaseExpanded + , "enable_position_increments" .= qsEnablePositionIncrements + , "fuzzy_max_expansions" .= qsFuzzyMaxExpansions + , "fuzziness" .= qsFuzziness + , "fuzzy_prefix_length" .= qsFuzzyPrefixLength + , "phrase_slop" .= qsPhraseSlop + , "boost" .= qsBoost + , "analyze_wildcard" .= qsAnalyzeWildcard + , "auto_generate_phrase_queries" .= qsGeneratePhraseQueries + , "minimum_should_match" .= qsMinimumShouldMatch + , "lenient" .= qsLenient + , "locale" .= qsLocale ] + +instance FromJSON QueryStringQuery where + parseJSON = withObject "QueryStringQuery" parse + where parse o = QueryStringQuery + <$> o .: "query" + <*> o .:? "default_field" + <*> o .:? "default_operator" + <*> o .:? "analyzer" + <*> o .:? "allow_leading_wildcard" + <*> o .:? "lowercase_expanded_terms" + <*> o .:? "enable_position_increments" + <*> o .:? "fuzzy_max_expansions" + <*> o .:? "fuzziness" + <*> o .:? "fuzzy_prefix_length" + <*> o .:? "phrase_slop" + <*> o .:? "boost" + <*> o .:? "analyze_wildcard" + <*> o .:? "auto_generate_phrase_queries" + <*> o .:? "minimum_should_match" + <*> o .:? "lenient" + <*> o .:? "locale" + +instance ToJSON RangeQuery where + toJSON (RangeQuery (FieldName fieldName) range boost) = + object [ fieldName .= object conjoined ] + where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range) + +instance FromJSON RangeQuery where + parseJSON = withObject "RangeQuery" parse + where parse = fieldTagged $ \fn o -> + RangeQuery fn + <$> parseJSON (Object o) + <*> o .: "boost" + +instance FromJSON RangeValue where + parseJSON = withObject "RangeValue" parse + where parse o = parseDate o + <|> parseDouble o + parseDate o = do lt <- o .:? "lt" + lte <- o .:? "lte" + gt <- o .:? "gt" + gte <- o .:? "gte" + case (lt, lte, gt, gte) of + (Just a, _, Just b, _) -> return (RangeDateGtLt (GreaterThanD b) (LessThanD a)) + (Just a, _, _, Just b)-> return (RangeDateGteLt (GreaterThanEqD b) (LessThanD a)) + (_, Just a, Just b, _)-> return (RangeDateGtLte (GreaterThanD b) (LessThanEqD a)) + (_, Just a, _, Just b)-> return (RangeDateGteLte (GreaterThanEqD b) (LessThanEqD a)) + (_, _, Just a, _)-> return (RangeDateGt (GreaterThanD a)) + (Just a, _, _, _)-> return (RangeDateLt (LessThanD a)) + (_, _, _, Just a)-> return (RangeDateGte (GreaterThanEqD a)) + (_, Just a, _, _)-> return (RangeDateLte (LessThanEqD a)) + (Nothing, Nothing, Nothing, Nothing) -> mzero + parseDouble o = do lt <- o .:? "lt" + lte <- o .:? "lte" + gt <- o .:? "gt" + gte <- o .:? 
"gte" + case (lt, lte, gt, gte) of + (Just a, _, Just b, _) -> return (RangeDoubleGtLt (GreaterThan b) (LessThan a)) + (Just a, _, _, Just b)-> return (RangeDoubleGteLt (GreaterThanEq b) (LessThan a)) + (_, Just a, Just b, _)-> return (RangeDoubleGtLte (GreaterThan b) (LessThanEq a)) + (_, Just a, _, Just b)-> return (RangeDoubleGteLte (GreaterThanEq b) (LessThanEq a)) + (_, _, Just a, _)-> return (RangeDoubleGt (GreaterThan a)) + (Just a, _, _, _)-> return (RangeDoubleLt (LessThan a)) + (_, _, _, Just a)-> return (RangeDoubleGte (GreaterThanEq a)) + (_, Just a, _, _)-> return (RangeDoubleLte (LessThanEq a)) + (Nothing, Nothing, Nothing, Nothing) -> mzero + +instance ToJSON PrefixQuery where + toJSON (PrefixQuery (FieldName fieldName) queryValue boost) = + object [ fieldName .= omitNulls base ] + where base = [ "value" .= queryValue + , "boost" .= boost ] + +instance FromJSON PrefixQuery where + parseJSON = withObject "PrefixQuery" parse + where parse = fieldTagged $ \fn o -> + PrefixQuery fn + <$> o .: "value" + <*> o .:? "boost" + +instance ToJSON NestedQuery where + toJSON (NestedQuery nqPath nqScoreType nqQuery) = + object [ "path" .= nqPath + , "score_mode" .= nqScoreType + , "query" .= nqQuery ] + +instance FromJSON NestedQuery where + parseJSON = withObject "NestedQuery" parse + where parse o = NestedQuery + <$> o .: "path" + <*> o .: "score_mode" + <*> o .: "query" + +instance ToJSON MoreLikeThisFieldQuery where + toJSON (MoreLikeThisFieldQuery text (FieldName fieldName) + percent mtf mqt stopwords mindf maxdf + minwl maxwl boostTerms boost analyzer) = + object [ fieldName .= omitNulls base ] + where base = [ "like_text" .= text + , "percent_terms_to_match" .= percent + , "min_term_freq" .= mtf + , "max_query_terms" .= mqt + , "stop_words" .= stopwords + , "min_doc_freq" .= mindf + , "max_doc_freq" .= maxdf + , "min_word_length" .= minwl + , "max_word_length" .= maxwl + , "boost_terms" .= boostTerms + , "boost" .= boost + , "analyzer" .= analyzer ] + +instance FromJSON MoreLikeThisFieldQuery where + parseJSON = withObject "MoreLikeThisFieldQuery" parse + where parse = fieldTagged $ \fn o -> + MoreLikeThisFieldQuery + <$> o .: "like_text" + <*> pure fn + <*> o .:? "percent_terms_to_match" + <*> o .:? "min_term_freq" + <*> o .:? "max_query_terms" + -- <*> (optionalNE =<< o .:? "stop_words") + <*> o .:? "stop_words" + <*> o .:? "min_doc_freq" + <*> o .:? "max_doc_freq" + <*> o .:? "min_word_length" + <*> o .:? "max_word_length" + <*> o .:? "boost_terms" + <*> o .:? "boost" + <*> o .:? "analyzer" + -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) + +instance ToJSON MoreLikeThisQuery where + toJSON (MoreLikeThisQuery text fields percent + mtf mqt stopwords mindf maxdf + minwl maxwl boostTerms boost analyzer) = + omitNulls base + where base = [ "like_text" .= text + , "fields" .= fields + , "percent_terms_to_match" .= percent + , "min_term_freq" .= mtf + , "max_query_terms" .= mqt + , "stop_words" .= stopwords + , "min_doc_freq" .= mindf + , "max_doc_freq" .= maxdf + , "min_word_length" .= minwl + , "max_word_length" .= maxwl + , "boost_terms" .= boostTerms + , "boost" .= boost + , "analyzer" .= analyzer ] + +instance FromJSON MoreLikeThisQuery where + parseJSON = withObject "MoreLikeThisQuery" parse + where parse o = MoreLikeThisQuery + <$> o .: "like_text" + -- <*> (optionalNE =<< o .:? "fields") + <*> o .:? "fields" + <*> o .:? "percent_terms_to_match" + <*> o .:? "min_term_freq" + <*> o .:? "max_query_terms" + -- <*> (optionalNE =<< o .:? "stop_words") + <*> o .:? 
"stop_words" + <*> o .:? "min_doc_freq" + <*> o .:? "max_doc_freq" + <*> o .:? "min_word_length" + <*> o .:? "max_word_length" + <*> o .:? "boost_terms" + <*> o .:? "boost" + <*> o .:? "analyzer" + -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) + +instance ToJSON IndicesQuery where + toJSON (IndicesQuery indices query noMatch) = + omitNulls [ "indices" .= indices + , "no_match_query" .= noMatch + , "query" .= query ] + +instance FromJSON IndicesQuery where + parseJSON = withObject "IndicesQuery" parse + where parse o = IndicesQuery + <$> o .:? "indices" .!= [] + <*> o .: "query" + <*> o .:? "no_match_query" + +instance ToJSON HasParentQuery where + toJSON (HasParentQuery queryType query scoreType) = + omitNulls [ "parent_type" .= queryType + , "score_type" .= scoreType + , "query" .= query ] + +instance FromJSON HasParentQuery where + parseJSON = withObject "HasParentQuery" parse + where parse o = HasParentQuery + <$> o .: "parent_type" + <*> o .: "query" + <*> o .:? "score_type" + +instance ToJSON HasChildQuery where + toJSON (HasChildQuery queryType query scoreType) = + omitNulls [ "query" .= query + , "score_type" .= scoreType + , "type" .= queryType ] + +instance FromJSON HasChildQuery where + parseJSON = withObject "HasChildQuery" parse + where parse o = HasChildQuery + <$> o .: "type" + <*> o .: "query" + <*> o .:? "score_type" + +instance ToJSON FuzzyQuery where + toJSON (FuzzyQuery (FieldName fieldName) queryText + prefixLength maxEx fuzziness boost) = + object [ fieldName .= omitNulls base ] + where base = [ "value" .= queryText + , "fuzziness" .= fuzziness + , "prefix_length" .= prefixLength + , "boost" .= boost + , "max_expansions" .= maxEx ] + +instance FromJSON FuzzyQuery where + parseJSON = withObject "FuzzyQuery" parse + where parse = fieldTagged $ \fn o -> + FuzzyQuery fn + <$> o .: "value" + <*> o .: "prefix_length" + <*> o .: "max_expansions" + <*> o .: "fuzziness" + <*> o .:? "boost" + +instance ToJSON FuzzyLikeFieldQuery where + toJSON (FuzzyLikeFieldQuery (FieldName fieldName) + fieldText maxTerms ignoreFreq fuzziness prefixLength + boost analyzer) = + object [ fieldName .= + omitNulls [ "like_text" .= fieldText + , "max_query_terms" .= maxTerms + , "ignore_tf" .= ignoreFreq + , "fuzziness" .= fuzziness + , "prefix_length" .= prefixLength + , "analyzer" .= analyzer + , "boost" .= boost ]] + +instance FromJSON FuzzyLikeFieldQuery where + parseJSON = withObject "FuzzyLikeFieldQuery" parse + where parse = fieldTagged $ \fn o -> + FuzzyLikeFieldQuery fn + <$> o .: "like_text" + <*> o .: "max_query_terms" + <*> o .: "ignore_tf" + <*> o .: "fuzziness" + <*> o .: "prefix_length" + <*> o .: "boost" + <*> o .:? "analyzer" + +instance ToJSON FuzzyLikeThisQuery where + toJSON (FuzzyLikeThisQuery fields text maxTerms + ignoreFreq fuzziness prefixLength boost analyzer) = + omitNulls base + where base = [ "fields" .= fields + , "like_text" .= text + , "max_query_terms" .= maxTerms + , "ignore_tf" .= ignoreFreq + , "fuzziness" .= fuzziness + , "prefix_length" .= prefixLength + , "analyzer" .= analyzer + , "boost" .= boost ] + +instance FromJSON FuzzyLikeThisQuery where + parseJSON = withObject "FuzzyLikeThisQuery" parse + where parse o = FuzzyLikeThisQuery + <$> o .:? "fields" .!= [] + <*> o .: "like_text" + <*> o .: "max_query_terms" + <*> o .: "ignore_tf" + <*> o .: "fuzziness" + <*> o .: "prefix_length" + <*> o .: "boost" + <*> o .:? 
"analyzer" + +instance ToJSON FilteredQuery where + toJSON (FilteredQuery query fFilter) = + object [ "query" .= query + , "filter" .= fFilter ] + +instance FromJSON FilteredQuery where + parseJSON = withObject "FilteredQuery" parse + where parse o = FilteredQuery + <$> o .: "query" + <*> o .: "filter" + +instance ToJSON DisMaxQuery where + toJSON (DisMaxQuery queries tiebreaker boost) = + omitNulls base + where base = [ "queries" .= queries + , "boost" .= boost + , "tie_breaker" .= tiebreaker ] + +instance FromJSON DisMaxQuery where + parseJSON = withObject "DisMaxQuery" parse + where parse o = DisMaxQuery + <$> o .:? "queries" .!= [] + <*> o .: "tie_breaker" + <*> o .:? "boost" + +instance ToJSON CommonTermsQuery where + toJSON (CommonTermsQuery (FieldName fieldName) + (QueryString query) cf lfo hfo msm + boost analyzer disableCoord) = + object [fieldName .= omitNulls base ] + where base = [ "query" .= query + , "cutoff_frequency" .= cf + , "low_freq_operator" .= lfo + , "minimum_should_match" .= msm + , "boost" .= boost + , "analyzer" .= analyzer + , "disable_coord" .= disableCoord + , "high_freq_operator" .= hfo ] + +instance FromJSON CommonTermsQuery where + parseJSON = withObject "CommonTermsQuery" parse + where parse = fieldTagged $ \fn o -> + CommonTermsQuery fn + <$> o .: "query" + <*> o .: "cutoff_frequency" + <*> o .: "low_freq_operator" + <*> o .: "high_freq_operator" + <*> o .:? "minimum_should_match" + <*> o .:? "boost" + <*> o .:? "analyzer" + <*> o .:? "disable_coord" + +instance ToJSON CommonMinimumMatch where + toJSON (CommonMinimumMatch mm) = toJSON mm + toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) = + object [ "low_freq" .= lowF + , "high_freq" .= highF ] + +instance FromJSON CommonMinimumMatch where + parseJSON v = parseMinimum v + <|> parseMinimumHighLow v + where parseMinimum = fmap CommonMinimumMatch . parseJSON + parseMinimumHighLow = fmap CommonMinimumMatchHighLow . withObject "CommonMinimumMatchHighLow" (\o -> + MinimumMatchHighLow + <$> o .: "low_freq" + <*> o .: "high_freq") + + +instance ToJSON BoostingQuery where + toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) = + object [ "positive" .= bqPositiveQuery + , "negative" .= bqNegativeQuery + , "negative_boost" .= bqNegativeBoost ] + +instance FromJSON BoostingQuery where + parseJSON = withObject "BoostingQuery" parse + where parse o = BoostingQuery + <$> o .: "positive" + <*> o .: "negative" + <*> o .: "negative_boost" + +instance ToJSON BoolQuery where + toJSON (BoolQuery mustM notM shouldM bqMin boost disableCoord) = + omitNulls base + where base = [ "must" .= mustM + , "must_not" .= notM + , "should" .= shouldM + , "minimum_should_match" .= bqMin + , "boost" .= boost + , "disable_coord" .= disableCoord ] + +instance FromJSON BoolQuery where + parseJSON = withObject "BoolQuery" parse + where parse o = BoolQuery + <$> o .:? "must" .!= [] + <*> o .:? "must_not" .!= [] + <*> o .:? "should" .!= [] + <*> o .:? "minimum_should_match" + <*> o .:? "boost" + <*> o .:? 
"disable_coord" + +instance ToJSON MatchQuery where + toJSON (MatchQuery (FieldName fieldName) + (QueryString mqQueryString) booleanOperator + zeroTermsQuery cutoffFrequency matchQueryType + analyzer maxExpansions lenient boost) = + object [ fieldName .= omitNulls base ] + where base = [ "query" .= mqQueryString + , "operator" .= booleanOperator + , "zero_terms_query" .= zeroTermsQuery + , "cutoff_frequency" .= cutoffFrequency + , "type" .= matchQueryType + , "analyzer" .= analyzer + , "max_expansions" .= maxExpansions + , "lenient" .= lenient + , "boost" .= boost ] + +instance FromJSON MatchQuery where + parseJSON = withObject "MatchQuery" parse + where parse = fieldTagged $ \fn o -> + MatchQuery fn + <$> o .: "query" + <*> o .: "operator" + <*> o .: "zero_terms_query" + <*> o .:? "cutoff_frequency" + <*> o .:? "type" + <*> o .:? "analyzer" + <*> o .:? "max_expansions" + <*> o .:? "lenient" + <*> o .:? "boost" + +instance ToJSON MultiMatchQuery where + toJSON (MultiMatchQuery fields (QueryString query) boolOp + ztQ tb mmqt cf analyzer maxEx lenient) = + object ["multi_match" .= omitNulls base] + where base = [ "fields" .= fmap toJSON fields + , "query" .= query + , "operator" .= boolOp + , "zero_terms_query" .= ztQ + , "tie_breaker" .= tb + , "type" .= mmqt + , "cutoff_frequency" .= cf + , "analyzer" .= analyzer + , "max_expansions" .= maxEx + , "lenient" .= lenient ] + +instance FromJSON MultiMatchQuery where + parseJSON = withObject "MultiMatchQuery" parse + where parse raw = do o <- raw .: "multi_match" + MultiMatchQuery + <$> o .:? "fields" .!= [] + <*> o .: "query" + <*> o .: "operator" + <*> o .: "zero_terms_query" + <*> o .:? "tie_breaker" + <*> o .:? "type" + <*> o .:? "cutoff_frequency" + <*> o .:? "analyzer" + <*> o .:? "max_expansions" + <*> o .:? 
"lenient" + +instance ToJSON Filter where + toJSON (AndFilter filters cache) = + object ["and" .= + object [ "filters" .= fmap toJSON filters + , "_cache" .= cache]] + + toJSON (OrFilter filters cache) = + object ["or" .= + object [ "filters" .= fmap toJSON filters + , "_cache" .= cache]] + + toJSON (NotFilter notFilter cache) = + object ["not" .= + object ["filter" .= notFilter + , "_cache" .= cache]] + + toJSON (IdentityFilter) = + object ["match_all" .= object []] + + toJSON (TermFilter (Term termFilterField termFilterValue) cache) = + object ["term" .= object base] + where base = [termFilterField .= termFilterValue, + "_cache" .= cache] + + toJSON (ExistsFilter (FieldName fieldName)) = + object ["exists" .= object + ["field" .= fieldName]] + + toJSON (BoolFilter boolMatch) = + object ["bool" .= boolMatch] + + toJSON (GeoBoundingBoxFilter bbConstraint) = + object ["geo_bounding_box" .= bbConstraint] + + toJSON (GeoDistanceFilter (GeoPoint (FieldName distanceGeoField) geoDistLatLon) + distance distanceType optimizeBbox cache) = + object ["geo_distance" .= + object ["distance" .= distance + , "distance_type" .= distanceType + , "optimize_bbox" .= optimizeBbox + , distanceGeoField .= geoDistLatLon + , "_cache" .= cache]] + + toJSON (GeoDistanceRangeFilter (GeoPoint (FieldName gddrField) drLatLon) + (DistanceRange geoDistRangeDistFrom drDistanceTo)) = + object ["geo_distance_range" .= + object ["from" .= geoDistRangeDistFrom + , "to" .= drDistanceTo + , gddrField .= drLatLon]] + + toJSON (GeoPolygonFilter (FieldName geoPolygonFilterField) latLons) = + object ["geo_polygon" .= + object [geoPolygonFilterField .= + object ["points" .= fmap toJSON latLons]]] + + toJSON (IdsFilter (MappingName mappingName) values) = + object ["ids" .= + object ["type" .= mappingName + , "values" .= fmap unpackId values]] + + toJSON (LimitFilter limit) = + object ["limit" .= object ["value" .= limit]] + + toJSON (MissingFilter (FieldName fieldName) (Existence existence) (NullValue nullValue)) = + object ["missing" .= + object [ "field" .= fieldName + , "existence" .= existence + , "null_value" .= nullValue]] + + toJSON (PrefixFilter (FieldName fieldName) fieldValue cache) = + object ["prefix" .= + object [fieldName .= fieldValue + , "_cache" .= cache]] + + toJSON (QueryFilter query False) = + object ["query" .= toJSON query ] + toJSON (QueryFilter query True) = + object ["fquery" .= + object [ "query" .= toJSON query + , "_cache" .= True ]] + + toJSON (RangeFilter (FieldName fieldName) rangeValue rangeExecution cache) = + object ["range" .= + object [ fieldName .= object (rangeValueToPair rangeValue) + , "execution" .= rangeExecution + , "_cache" .= cache]] + + toJSON (RegexpFilter (FieldName fieldName) + (Regexp regexText) flags (CacheName cacheName) cache (CacheKey cacheKey)) = + object ["regexp" .= + object [fieldName .= + object ["value" .= regexText + , "flags" .= flags] + , "_name" .= cacheName + , "_cache" .= cache + , "_cache_key" .= cacheKey]] + +instance FromJSON Filter where + parseJSON = withObject "Filter" parse + where parse o = andFilter `taggedWith` "and" + <|> orFilter `taggedWith` "or" + <|> notFilter `taggedWith` "not" + <|> identityFilter `taggedWith` "match_all" + <|> boolFilter `taggedWith` "bool" + <|> existsFilter `taggedWith` "exists" + <|> geoBoundingBoxFilter `taggedWith` "geo_bounding_box" + <|> geoDistanceFilter `taggedWith` "geo_distance" + <|> geoDistanceRangeFilter `taggedWith` "geo_distance_range" + <|> geoPolygonFilter `taggedWith` "geo_polygon" + <|> idsFilter `taggedWith` "ids" 
+                     <|> limitFilter `taggedWith` "limit"
+                     <|> missingFilter `taggedWith` "missing"
+                     <|> prefixFilter `taggedWith` "prefix"
+                     <|> queryFilter `taggedWith` "query"
+                     <|> fqueryFilter `taggedWith` "fquery"
+                     <|> rangeFilter `taggedWith` "range"
+                     <|> regexpFilter `taggedWith` "regexp"
+                     <|> termFilter `taggedWith` "term"
+            where taggedWith parser k = parser =<< o .: k
+                  andFilter o = AndFilter <$> o .: "filters"
+                                          <*> o .:? "_cache" .!= defaultCache
+                  orFilter o = OrFilter <$> o .: "filters"
+                                        <*> o .:? "_cache" .!= defaultCache
+                  -- "_cache" is optional in the payload, so it has to be
+                  -- read with .:? ( .!= only composes with .:? )
+                  notFilter o = NotFilter <$> o .: "filter"
+                                          <*> o .:? "_cache" .!= defaultCache
+                  identityFilter :: Object -> Parser Filter
+                  identityFilter m
+                    | HM.null m = pure IdentityFilter
+                    | otherwise = fail ("IdentityFilter expected empty object but got " <> show m)
+                  boolFilter = pure . BoolFilter
+                  existsFilter o = ExistsFilter <$> o .: "field"
+                  geoBoundingBoxFilter = pure . GeoBoundingBoxFilter
+                  geoDistanceFilter o =
+                    case HM.toList (deleteSeveral ["distance", "distance_type", "optimize_bbox", "_cache"] o) of
+                      [(fn, v)] -> do
+                        gp <- GeoPoint (FieldName fn) <$> parseJSON v
+                        GeoDistanceFilter gp <$> o .: "distance"
+                                             <*> o .: "distance_type"
+                                             <*> o .: "optimize_bbox"
+                                             <*> o .:? "_cache" .!= defaultCache
+                      _ -> fail "Could not find GeoDistanceFilter field name"
+                  geoDistanceRangeFilter o =
+                    case HM.toList (deleteSeveral ["from", "to"] o) of
+                      [(fn, v)] -> do
+                        gp <- GeoPoint (FieldName fn) <$> parseJSON v
+                        rng <- DistanceRange <$> o .: "from" <*> o .: "to"
+                        return (GeoDistanceRangeFilter gp rng)
+                      _ -> fail "Could not find GeoDistanceRangeFilter field name"
+                  geoPolygonFilter = fieldTagged $ \fn o -> GeoPolygonFilter fn <$> o .: "points"
+                  idsFilter o = IdsFilter <$> o .: "type"
+                                          <*> o .: "values"
+                  limitFilter o = LimitFilter <$> o .: "value"
+                  missingFilter o = MissingFilter <$> o .: "field"
+                                                  <*> o .: "existence"
+                                                  <*> o .: "null_value"
+                  prefixFilter o = case HM.toList (HM.delete "_cache" o) of
+                    [(fn, String v)] -> PrefixFilter (FieldName fn) v <$> o .:? "_cache" .!= defaultCache
+                    _ -> fail "Could not parse PrefixFilter"
+                  queryFilter q = pure (QueryFilter q False)
+                  fqueryFilter o = QueryFilter <$> o .: "query" <*> pure True
+                  rangeFilter o = case HM.toList (deleteSeveral ["execution", "_cache"] o) of
+                    [(fn, v)] -> RangeFilter (FieldName fn)
+                                 <$> parseJSON v
+                                 <*> o .: "execution"
+                                 <*> o .:? "_cache" .!= defaultCache
+                    _ -> fail "Could not find field name for RangeFilter"
+                  regexpFilter o = case HM.toList (deleteSeveral ["_name", "_cache", "_cache_key"] o) of
+                    [(fn, Object o')] -> RegexpFilter (FieldName fn)
+                                         <$> o' .: "value"
+                                         <*> o' .: "flags"
+                                         <*> o .: "_name"
+                                         <*> o .:? "_cache" .!= defaultCache
+                                         <*> o .: "_cache_key"
+                    _ -> fail "Could not find field name for RegexpFilter"
+                  termFilter o = case HM.toList (HM.delete "_cache" o) of
+                    [(termField, String termVal)] -> TermFilter (Term termField termVal)
+                                                     <$> o .:? "_cache" .!= defaultCache
+                    _ -> fail "Could not find term field for TermFilter"
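+-- A sketch of the dispatch above (illustrative only): the parser
+-- branches on the single recognized top-level key, so
+--
+--   {"term": {"user": "bitemyapp", "_cache": true}}
+--
+-- decodes to
+--
+--   TermFilter (Term "user" "bitemyapp") True
+--
+-- and {"match_all": {}} decodes to IdentityFilter.
+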
"_cache" .!= defaultCache + _ -> fail "Could not find term field for TermFilter" + +instance ToJSON BooleanOperator where + toJSON And = String "and" + toJSON Or = String "or" + +instance FromJSON BooleanOperator where + parseJSON = withText "BooleanOperator" parse + where parse "and" = pure And + parse "or" = pure Or + parse o = fail ("Unexpected BooleanOperator: " <> show o) + +instance ToJSON ZeroTermsQuery where + toJSON ZeroTermsNone = String "none" + toJSON ZeroTermsAll = String "all" + +instance FromJSON ZeroTermsQuery where + parseJSON = withText "ZeroTermsQuery" parse + where parse "none" = pure ZeroTermsNone + parse "all" = pure ZeroTermsAll + parse q = fail ("Unexpected ZeroTermsQuery: " <> show q) + +fieldTagged :: Monad m => (FieldName -> Object -> m a) -> Object -> m a +fieldTagged f o = case HM.toList o of + [(k, Object o')] -> f (FieldName k) o' + _ -> fail "Expected object with 1 field-named key" + +instance ToJSON RangeExecution where + toJSON RangeExecutionIndex = "index" + toJSON RangeExecutionFielddata = "fielddata" + + +instance FromJSON RangeExecution where + parseJSON = withText "RangeExecution" parse + where parse "index" = pure RangeExecutionIndex + parse "fielddata" = pure RangeExecutionFielddata + parse t = error ("Unrecognized RangeExecution " <> show t) + +instance ToJSON RegexpFlags where + toJSON AllRegexpFlags = String "ALL" + toJSON NoRegexpFlags = String "NONE" + toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs + where flagStrs = map flagStr . nub $ h:fs + flagStr AnyString = "ANYSTRING" + flagStr Automaton = "AUTOMATON" + flagStr Complement = "COMPLEMENT" + flagStr Empty = "EMPTY" + flagStr Intersection = "INTERSECTION" + flagStr Interval = "INTERVAL" + +instance FromJSON RegexpFlags where + parseJSON = withText "RegexpFlags" parse + where parse "ALL" = pure AllRegexpFlags + parse "NONE" = pure NoRegexpFlags + parse t = SomeRegexpFlags <$> parseNEJSON (String <$> T.splitOn "|" t) + +instance FromJSON RegexpFlag where + parseJSON = withText "RegexpFlag" parse + where parse "ANYSTRING" = pure AnyString + parse "AUTOMATON" = pure Automaton + parse "COMPLEMENT" = pure Complement + parse "EMPTY" = pure Empty + parse "INTERSECTION" = pure Intersection + parse "INTERVAL" = pure Interval + parse f = fail ("Unknown RegexpFlag: " <> show f) + +rangeValueToPair :: RangeValue -> [Pair] +rangeValueToPair rv = case rv of + RangeDateLte (LessThanEqD t) -> ["lte" .= t] + RangeDateGte (GreaterThanEqD t) -> ["gte" .= t] + RangeDateLt (LessThanD t) -> ["lt" .= t] + RangeDateGt (GreaterThanD t) -> ["gt" .= t] + RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g] + RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g] + RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g] + RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g] + RangeDoubleLte (LessThanEq t) -> ["lte" .= t] + RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t] + RangeDoubleLt (LessThan t) -> ["lt" .= t] + RangeDoubleGt (GreaterThan t) -> ["gt" .= t] + RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g] + RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g] + RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g] + RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g] + +instance ToJSON ScoreType where + toJSON ScoreTypeMax = "max" + toJSON ScoreTypeAvg = "avg" + toJSON ScoreTypeSum = "sum" + toJSON ScoreTypeNone = 
"none" + +instance FromJSON ScoreType where + parseJSON = withText "ScoreType" parse + where parse "max" = pure ScoreTypeMax + parse "avg" = pure ScoreTypeAvg + parse "sum" = pure ScoreTypeSum + parse "none" = pure ScoreTypeNone + parse t = fail ("Unexpected ScoreType: " <> show t) + +instance ToJSON MatchQueryType where + toJSON MatchPhrase = "phrase" + toJSON MatchPhrasePrefix = "phrase_prefix" + +instance FromJSON MatchQueryType where + parseJSON = withText "MatchQueryType" parse + where parse "phrase" = pure MatchPhrase + parse "phrase_prefix" = pure MatchPhrasePrefix + parse t = fail ("Unexpected MatchQueryType: " <> show t) + +instance ToJSON MultiMatchQueryType where + toJSON MultiMatchBestFields = "best_fields" + toJSON MultiMatchMostFields = "most_fields" + toJSON MultiMatchCrossFields = "cross_fields" + toJSON MultiMatchPhrase = "phrase" + toJSON MultiMatchPhrasePrefix = "phrase_prefix" + +instance FromJSON MultiMatchQueryType where + parseJSON = withText "MultiMatchPhrasePrefix" parse + where parse "best_fields" = pure MultiMatchBestFields + parse "most_fields" = pure MultiMatchMostFields + parse "cross_fields" = pure MultiMatchCrossFields + parse "phrase" = pure MultiMatchPhrase + parse "phrase_prefix" = pure MultiMatchPhrasePrefix + parse t = fail ("Unexpected MultiMatchPhrasePrefix: " <> show t) + +defaultCache :: Cache +defaultCache = False + +instance ToJSON BoolMatch where + toJSON (MustMatch term cache) = object ["must" .= term, + "_cache" .= cache] + toJSON (MustNotMatch term cache) = object ["must_not" .= term, + "_cache" .= cache] + toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms, + "_cache" .= cache] + +instance FromJSON BoolMatch where + parseJSON = withObject "BoolMatch" parse + where parse o = mustMatch `taggedWith` "must" + <|> mustNotMatch `taggedWith` "must_not" + <|> shouldMatch `taggedWith` "should" + where taggedWith parser k = parser =<< o .: k + mustMatch t = MustMatch t <$> o .:? "_cache" .!= defaultCache + mustNotMatch t = MustNotMatch t <$> o .:? "_cache" .!= defaultCache + shouldMatch t = ShouldMatch t <$> o .:? "_cache" .!= defaultCache + +instance ToJSON GeoBoundingBoxConstraint where + toJSON (GeoBoundingBoxConstraint + (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') = + object [gbbcGeoBBField .= gbbcConstraintBox + , "_cache" .= cache + , "type" .= type'] + +instance FromJSON GeoBoundingBoxConstraint where + parseJSON = withObject "GeoBoundingBoxConstraint" parse + where parse o = case HM.toList (deleteSeveral ["type", "_cache"] o) of + [(fn, v)] -> GeoBoundingBoxConstraint (FieldName fn) + <$> parseJSON v + <*> o .:? "_cache" .!= defaultCache + <*> o .: "type" + _ -> fail "Could not find field name for GeoBoundingBoxConstraint" + +instance ToJSON Distance where + toJSON (Distance dCoefficient dUnit) = + String boltedTogether where + coefText = showText dCoefficient + (String unitText) = toJSON dUnit + boltedTogether = mappend coefText unitText + +instance FromJSON Distance where + parseJSON = withText "Distance" parse + where parse t = Distance <$> parseCoeff nT + <*> parseJSON (String unitT) + where (nT, unitT) = T.span validForNumber t + -- may be a better way to do this + validForNumber '-' = True + validForNumber '.' 
+instance ToJSON Distance where
+  toJSON (Distance dCoefficient dUnit) =
+    String boltedTogether where
+      coefText = showText dCoefficient
+      (String unitText) = toJSON dUnit
+      boltedTogether = mappend coefText unitText
+
+instance FromJSON Distance where
+  parseJSON = withText "Distance" parse
+    where parse t = Distance <$> parseCoeff nT
+                             <*> parseJSON (String unitT)
+            where (nT, unitT) = T.span validForNumber t
+                  -- may be a better way to do this
+                  validForNumber '-' = True
+                  validForNumber '.' = True
+                  validForNumber 'e' = True
+                  validForNumber c = isNumber c
+                  parseCoeff "" = fail "Empty string cannot be parsed as number"
+                  -- readMay (as used in Internal.StringlyTyped) lets a
+                  -- malformed coefficient fail the parser instead of
+                  -- crashing through the partial 'read'
+                  parseCoeff s = case readMay (T.unpack s) of
+                    Just d  -> return d
+                    Nothing -> fail ("Could not parse as number: " <> show s)
+
+instance ToJSON DistanceType where
+  toJSON Arc       = String "arc"
+  toJSON SloppyArc = String "sloppy_arc"
+  toJSON Plane     = String "plane"
+
+instance FromJSON DistanceType where
+  parseJSON = withText "DistanceType" parse
+    where parse "arc"        = pure Arc
+          parse "sloppy_arc" = pure SloppyArc
+          parse "plane"      = pure Plane
+          parse t = fail ("Unrecognized DistanceType: " <> show t)
+
+instance ToJSON OptimizeBbox where
+  toJSON NoOptimizeBbox = String "none"
+  toJSON (OptimizeGeoFilterType gft) = toJSON gft
+
+instance FromJSON OptimizeBbox where
+  parseJSON v = withText "NoOptimizeBbox" parseNoOptimize v
+            <|> parseOptimize v
+    where parseNoOptimize "none" = pure NoOptimizeBbox
+          parseNoOptimize _ = mzero
+          parseOptimize = fmap OptimizeGeoFilterType . parseJSON
+
+instance ToJSON Term where
+  toJSON (Term field value) = object ["term" .= object [field .= value]]
+
+instance FromJSON Term where
+  parseJSON = withObject "Term" parse
+    where parse o = do termObj <- o .: "term"
+                       case HM.toList termObj of
+                         [(fn, v)] -> Term fn <$> parseJSON v
+                         _ -> fail "Expected object with 1 field-named key"
+
+instance ToJSON GeoBoundingBox where
+  toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) =
+    object [ "top_left"     .= gbbTopLeft
+           , "bottom_right" .= gbbBottomRight ]
+
+instance FromJSON GeoBoundingBox where
+  parseJSON = withObject "GeoBoundingBox" parse
+    where parse o = GeoBoundingBox
+                    <$> o .: "top_left"
+                    <*> o .: "bottom_right"
+
+instance ToJSON GeoFilterType where
+  toJSON GeoFilterMemory  = String "memory"
+  toJSON GeoFilterIndexed = String "indexed"
+
+instance FromJSON GeoFilterType where
+  parseJSON = withText "GeoFilterType" parse
+    where parse "memory"  = pure GeoFilterMemory
+          parse "indexed" = pure GeoFilterIndexed
+          parse t = fail ("Unrecognized GeoFilterType: " <> show t)
+
+{-| 'unpackId' is a silly convenience function that gets used once.
+-}
+unpackId :: DocId -> Text
+unpackId (DocId docId) = docId
diff --git a/src/Database/V1/Bloodhound/Internal/Sort.hs b/src/Database/V1/Bloodhound/Internal/Sort.hs
new file mode 100644
index 0000000..3c29847
--- /dev/null
+++ b/src/Database/V1/Bloodhound/Internal/Sort.hs
@@ -0,0 +1,106 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Database.V1.Bloodhound.Internal.Sort where
+
+
+import Bloodhound.Import
+
+import Database.V1.Bloodhound.Internal.Newtypes
+import Database.V1.Bloodhound.Internal.Query
+
+
+{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
+    dependent with later sorts acting as tie-breakers for earlier sorts.
+-}
+type Sort = [SortSpec]
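+-- An illustrative example, using 'mkSort' and the constructors defined
+-- below: sort by "created_at" descending, breaking ties on "user":
+--
+--   [ DefaultSortSpec (mkSort (FieldName "created_at") Descending)
+--   , DefaultSortSpec (mkSort (FieldName "user") Ascending) ] :: Sort
+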
+{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
+    'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
+    'DistanceUnit' to express "nearness" to a single geographical point as a
+    sort specification.
+-}
+data SortSpec = DefaultSortSpec DefaultSort
+              | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)
+
+instance ToJSON SortSpec where
+  toJSON (DefaultSortSpec
+          (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
+           dsSortMode dsMissingSort dsNestedFilter)) =
+    object [dsSortFieldName .= omitNulls base] where
+      base = [ "order" .= dsSortOrder
+             , "ignore_unmapped" .= dsIgnoreUnmapped
+             , "mode" .= dsSortMode
+             , "missing" .= dsMissingSort
+             , "nested_filter" .= dsNestedFilter ]
+
+  toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
+    object [ "unit" .= units
+           , field .= gdsLatLon
+           , "order" .= gdsSortOrder ]
+
+{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
+    'mkSort' convenience function for when you want to specify only the most
+    common parameters.
+-}
+data DefaultSort =
+  DefaultSort { sortFieldName :: FieldName
+              , sortOrder :: SortOrder
+                -- default False
+              , ignoreUnmapped :: Bool
+              , sortMode :: Maybe SortMode
+              , missingSort :: Maybe Missing
+              , nestedFilter :: Maybe Filter } deriving (Eq, Show)
+
+{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
+    encoded into "asc" or "desc" when turned into JSON.
+-}
+data SortOrder = Ascending
+               | Descending deriving (Eq, Show)
+
+instance ToJSON SortOrder where
+  toJSON Ascending  = String "asc"
+  toJSON Descending = String "desc"
+
+{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
+
+http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
+-}
+data SortMode = SortMin
+              | SortMax
+              | SortSum
+              | SortAvg deriving (Eq, Show)
+
+instance ToJSON SortMode where
+  toJSON SortMin = String "min"
+  toJSON SortMax = String "max"
+  toJSON SortSum = String "sum"
+  toJSON SortAvg = String "avg"
+
+{-| 'Missing' prescribes how to handle missing fields. A missing field can be
+    sorted last, first, or using a custom value as a substitute.
+-}
+data Missing = LastMissing
+             | FirstMissing
+             | CustomMissing Text deriving (Eq, Show)
+
+instance ToJSON Missing where
+  toJSON LastMissing = String "_last"
+  toJSON FirstMissing = String "_first"
+  toJSON (CustomMissing txt) = String txt
+
+{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
+    that you can concisely describe the usual kind of 'SortSpec's you want.
+-}
+mkSort :: FieldName -> SortOrder -> DefaultSort
+mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing
+
diff --git a/src/Database/V1/Bloodhound/Internal/StringlyTyped.hs b/src/Database/V1/Bloodhound/Internal/StringlyTyped.hs
new file mode 100644
index 0000000..366ecef
--- /dev/null
+++ b/src/Database/V1/Bloodhound/Internal/StringlyTyped.hs
@@ -0,0 +1,26 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Database.V1.Bloodhound.Internal.StringlyTyped where
+
+import Bloodhound.Import
+
+import qualified Data.Text as T
+
+newtype StringlyTypedDouble = StringlyTypedDouble { unStringlyTypedDouble :: Double }
+
+
+instance FromJSON StringlyTypedDouble where
+  parseJSON = fmap StringlyTypedDouble . parseJSON . unStringlyTypeJSON
+
+
+-- | For some reason in several settings APIs, all leaf values get returned
+-- as strings. This function attempts to recover from this for all
+-- non-recursive JSON types. If nothing can be done, the value is left alone.
+unStringlyTypeJSON :: Value -> Value +unStringlyTypeJSON (String "true") = Bool True +unStringlyTypeJSON (String "false") = Bool False +unStringlyTypeJSON (String "null") = Null +unStringlyTypeJSON v@(String t) = case readMay (T.unpack t) of + Just n -> Number n + Nothing -> v +unStringlyTypeJSON v = v diff --git a/src/Database/V1/Bloodhound/Internal/Suggest.hs b/src/Database/V1/Bloodhound/Internal/Suggest.hs new file mode 100644 index 0000000..64d2a47 --- /dev/null +++ b/src/Database/V1/Bloodhound/Internal/Suggest.hs @@ -0,0 +1,252 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Database.V1.Bloodhound.Internal.Suggest where + + +import Bloodhound.Import + +import qualified Data.HashMap.Strict as HM + +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Query + +data Suggest = Suggest { suggestText :: Text + , suggestName :: Text + , suggestType :: SuggestType + } + deriving (Show, Eq) + +instance ToJSON Suggest where + toJSON Suggest{..} = object [ "text" .= suggestText + , suggestName .= suggestType + ] + +instance FromJSON Suggest where + parseJSON (Object o) = do + suggestText' <- o .: "text" + let dropTextList = HM.toList $ HM.filterWithKey (\x _ -> x /= "text") o + suggestName' <- case dropTextList of + [(x, _)] -> return x + _ -> fail "error parsing Suggest field name" + suggestType' <- o .: suggestName' + return $ Suggest suggestText' suggestName' suggestType' + parseJSON x = typeMismatch "Suggest" x + +data SuggestType = SuggestTypePhraseSuggester PhraseSuggester + deriving (Show, Eq) + +instance ToJSON SuggestType where + toJSON (SuggestTypePhraseSuggester x) = object ["phrase" .= x] + +instance FromJSON SuggestType where + parseJSON = withObject "SuggestType" parse + where parse o = phraseSuggester `taggedWith` "phrase" + where taggedWith parser k = parser =<< o .: k + phraseSuggester = pure . SuggestTypePhraseSuggester + +data PhraseSuggester = + PhraseSuggester { phraseSuggesterField :: FieldName + , phraseSuggesterGramSize :: Maybe Int + , phraseSuggesterRealWordErrorLikelihood :: Maybe Int + , phraseSuggesterConfidence :: Maybe Int + , phraseSuggesterMaxErrors :: Maybe Int + , phraseSuggesterSeparator :: Maybe Text + , phraseSuggesterSize :: Maybe Size + , phraseSuggesterAnalyzer :: Maybe Analyzer + , phraseSuggesterShardSize :: Maybe Int + , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter + , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate + , phraseSuggesterCandidateGenerators :: [DirectGenerators] + } + deriving (Show, Eq) + +instance ToJSON PhraseSuggester where + toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField + , "gram_size" .= phraseSuggesterGramSize + , "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood + , "confidence" .= phraseSuggesterConfidence + , "max_errors" .= phraseSuggesterMaxErrors + , "separator" .= phraseSuggesterSeparator + , "size" .= phraseSuggesterSize + , "analyzer" .= phraseSuggesterAnalyzer + , "shard_size" .= phraseSuggesterShardSize + , "highlight" .= phraseSuggesterHighlight + , "collate" .= phraseSuggesterCollate + , "direct_generator" .= phraseSuggesterCandidateGenerators + ] + +instance FromJSON PhraseSuggester where + parseJSON = withObject "PhraseSuggester" parse + where parse o = PhraseSuggester + <$> o .: "field" + <*> o .:? "gram_size" + <*> o .:? "real_word_error_likelihood" + <*> o .:? "confidence" + <*> o .:? "max_errors" + <*> o .:? "separator" + <*> o .:? "size" + <*> o .:? "analyzer" + <*> o .:? 
"shard_size" + <*> o .:? "highlight" + <*> o .:? "collate" + <*> o .:? "direct_generator" .!= [] + +mkPhraseSuggester :: FieldName -> PhraseSuggester +mkPhraseSuggester fName = + PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing [] + +data PhraseSuggesterHighlighter = + PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text + , phraseSuggesterHighlighterPostTag :: Text + } + deriving (Show, Eq) + +instance ToJSON PhraseSuggesterHighlighter where + toJSON PhraseSuggesterHighlighter{..} = + object [ "pre_tag" .= phraseSuggesterHighlighterPreTag + , "post_tag" .= phraseSuggesterHighlighterPostTag + ] + +instance FromJSON PhraseSuggesterHighlighter where + parseJSON = withObject "PhraseSuggesterHighlighter" parse + where parse o = PhraseSuggesterHighlighter + <$> o .: "pre_tag" + <*> o .: "post_tag" + +data PhraseSuggesterCollate = + PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline + , phraseSuggesterCollatePrune :: Bool + } + deriving (Show, Eq) + +instance ToJSON PhraseSuggesterCollate where + toJSON PhraseSuggesterCollate{..} = object [ "query" .= object + [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) + ] + , "params" .= (params phraseSuggesterCollateTemplateQuery) + , "prune" .= phraseSuggesterCollatePrune + ] + +instance FromJSON PhraseSuggesterCollate where + parseJSON (Object o) = do + query' <- o .: "query" + inline' <- query' .: "inline" + params' <- o .: "params" + prune' <- o .:? "prune" .!= False + return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune' + parseJSON x = typeMismatch "PhraseSuggesterCollate" x + +data DirectGenerators = DirectGenerators + { directGeneratorsField :: FieldName + , directGeneratorsSize :: Maybe Int + , directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes + , directGeneratorMaxEdits :: Maybe Double + , directGeneratorPrefixLength :: Maybe Int + , directGeneratorMinWordLength :: Maybe Int + , directGeneratorMaxInspections :: Maybe Int + , directGeneratorMinDocFreq :: Maybe Double + , directGeneratorMaxTermFreq :: Maybe Double + , directGeneratorPreFilter :: Maybe Text + , directGeneratorPostFilter :: Maybe Text + } + deriving (Show, Eq) + + +instance ToJSON DirectGenerators where + toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField + , "size" .= directGeneratorsSize + , "suggest_mode" .= directGeneratorSuggestMode + , "max_edits" .= directGeneratorMaxEdits + , "prefix_length" .= directGeneratorPrefixLength + , "min_word_length" .= directGeneratorMinWordLength + , "max_inspections" .= directGeneratorMaxInspections + , "min_doc_freq" .= directGeneratorMinDocFreq + , "max_term_freq" .= directGeneratorMaxTermFreq + , "pre_filter" .= directGeneratorPreFilter + , "post_filter" .= directGeneratorPostFilter + ] + +instance FromJSON DirectGenerators where + parseJSON = withObject "DirectGenerators" parse + where parse o = DirectGenerators + <$> o .: "field" + <*> o .:? "size" + <*> o .: "suggest_mode" + <*> o .:? "max_edits" + <*> o .:? "prefix_length" + <*> o .:? "min_word_length" + <*> o .:? "max_inspections" + <*> o .:? "min_doc_freq" + <*> o .:? "max_term_freq" + <*> o .:? "pre_filter" + <*> o .:? 
"post_filter" + +mkDirectGenerators :: FieldName -> DirectGenerators +mkDirectGenerators fn = DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing + +data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing + | DirectGeneratorSuggestModePopular + | DirectGeneratorSuggestModeAlways + deriving (Show, Eq) + +instance ToJSON DirectGeneratorSuggestModeTypes where + toJSON DirectGeneratorSuggestModeMissing = "missing" + toJSON DirectGeneratorSuggestModePopular = "popular" + toJSON DirectGeneratorSuggestModeAlways = "always" + +instance FromJSON DirectGeneratorSuggestModeTypes where + parseJSON = withText "DirectGeneratorSuggestModeTypes" parse + where parse "missing" = pure DirectGeneratorSuggestModeMissing + parse "popular" = pure DirectGeneratorSuggestModePopular + parse "always" = pure DirectGeneratorSuggestModeAlways + parse f = fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f) + +data SuggestOptions = + SuggestOptions { suggestOptionsText :: Text + , suggestOptionsScore :: Double + , suggestOptionsFreq :: Maybe Int + , suggestOptionsHighlighted :: Maybe Text + } + deriving (Eq, Show) + +instance FromJSON SuggestOptions where + parseJSON = withObject "SuggestOptions" parse + where parse o = SuggestOptions + <$> o .: "text" + <*> o .: "score" + <*> o .:? "freq" + <*> o .:? "highlighted" + +data SuggestResponse = + SuggestResponse { suggestResponseText :: Text + , suggestResponseOffset :: Int + , suggestResponseLength :: Int + , suggestResponseOptions :: [SuggestOptions] + } + deriving (Eq, Show) + +instance FromJSON SuggestResponse where + parseJSON = withObject "SuggestResponse" parse + where parse o = SuggestResponse + <$> o .: "text" + <*> o .: "offset" + <*> o .: "length" + <*> o .: "options" + +data NamedSuggestionResponse = + NamedSuggestionResponse { nsrName :: Text + , nsrResponses :: [SuggestResponse] + } + deriving (Eq, Show) + +instance FromJSON NamedSuggestionResponse where + parseJSON (Object o) = do + suggestionName' <- case HM.toList o of + [(x, _)] -> return x + _ -> fail "error parsing NamedSuggestionResponse name" + suggestionResponses' <- o .: suggestionName' + return $ NamedSuggestionResponse suggestionName' suggestionResponses' + + parseJSON x = typeMismatch "NamedSuggestionResponse" x diff --git a/src/Database/V1/Bloodhound/Types.hs b/src/Database/V1/Bloodhound/Types.hs index 81583aa..63bf8e9 100644 --- a/src/Database/V1/Bloodhound/Types.hs +++ b/src/Database/V1/Bloodhound/Types.hs @@ -4,7 +4,6 @@ {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} --- {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} @@ -379,734 +378,29 @@ module Database.V1.Bloodhound.Types , EsPassword(..) 
) where -import Control.Applicative as A -import Control.Arrow (first) -import Control.Monad.Catch -import Control.Monad.Except -import Control.Monad.Reader (MonadReader (..), - ReaderT (..)) -import Control.Monad.State (MonadState) -import Control.Monad.Writer (MonadWriter) -import Data.Aeson -import Data.Aeson.Types (Pair, Parser, - emptyObject, - parseEither, parseMaybe, - typeMismatch) -import qualified Data.ByteString.Lazy.Char8 as L -import Data.Char -import Data.Hashable (Hashable) -import qualified Data.HashMap.Strict as HM -import Data.List (foldl', intercalate, - nub) -import Data.List.NonEmpty (NonEmpty (..), toList) -import qualified Data.Map.Strict as M -import Data.Maybe -import Data.Scientific (Scientific) -import Data.Semigroup -import Data.Text (Text) -import qualified Data.Text as T -import Data.Time.Calendar -import Data.Time.Clock (NominalDiffTime, - UTCTime) -import Data.Time.Clock.POSIX -import qualified Data.Traversable as DT -import Data.Typeable (Typeable) -import qualified Data.Vector as V -import qualified Data.Version as Vers -import GHC.Enum -import GHC.Generics (Generic) -import Network.HTTP.Client -import qualified Network.HTTP.Types.Method as NHTM -import qualified Text.ParserCombinators.ReadP as RP -import qualified Text.Read as TR +import Bloodhound.Import import Database.V1.Bloodhound.Types.Class -import Database.V1.Bloodhound.Types.Internal - --- $setup --- >>> :set -XOverloadedStrings --- >>> import Data.Aeson --- >>> import Database.V1.Bloodhound --- >>> let testServer = (Server "http://localhost:9200") --- >>> let testIndex = IndexName "twitter" --- >>> let testMapping = MappingName "tweet" --- >>> let defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2) - --- defaultIndexSettings is exported by Database.Bloodhound as well --- no trailing slashes in servers, library handles building the path. - --- | Create a 'BHEnv' with all optional fields defaulted. HTTP hook --- will be a noop. You can use the exported fields to customize it further, e.g.: --- --- >> (mkBHEnv myServer myManager) { bhRequestHook = customHook } -mkBHEnv :: Server -> Manager -> BHEnv -mkBHEnv s m = BHEnv s m return - -newtype BH m a = BH { - unBH :: ReaderT BHEnv m a - } deriving ( Functor - , A.Applicative - , Monad - , MonadIO - , MonadState s - , MonadWriter w - , MonadError e - , Alternative - , MonadPlus - , MonadFix - , MonadThrow - , MonadCatch - , MonadMask) - -instance MonadTrans BH where - lift = BH . lift - -instance (MonadReader r m) => MonadReader r (BH m) where - ask = lift ask - local f (BH (ReaderT m)) = BH $ ReaderT $ \r -> - local f (m r) - -instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where - getBHEnv = BH getBHEnv - -runBH :: BHEnv -> BH m a -> m a -runBH e f = runReaderT (unBH f) e - -{-| 'Version' is embedded in 'Status' -} -data Version = Version { number :: VersionNumber - , build_hash :: BuildHash - , build_timestamp :: UTCTime - , build_snapshot :: Bool - , lucene_version :: VersionNumber } deriving (Eq, Read, Show, Generic, Typeable) - --- | Traditional software versioning number -newtype VersionNumber = VersionNumber { versionNumber :: Vers.Version - } deriving (Eq, Read, Show, Generic, Typeable, Ord) - -{-| 'Status' is a data type for describing the JSON body returned by - Elasticsearch when you query its status. This was deprecated in 1.2.0. 
- - --} - -data Status = Status { ok :: Maybe Bool - , status :: Int - , name :: Text - , version :: Version - , tagline :: Text } deriving (Eq, Read, Show, Generic) - -{-| 'IndexSettings' is used to configure the shards and replicas when you create - an Elasticsearch Index. - - --} - -data IndexSettings = - IndexSettings { indexShards :: ShardCount - , indexReplicas :: ReplicaCount } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -} -defaultIndexSettings :: IndexSettings -defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2) - - -{-| 'IndexOptimizationSettings' is used to configure index optimization. See - - for more info. --} -data IndexOptimizationSettings = - IndexOptimizationSettings { maxNumSegments :: Maybe Int - -- ^ Number of segments to optimize to. 1 will fully optimize the index. If omitted, the default behavior is to only optimize if the server deems it necessary. - , onlyExpungeDeletes :: Bool - -- ^ Should the optimize process only expunge segments with deletes in them? If the purpose of the optimization is to free disk space, this should be set to True. - , flushAfterOptimize :: Bool - -- ^ Should a flush be performed after the optimize. - } deriving (Eq, Show, Generic, Typeable) - - -{-| 'defaultIndexOptimizationSettings' implements the default settings that - Elasticsearch uses for index optimization. 'maxNumSegments' is Nothing, - 'onlyExpungeDeletes' is False, and flushAfterOptimize is True. --} -defaultIndexOptimizationSettings :: IndexOptimizationSettings -defaultIndexOptimizationSettings = IndexOptimizationSettings Nothing False True - -{-| 'UpdatableIndexSetting' are settings which may be updated after an index is created. - - --} -data UpdatableIndexSetting = NumberOfReplicas ReplicaCount - -- ^ The number of replicas each shard has. - | AutoExpandReplicas ReplicaBounds - | BlocksReadOnly Bool - -- ^ Set to True to have the index read only. False to allow writes and metadata changes. - | BlocksRead Bool - -- ^ Set to True to disable read operations against the index. - | BlocksWrite Bool - -- ^ Set to True to disable write operations against the index. - | BlocksMetaData Bool - -- ^ Set to True to disable metadata operations against the index. - | RefreshInterval NominalDiffTime - -- ^ The async refresh interval of a shard - | IndexConcurrency Int - | FailOnMergeFailure Bool - | TranslogFlushThresholdOps Int - -- ^ When to flush on operations. - | TranslogFlushThresholdSize Bytes - -- ^ When to flush based on translog (bytes) size. - | TranslogFlushThresholdPeriod NominalDiffTime - -- ^ When to flush based on a period of not flushing. - | TranslogDisableFlush Bool - -- ^ Disables flushing. Note, should be set for a short interval and then enabled. - | CacheFilterMaxSize (Maybe Bytes) - -- ^ The maximum size of filter cache (per segment in shard). - | CacheFilterExpire (Maybe NominalDiffTime) - -- ^ The expire after access time for filter cache. - | GatewaySnapshotInterval NominalDiffTime - -- ^ The gateway snapshot interval (only applies to shared gateways). - | RoutingAllocationInclude (NonEmpty NodeAttrFilter) - -- ^ A node matching any rule will be allowed to host shards from the index. - | RoutingAllocationExclude (NonEmpty NodeAttrFilter) - -- ^ A node matching any rule will NOT be allowed to host shards from the index. - | RoutingAllocationRequire (NonEmpty NodeAttrFilter) - -- ^ Only nodes matching all rules will be allowed to host shards from the index. 
- | RoutingAllocationEnable AllocationPolicy - -- ^ Enables shard allocation for a specific index. - | RoutingAllocationShardsPerNode ShardCount - -- ^ Controls the total number of shards (replicas and primaries) allowed to be allocated on a single node. - | RecoveryInitialShards InitialShardCount - -- ^ When using local gateway a particular shard is recovered only if there can be allocated quorum shards in the cluster. - | GCDeletes NominalDiffTime - | TTLDisablePurge Bool - -- ^ Disables temporarily the purge of expired docs. - | TranslogFSType FSType - | IndexCompoundFormat CompoundFormat - | IndexCompoundOnFlush Bool - | WarmerEnabled Bool - deriving (Eq, Show, Generic, Typeable) - -data AllocationPolicy = AllocAll - -- ^ Allows shard allocation for all shards. - | AllocPrimaries - -- ^ Allows shard allocation only for primary shards. - | AllocNewPrimaries - -- ^ Allows shard allocation only for primary shards for new indices. - | AllocNone - -- ^ No shard allocation is allowed - deriving (Eq, Read, Show, Generic, Typeable) - -data ReplicaBounds = ReplicasBounded Int Int - | ReplicasLowerBounded Int - | ReplicasUnbounded - deriving (Eq, Read, Show, Generic, Typeable) - --- | A measure of bytes used for various configurations. You may want --- to use smart constructors like 'gigabytes' for larger values. --- --- >>> gigabytes 9 --- Bytes 9000000000 --- --- >>> megabytes 9 --- Bytes 9000000 --- --- >>> kilobytes 9 --- Bytes 9000 -newtype Bytes = Bytes Int deriving (Eq, Read, Show, Generic, Typeable, Ord, ToJSON, FromJSON) - -gigabytes :: Int -> Bytes -gigabytes n = megabytes (1000 * n) - - -megabytes :: Int -> Bytes -megabytes n = kilobytes (1000 * n) - - -kilobytes :: Int -> Bytes -kilobytes n = Bytes (1000 * n) - - -data FSType = FSSimple - | FSBuffered deriving (Eq, Read, Show, Generic, Typeable, Ord) - -data InitialShardCount = QuorumShards - | QuorumMinus1Shards - | FullShards - | FullMinus1Shards - | ExplicitShards Int - deriving (Eq, Read, Show, Generic, Typeable) - -data NodeAttrFilter = NodeAttrFilter { nodeAttrFilterName :: NodeAttrName - , nodeAttrFilterValues :: NonEmpty Text} - deriving (Eq, Read, Show, Generic, Ord, Typeable) - -newtype NodeAttrName = NodeAttrName Text deriving (Eq, Read, Show, Ord, Generic, Typeable) - -data CompoundFormat = CompoundFileFormat Bool - | MergeSegmentVsTotalIndex Double - -- ^ percentage between 0 and 1 where 0 is false, 1 is true - deriving (Eq, Read, Show, Generic, Typeable) - -newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime } - -data IndexSettingsSummary = IndexSettingsSummary { sSummaryIndexName :: IndexName - , sSummaryFixedSettings :: IndexSettings - , sSummaryUpdateable :: [UpdatableIndexSetting]} - deriving (Eq, Show, Generic, Typeable) - -{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -} -type Reply = Network.HTTP.Client.Response L.ByteString -type Method = NHTM.Method - -{-| 'OpenCloseIndex' is a sum type for opening and closing indices. - - --} -data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Read, Show, Generic, Typeable) - -data FieldType = GeoPointType - | GeoShapeType - | FloatType - | IntegerType - | LongType - | ShortType - | ByteType deriving (Eq, Read, Show, Generic, Typeable) - -data FieldDefinition = - FieldDefinition { fieldType :: FieldType } deriving (Eq, Read, Show, Generic, Typeable) - -{-| An 'IndexTemplate' defines a template that will automatically be - applied to new indices created. 
The templates include both - 'IndexSettings' and mappings, and a simple 'TemplatePattern' that - controls if the template will be applied to the index created. - Specify mappings as follows: @[toJSON TweetMapping, ...]@ - - https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html --} -data IndexTemplate = - IndexTemplate { templatePattern :: TemplatePattern - , templateSettings :: Maybe IndexSettings - , templateMappings :: [Value] - } - -data MappingField = - MappingField { mappingFieldName :: FieldName - , fieldDefinition :: FieldDefinition } deriving (Eq, Read, Show, Generic, Typeable) - -{-| Support for type reification of 'Mapping's is currently incomplete, for - now the mapping API verbiage expects a 'ToJSON'able blob. - - Indexes have mappings, mappings are schemas for the documents contained in the - index. I'd recommend having only one mapping per index, always having a mapping, - and keeping different kinds of documents separated if possible. --} -data Mapping = Mapping { typeName :: TypeName - , mappingFields :: [MappingField] } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk - operation index, create, delete, and update. 'BulkIndex' behaves like an - "upsert", 'BulkCreate' will fail if a document already exists at the DocId. - - --} -data BulkOperation = - BulkIndex IndexName MappingName DocId Value - | BulkCreate IndexName MappingName DocId Value - | BulkDelete IndexName MappingName DocId - | BulkUpdate IndexName MappingName DocId Value deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsResult' describes the standard wrapper JSON document that you see in - successful Elasticsearch lookups or lookups that couldn't find the document. --} -data EsResult a = EsResult { _index :: Text - , _type :: Text - , _id :: Text - , foundResult :: Maybe (EsResultFound a)} deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsResultFound' contains the document and its metadata inside of an - 'EsResult' when the document was successfully found. --} -data EsResultFound a = EsResultFound { _version :: DocVersion - , _source :: a } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsError' is the generic type that will be returned when there was a - problem. If you can't parse the expected response, its a good idea to - try parsing this. --} -data EsError = EsError { errorStatus :: Int - , errorMessage :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response -returned by the Elasticsearch server. If you encounter this error, please -verify that your domain data types and FromJSON instances are working properly -(for example, the 'a' of '[Hit a]' in 'SearchResult.searchHits.hits'). If you're -sure that your mappings are correct, then this error may be an indication of an -incompatibility between Bloodhound and Elasticsearch. Please open a bug report -and be sure to include the exception body. 
--} -data EsProtocolException = EsProtocolException { esProtoExBody :: L.ByteString } - deriving (Eq, Read, Show, Generic, Typeable) - -instance Exception EsProtocolException - -data IndexAlias = IndexAlias { srcIndex :: IndexName - , indexAlias :: IndexAliasName } deriving (Eq, Read, Show, Generic, Typeable) - -newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Read, Show, Generic, ToJSON) - -data IndexAliasAction = AddAlias IndexAlias IndexAliasCreate - | RemoveAlias IndexAlias deriving (Read, Show, Eq, Generic, Typeable) - -data IndexAliasCreate = IndexAliasCreate { aliasCreateRouting :: Maybe AliasRouting - , aliasCreateFilter :: Maybe Filter} - deriving (Read, Show, Eq, Generic, Typeable) - -data AliasRouting = AllAliasRouting RoutingValue - | GranularAliasRouting (Maybe SearchAliasRouting) (Maybe IndexAliasRouting) - deriving (Read, Show, Eq, Generic, Typeable) - -newtype SearchAliasRouting = SearchAliasRouting (NonEmpty RoutingValue) deriving (Read, Show, Eq, Generic, Typeable) - -newtype IndexAliasRouting = IndexAliasRouting RoutingValue deriving (Read, Show, Eq, Generic, ToJSON, FromJSON, Typeable) - -newtype RoutingValue = RoutingValue { routingValue :: Text } deriving (Read, Show, Eq, Generic, ToJSON, FromJSON, Typeable) - -newtype IndexAliasesSummary = IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } deriving (Read, Show, Eq, Generic, Typeable) - -{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -} -data IndexAliasSummary = IndexAliasSummary { indexAliasSummaryAlias :: IndexAlias - , indexAliasSummaryCreate :: IndexAliasCreate} deriving (Read, Show, Eq, Generic, Typeable) - -{-| 'DocVersion' is an integer version number for a document between 1 -and 9.2e+18 used for <>. --} -newtype DocVersion = DocVersion { - docVersionNumber :: Int - } deriving (Eq, Read, Show, Generic, Ord, ToJSON) - --- | Smart constructor for in-range doc version -mkDocVersion :: Int -> Maybe DocVersion -mkDocVersion i - | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) = - Just $ DocVersion i - | otherwise = Nothing - - -{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its -own version numbers instead of ones from ES. --} -newtype ExternalDocVersion = ExternalDocVersion DocVersion - deriving (Eq, Read, Show, Generic, Ord, Bounded, Enum, ToJSON) - -{-| 'VersionControl' is specified when indexing documents as a -optimistic concurrency control. --} -data VersionControl = NoVersionControl - -- ^ Don't send a version. This is a pure overwrite. - | InternalVersion DocVersion - -- ^ Use the default ES versioning scheme. Only - -- index the document if the version is the same - -- as the one specified. Only applicable to - -- updates, as you should be getting Version from - -- a search result. - | ExternalGT ExternalDocVersion - -- ^ Use your own version numbering. Only index - -- the document if the version is strictly higher - -- OR the document doesn't exist. The given - -- version will be used as the new version number - -- for the stored document. N.B. All updates must - -- increment this number, meaning there is some - -- global, external ordering of updates. - | ExternalGTE ExternalDocVersion - -- ^ Use your own version numbering. Only index - -- the document if the version is equal or higher - -- than the stored version. Will succeed if there - -- is no existing document. The given version will - -- be used as the new version number for the - -- stored document. 
Use with care, as this could - -- result in data loss. - | ForceVersion ExternalDocVersion - -- ^ The document will always be indexed and the - -- given version will be the new version. This is - -- typically used for correcting errors. Use with - -- care, as this could result in data loss. - deriving (Read, Show, Eq, Generic, Ord) - -{-| 'DocumentParent' is used to specify a parent document. --} -newtype DocumentParent = DocumentParent DocId - deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'IndexDocumentSettings' are special settings supplied when indexing -a document. For the best backwards compatiblity when new fields are -added, you should probably prefer to start with 'defaultIndexDocumentSettings' --} -data IndexDocumentSettings = - IndexDocumentSettings { idsVersionControl :: VersionControl - , idsParent :: Maybe DocumentParent - } deriving (Eq, Read, Show, Generic, Typeable) - -{-| Reasonable default settings. Chooses no version control and no parent. --} -defaultIndexDocumentSettings :: IndexDocumentSettings -defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing - -{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order - dependent with later sorts acting as tie-breakers for earlier sorts. --} -type Sort = [SortSpec] - -{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and - 'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and - 'DistanceUnit' to express "nearness" to a single geographical point as a - sort specification. - - --} -data SortSpec = DefaultSortSpec DefaultSort - | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a - 'mkSort' convenience function for when you want to specify only the most - common parameters. - - --} -data DefaultSort = - DefaultSort { sortFieldName :: FieldName - , sortOrder :: SortOrder - -- default False - , ignoreUnmapped :: Bool - , sortMode :: Maybe SortMode - , missingSort :: Maybe Missing - , nestedFilter :: Maybe Filter } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get - encoded into "asc" or "desc" when turned into JSON. - - --} -data SortOrder = Ascending - | Descending deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'Missing' prescribes how to handle missing fields. A missing field can be - sorted last, first, or using a custom value as a substitute. - - --} -data Missing = LastMissing - | FirstMissing - | CustomMissing Text deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields. - -http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option --} -data SortMode = SortMin - | SortMax - | SortSum - | SortAvg deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so - that you can concisely describe the usual kind of 'SortSpec's you want. --} -mkSort :: FieldName -> SortOrder -> DefaultSort -mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing - -{-| 'Cache' is for telling ES whether it should cache a 'Filter' not. - 'Query's cannot be cached. --} -type Cache = Bool -- caching on/off -defaultCache :: Cache -defaultCache = False - -{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component. 
--} -type PrefixValue = Text - -{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible - JSON encoding baked in. Used all over the place. --} -data BooleanOperator = And | Or deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'ShardCount' is part of 'IndexSettings' --} -newtype ShardCount = ShardCount Int deriving (Eq, Read, Show, Generic, ToJSON, Typeable) - -{-| 'ReplicaCount' is part of 'IndexSettings' --} -newtype ReplicaCount = ReplicaCount Int deriving (Eq, Read, Show, Generic, ToJSON, Typeable) - -{-| 'IndexName' is used to describe which index to query/create/delete --} -newtype IndexName = IndexName Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'IndexSelection' is used for APIs which take a single index, a list of - indexes, or the special @_all@ index. --} ---TODO: this does not fully support . It wouldn't be too hard to implement but you'd have to add the optional parameters (ignore_unavailable, allow_no_indices, expand_wildcards) to any APIs using it. Also would be a breaking API. -data IndexSelection = IndexList (NonEmpty IndexName) - | AllIndexes deriving (Eq, Generic, Show, Typeable) - -{-| 'NodeSelection' is used for most cluster APIs. See for more details. --} -data NodeSelection = LocalNode - -- ^ Whatever node receives this request - | NodeList (NonEmpty NodeSelector) - | AllNodes deriving (Eq, Generic, Show, Typeable) - - --- | An exact match or pattern to identify a node. Note that All of --- these options support wildcarding, so your node name, server, attr --- name can all contain * characters to be a fuzzy match. -data NodeSelector = NodeByName NodeName - | NodeByFullNodeId FullNodeId - | NodeByHost Server - -- ^ e.g. 10.0.0.1 or even 10.0.0.* - | NodeByAttribute NodeAttrName Text - -- ^ NodeAttrName can be a pattern, e.g. rack*. The value can too. - deriving (Eq, Generic, Show, Typeable) - -{-| 'TemplateName' is used to describe which template to query/create/delete --} -newtype TemplateName = TemplateName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'TemplatePattern' represents a pattern which is matched against index names --} -newtype TemplatePattern = TemplatePattern Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'MappingName' is part of mappings which are how ES describes and schematizes - the data in the indices. --} -newtype MappingName = MappingName Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'DocId' is a generic wrapper value for expressing unique Document IDs. - Can be set by the user or created by ES itself. Often used in client - functions for poking at specific documents. --} -newtype DocId = DocId Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'QueryString' is used to wrap query text bodies, be they human written or not. --} -newtype QueryString = QueryString Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'FieldName' is used all over the place wherever a specific field within - a document needs to be specified, usually in 'Query's or 'Filter's. --} -newtype FieldName = FieldName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - - -{-| 'Script' is often used in place of 'FieldName' to specify more -complex ways of extracting a value from a document. 
--} -newtype Script = Script { scriptText :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'CacheName' is used in 'RegexpFilter' for describing the - 'CacheKey' keyed caching behavior. --} -newtype CacheName = CacheName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching. --} -newtype CacheKey = - CacheKey Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Existence = - Existence Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype NullValue = - NullValue Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype CutoffFrequency = - CutoffFrequency Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Analyzer = - Analyzer Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxExpansions = - MaxExpansions Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'Lenient', if set to true, will cause format based failures to be - ignored. I don't know what the bloody default is, Elasticsearch - documentation didn't say what it was. Let me know if you figure it out. --} -newtype Lenient = - Lenient Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Tiebreaker = - Tiebreaker Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Boost = - Boost Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype BoostTerms = - BoostTerms Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'MinimumMatch' controls how many should clauses in the bool query should - match. Can be an absolute value (2) or a percentage (30%) or a - combination of both. --} -newtype MinimumMatch = - MinimumMatch Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype DisableCoord = - DisableCoord Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype IgnoreTermFrequency = - IgnoreTermFrequency Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinimumTermFrequency = - MinimumTermFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxQueryTerms = - MaxQueryTerms Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Fuzziness = - Fuzziness Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -} -newtype PrefixLength = - PrefixLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype TypeName = - TypeName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype PercentMatch = - PercentMatch Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype StopWord = - StopWord Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype QueryPath = - QueryPath Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly - heavy, because all terms in the index need to be examined, just in case - they match. Leading wildcards can be disabled by setting - 'AllowLeadingWildcard' to false. 
-}
-newtype AllowLeadingWildcard =
-  AllowLeadingWildcard Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-newtype LowercaseExpanded =
-  LowercaseExpanded Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-newtype EnablePositionIncrements =
-  EnablePositionIncrements Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-
-{-| By default, wildcard terms in a query are not analyzed.
-    Setting 'AnalyzeWildcard' to true enables best-effort analysis.
--}
-newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-
-{-| 'GeneratePhraseQueries' defaults to false.
--}
-newtype GeneratePhraseQueries =
-  GeneratePhraseQueries Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-
-{-| 'Locale' is used for string conversions - defaults to ROOT.
--}
-newtype Locale = Locale Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-newtype MaxWordLength = MaxWordLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-newtype MinWordLength = MinWordLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-
-{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact
-    phrase matches. Default is 0.
--}
-newtype PhraseSlop = PhraseSlop Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)
-
--- | Newtype wrapper to parse ES's concerning tendency, in some APIs, to return a floating-point number of milliseconds since the epoch ಠ_ಠ
-newtype POSIXMS = POSIXMS { posixMS :: UTCTime }
-
-{-| 'unpackId' is a silly convenience function that gets used once.
--}
-unpackId :: DocId -> Text
-unpackId (DocId docId) = docId
+import Database.V1.Bloodhound.Internal.Aggregation
+import Database.V1.Bloodhound.Internal.Client
+import Database.V1.Bloodhound.Internal.Highlight
+import Database.V1.Bloodhound.Internal.Newtypes
+import Database.V1.Bloodhound.Internal.Query
+import Database.V1.Bloodhound.Internal.Sort
+import Database.V1.Bloodhound.Internal.Suggest
+
+data SearchResult a =
+  SearchResult { took         :: Int
+               , timedOut     :: Bool
+               , shards       :: ShardResult
+               , searchHits   :: SearchHits a
+               , aggregations :: Maybe AggregationResults
+               , scrollId     :: Maybe ScrollId
+               , suggest      :: Maybe NamedSuggestionResponse -- ^ Only one Suggestion request / response per Search is supported.
+               }
+  deriving (Eq, Show)

 type TrackSortScores = Bool

-newtype From = From Int deriving (Eq, Read, Show, Generic, ToJSON)
-newtype Size = Size Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable)

 data Search = Search { queryBody  :: Maybe Query
                      , filterBody :: Maybe Filter
@@ -1121,7 +415,7 @@ data Search = Search { queryBody :: Maybe Query
                      , fields :: Maybe [FieldName]
                      , source :: Maybe Source
                      , suggestBody :: Maybe Suggest -- ^ Only one Suggestion request / response per Search is supported.
- } deriving (Eq, Read, Show, Generic, Typeable) + } deriving (Eq, Show) data SearchType = SearchTypeQueryThenFetch | SearchTypeDfsQueryThenFetch @@ -1129,2345 +423,7 @@ data SearchType = SearchTypeQueryThenFetch | SearchTypeScan | SearchTypeQueryAndFetch | SearchTypeDfsQueryAndFetch - deriving (Eq, Read, Show, Generic, Typeable) - -data Source = - NoSource - | SourcePatterns PatternOrPatterns - | SourceIncludeExclude Include Exclude - deriving (Read, Show, Eq, Generic, Typeable) - -data PatternOrPatterns = PopPattern Pattern - | PopPatterns [Pattern] deriving (Eq, Read, Show, Generic, Typeable) - -data Include = Include [Pattern] deriving (Eq, Read, Show, Generic, Typeable) -data Exclude = Exclude [Pattern] deriving (Eq, Read, Show, Generic, Typeable) - -newtype Pattern = Pattern Text deriving (Eq, Read, Show, Generic, Typeable) - -data Highlights = Highlights { globalsettings :: Maybe HighlightSettings - , highlightFields :: [FieldHighlight] - } deriving (Read, Show, Eq, Generic, Typeable) - -data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings) - deriving (Read, Show, Eq, Generic, Typeable) - - -data HighlightSettings = Plain PlainHighlight - | Postings PostingsHighlight - | FastVector FastVectorHighlight - deriving (Read, Show, Eq, Generic, Typeable) -data PlainHighlight = - PlainHighlight { plainCommon :: Maybe CommonHighlight - , plainNonPost :: Maybe NonPostings } deriving (Read, Show, Eq, Generic, Typeable) - - -- This requires that index_options are set to 'offset' in the mapping. -data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Read, Show, Eq, Generic, Typeable) - --- This requires that term_vector is set to 'with_positions_offsets' in the mapping. -data FastVectorHighlight = - FastVectorHighlight { fvCommon :: Maybe CommonHighlight - , fvNonPostSettings :: Maybe NonPostings - , boundaryChars :: Maybe Text - , boundaryMaxScan :: Maybe Int - , fragmentOffset :: Maybe Int - , matchedFields :: [Text] - , phraseLimit :: Maybe Int - } deriving (Read, Show, Eq, Generic, Typeable) - -data CommonHighlight = - CommonHighlight { order :: Maybe Text - , forceSource :: Maybe Bool - , tag :: Maybe HighlightTag - , encoder :: Maybe HighlightEncoder - , noMatchSize :: Maybe Int - , highlightQuery :: Maybe Query - , requireFieldMatch :: Maybe Bool - } deriving (Read, Show, Eq, Generic, Typeable) - --- Settings that are only applicable to FastVector and Plain highlighters. -data NonPostings = - NonPostings { fragmentSize :: Maybe Int - , numberOfFragments :: Maybe Int} deriving (Read, Show, Eq, Generic, Typeable) - -data HighlightEncoder = DefaultEncoder - | HTMLEncoder - deriving (Read, Show, Eq, Generic, Typeable) - --- NOTE: Should the tags use some kind of HTML type, rather than Text? 
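-- An illustrative construction (editor's sketch; the field name is made up):
-- a plain highlighter on one field, with 100-character fragments and three
-- fragments per hit, built from the highlight types above:
--
-- > Highlights Nothing
-- >   [ FieldHighlight (FieldName "body")
-- >       (Just (Plain (PlainHighlight Nothing
-- >                      (Just (NonPostings (Just 100) (Just 3)))))) ]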
-data HighlightTag = TagSchema Text - | CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh - deriving (Read, Show, Eq, Generic, Typeable) - - -data Query = - TermQuery Term (Maybe Boost) - | TermsQuery Text (NonEmpty Text) - | QueryMatchQuery MatchQuery - | QueryMultiMatchQuery MultiMatchQuery - | QueryBoolQuery BoolQuery - | QueryBoostingQuery BoostingQuery - | QueryCommonTermsQuery CommonTermsQuery - | ConstantScoreFilter Filter Boost - | ConstantScoreQuery Query Boost - | QueryDisMaxQuery DisMaxQuery - | QueryFilteredQuery FilteredQuery - | QueryFuzzyLikeThisQuery FuzzyLikeThisQuery - | QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery - | QueryFuzzyQuery FuzzyQuery - | QueryHasChildQuery HasChildQuery - | QueryHasParentQuery HasParentQuery - | IdsQuery MappingName [DocId] - | QueryIndicesQuery IndicesQuery - | MatchAllQuery (Maybe Boost) - | QueryMoreLikeThisQuery MoreLikeThisQuery - | QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery - | QueryNestedQuery NestedQuery - | QueryPrefixQuery PrefixQuery - | QueryQueryStringQuery QueryStringQuery - | QuerySimpleQueryStringQuery SimpleQueryStringQuery - | QueryRangeQuery RangeQuery - | QueryRegexpQuery RegexpQuery - | QueryTemplateQueryInline TemplateQueryInline - deriving (Eq, Read, Show, Generic, Typeable) - -data RegexpQuery = - RegexpQuery { regexpQueryField :: FieldName - , regexpQuery :: Regexp - , regexpQueryFlags :: RegexpFlags - , regexpQueryBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data RangeQuery = - RangeQuery { rangeQueryField :: FieldName - , rangeQueryRange :: RangeValue - , rangeQueryBoost :: Boost } deriving (Eq, Read, Show, Generic, Typeable) - -mkRangeQuery :: FieldName -> RangeValue -> RangeQuery -mkRangeQuery f r = RangeQuery f r (Boost 1.0) - -data SimpleQueryStringQuery = - SimpleQueryStringQuery - { simpleQueryStringQuery :: QueryString - , simpleQueryStringField :: Maybe FieldOrFields - , simpleQueryStringOperator :: Maybe BooleanOperator - , simpleQueryStringAnalyzer :: Maybe Analyzer - , simpleQueryStringFlags :: Maybe (NonEmpty SimpleQueryFlag) - , simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded - , simpleQueryStringLocale :: Maybe Locale - } deriving (Eq, Read, Show, Generic, Typeable) - -data SimpleQueryFlag = - SimpleQueryAll - | SimpleQueryNone - | SimpleQueryAnd - | SimpleQueryOr - | SimpleQueryPrefix - | SimpleQueryPhrase - | SimpleQueryPrecedence - | SimpleQueryEscape - | SimpleQueryWhitespace - | SimpleQueryFuzzy - | SimpleQueryNear - | SimpleQuerySlop deriving (Eq, Read, Show, Generic, Typeable) - --- use_dis_max and tie_breaker when fields are plural? 
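-- A hedged usage sketch for the 'mkRangeQuery' helper above (the field name
-- and bound are hypothetical):
--
-- > mkRangeQuery (FieldName "age") (RangeDoubleGte (GreaterThanEq 21))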
-data QueryStringQuery = - QueryStringQuery - { queryStringQuery :: QueryString - , queryStringDefaultField :: Maybe FieldName - , queryStringOperator :: Maybe BooleanOperator - , queryStringAnalyzer :: Maybe Analyzer - , queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard - , queryStringLowercaseExpanded :: Maybe LowercaseExpanded - , queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements - , queryStringFuzzyMaxExpansions :: Maybe MaxExpansions - , queryStringFuzziness :: Maybe Fuzziness - , queryStringFuzzyPrefixLength :: Maybe PrefixLength - , queryStringPhraseSlop :: Maybe PhraseSlop - , queryStringBoost :: Maybe Boost - , queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard - , queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries - , queryStringMinimumShouldMatch :: Maybe MinimumMatch - , queryStringLenient :: Maybe Lenient - , queryStringLocale :: Maybe Locale - } deriving (Eq, Read, Show, Generic, Typeable) - -mkQueryStringQuery :: QueryString -> QueryStringQuery -mkQueryStringQuery qs = - QueryStringQuery qs Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing - -data FieldOrFields = FofField FieldName - | FofFields (NonEmpty FieldName) deriving (Eq, Read, Show, Generic, Typeable) - -data PrefixQuery = - PrefixQuery - { prefixQueryField :: FieldName - , prefixQueryPrefixValue :: Text - , prefixQueryBoost :: Maybe Boost } deriving (Eq, Read, Show, Generic, Typeable) - -data NestedQuery = - NestedQuery - { nestedQueryPath :: QueryPath - , nestedQueryScoreType :: ScoreType - , nestedQuery :: Query } deriving (Eq, Read, Show, Generic, Typeable) - -data MoreLikeThisFieldQuery = - MoreLikeThisFieldQuery - { moreLikeThisFieldText :: Text - , moreLikeThisFieldFields :: FieldName - -- default 0.3 (30%) - , moreLikeThisFieldPercentMatch :: Maybe PercentMatch - , moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency - , moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms - , moreLikeThisFieldStopWords :: Maybe (NonEmpty StopWord) - , moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency - , moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency - , moreLikeThisFieldMinWordLength :: Maybe MinWordLength - , moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength - , moreLikeThisFieldBoostTerms :: Maybe BoostTerms - , moreLikeThisFieldBoost :: Maybe Boost - , moreLikeThisFieldAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data MoreLikeThisQuery = - MoreLikeThisQuery - { moreLikeThisText :: Text - , moreLikeThisFields :: Maybe (NonEmpty FieldName) - -- default 0.3 (30%) - , moreLikeThisPercentMatch :: Maybe PercentMatch - , moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency - , moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms - , moreLikeThisStopWords :: Maybe (NonEmpty StopWord) - , moreLikeThisMinDocFrequency :: Maybe MinDocFrequency - , moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency - , moreLikeThisMinWordLength :: Maybe MinWordLength - , moreLikeThisMaxWordLength :: Maybe MaxWordLength - , moreLikeThisBoostTerms :: Maybe BoostTerms - , moreLikeThisBoost :: Maybe Boost - , moreLikeThisAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data IndicesQuery = - IndicesQuery - { indicesQueryIndices :: [IndexName] - , indicesQuery :: Query - -- default "all" - , indicesQueryNoMatch :: Maybe Query } deriving (Eq, Read, Show, Generic, Typeable) - -data HasParentQuery = - HasParentQuery - { 
hasParentQueryType :: TypeName - , hasParentQuery :: Query - , hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Read, Show, Generic, Typeable) - -data HasChildQuery = - HasChildQuery - { hasChildQueryType :: TypeName - , hasChildQuery :: Query - , hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Read, Show, Generic, Typeable) - -data ScoreType = - ScoreTypeMax - | ScoreTypeSum - | ScoreTypeAvg - | ScoreTypeNone deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyQuery = - FuzzyQuery { fuzzyQueryField :: FieldName - , fuzzyQueryValue :: Text - , fuzzyQueryPrefixLength :: PrefixLength - , fuzzyQueryMaxExpansions :: MaxExpansions - , fuzzyQueryFuzziness :: Fuzziness - , fuzzyQueryBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyLikeFieldQuery = - FuzzyLikeFieldQuery - { fuzzyLikeField :: FieldName - -- anaphora is good for the soul. - , fuzzyLikeFieldText :: Text - , fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms - , fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency - , fuzzyLikeFieldFuzziness :: Fuzziness - , fuzzyLikeFieldPrefixLength :: PrefixLength - , fuzzyLikeFieldBoost :: Boost - , fuzzyLikeFieldAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyLikeThisQuery = - FuzzyLikeThisQuery - { fuzzyLikeFields :: [FieldName] - , fuzzyLikeText :: Text - , fuzzyLikeMaxQueryTerms :: MaxQueryTerms - , fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency - , fuzzyLikeFuzziness :: Fuzziness - , fuzzyLikePrefixLength :: PrefixLength - , fuzzyLikeBoost :: Boost - , fuzzyLikeAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data FilteredQuery = - FilteredQuery - { filteredQuery :: Query - , filteredFilter :: Filter } deriving (Eq, Read, Show, Generic, Typeable) - -data DisMaxQuery = - DisMaxQuery { disMaxQueries :: [Query] - -- default 0.0 - , disMaxTiebreaker :: Tiebreaker - , disMaxBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data MatchQuery = - MatchQuery { matchQueryField :: FieldName - , matchQueryQueryString :: QueryString - , matchQueryOperator :: BooleanOperator - , matchQueryZeroTerms :: ZeroTermsQuery - , matchQueryCutoffFrequency :: Maybe CutoffFrequency - , matchQueryMatchType :: Maybe MatchQueryType - , matchQueryAnalyzer :: Maybe Analyzer - , matchQueryMaxExpansions :: Maybe MaxExpansions - , matchQueryLenient :: Maybe Lenient - , matchQueryBoost :: Maybe Boost } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters, - enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery' --} -mkMatchQuery :: FieldName -> QueryString -> MatchQuery -mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing - -data MatchQueryType = - MatchPhrase - | MatchPhrasePrefix deriving (Eq, Read, Show, Generic, Typeable) - -data MultiMatchQuery = - MultiMatchQuery { multiMatchQueryFields :: [FieldName] - , multiMatchQueryString :: QueryString - , multiMatchQueryOperator :: BooleanOperator - , multiMatchQueryZeroTerms :: ZeroTermsQuery - , multiMatchQueryTiebreaker :: Maybe Tiebreaker - , multiMatchQueryType :: Maybe MultiMatchQueryType - , multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency - , multiMatchQueryAnalyzer :: Maybe Analyzer - , multiMatchQueryMaxExpansions :: Maybe MaxExpansions - , multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 
'mkMultiMatchQuery' is a convenience function that defaults the less common parameters, - enabling you to provide only the list of 'FieldName's and 'QueryString' to - make a 'MultiMatchQuery'. --} - -mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery -mkMultiMatchQuery matchFields query = - MultiMatchQuery matchFields query - Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing - -data MultiMatchQueryType = - MultiMatchBestFields - | MultiMatchMostFields - | MultiMatchCrossFields - | MultiMatchPhrase - | MultiMatchPhrasePrefix deriving (Eq, Read, Show, Generic, Typeable) - -data BoolQuery = - BoolQuery { boolQueryMustMatch :: [Query] - , boolQueryMustNotMatch :: [Query] - , boolQueryShouldMatch :: [Query] - , boolQueryMinimumShouldMatch :: Maybe MinimumMatch - , boolQueryBoost :: Maybe Boost - , boolQueryDisableCoord :: Maybe DisableCoord - } deriving (Eq, Read, Show, Generic, Typeable) - -mkBoolQuery :: [Query] -> [Query] -> [Query] -> BoolQuery -mkBoolQuery must mustNot should = - BoolQuery must mustNot should Nothing Nothing Nothing - -data BoostingQuery = - BoostingQuery { positiveQuery :: Query - , negativeQuery :: Query - , negativeBoost :: Boost } deriving (Eq, Read, Show, Generic, Typeable) - -data CommonTermsQuery = - CommonTermsQuery { commonField :: FieldName - , commonQuery :: QueryString - , commonCutoffFrequency :: CutoffFrequency - , commonLowFreqOperator :: BooleanOperator - , commonHighFreqOperator :: BooleanOperator - , commonMinimumShouldMatch :: Maybe CommonMinimumMatch - , commonBoost :: Maybe Boost - , commonAnalyzer :: Maybe Analyzer - , commonDisableCoord :: Maybe DisableCoord - } deriving (Eq, Read, Show, Generic, Typeable) - -data CommonMinimumMatch = - CommonMinimumMatchHighLow MinimumMatchHighLow - | CommonMinimumMatch MinimumMatch - deriving (Eq, Read, Show, Generic, Typeable) - -data MinimumMatchHighLow = - MinimumMatchHighLow { lowFreq :: MinimumMatch - , highFreq :: MinimumMatch } deriving (Eq, Read, Show, Generic, Typeable) - -data Filter = AndFilter [Filter] Cache - | OrFilter [Filter] Cache - | NotFilter Filter Cache - | IdentityFilter - | BoolFilter BoolMatch - | ExistsFilter FieldName -- always cached - | GeoBoundingBoxFilter GeoBoundingBoxConstraint - | GeoDistanceFilter GeoPoint Distance DistanceType OptimizeBbox Cache - | GeoDistanceRangeFilter GeoPoint DistanceRange - | GeoPolygonFilter FieldName [LatLon] - | IdsFilter MappingName [DocId] - | LimitFilter Int - | MissingFilter FieldName Existence NullValue - | PrefixFilter FieldName PrefixValue Cache - | QueryFilter Query Cache - | RangeFilter FieldName RangeValue RangeExecution Cache - | RegexpFilter FieldName Regexp RegexpFlags CacheName Cache CacheKey - | TermFilter Term Cache - deriving (Eq, Read, Show, Generic, Typeable) - -data ZeroTermsQuery = ZeroTermsNone - | ZeroTermsAll deriving (Eq, Read, Show, Generic, Typeable) - -data RangeExecution = RangeExecutionIndex - | RangeExecutionFielddata deriving (Eq, Read, Show, Generic, Typeable) - -newtype Regexp = Regexp Text deriving (Eq, Read, Show, Generic, Typeable, FromJSON) - -data RegexpFlags = AllRegexpFlags - | NoRegexpFlags - | SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Read, Show, Generic, Typeable) - -data RegexpFlag = AnyString - | Automaton - | Complement - | Empty - | Intersection - | Interval deriving (Eq, Read, Show, Generic, Typeable) - -newtype LessThan = LessThan Double deriving (Eq, Read, Show, Generic, Typeable) -newtype LessThanEq = LessThanEq Double deriving (Eq, Read, Show, Generic, Typeable) 
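-- A hedged composition sketch (editor's example; the field values are made
-- up) combining the convenience constructors above into one bool query:
--
-- > mkBoolQuery
-- >   [ QueryMatchQuery (mkMatchQuery (FieldName "user") (QueryString "bitemyapp")) ]
-- >   [ TermQuery (Term "status" "inactive") Nothing ]
-- >   []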
-newtype GreaterThan = GreaterThan Double deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Read, Show, Generic, Typeable) - -newtype LessThanD = LessThanD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Read, Show, Generic, Typeable) - -data RangeValue = RangeDateLte LessThanEqD - | RangeDateLt LessThanD - | RangeDateGte GreaterThanEqD - | RangeDateGt GreaterThanD - | RangeDateGtLt GreaterThanD LessThanD - | RangeDateGteLte GreaterThanEqD LessThanEqD - | RangeDateGteLt GreaterThanEqD LessThanD - | RangeDateGtLte GreaterThanD LessThanEqD - | RangeDoubleLte LessThanEq - | RangeDoubleLt LessThan - | RangeDoubleGte GreaterThanEq - | RangeDoubleGt GreaterThan - | RangeDoubleGtLt GreaterThan LessThan - | RangeDoubleGteLte GreaterThanEq LessThanEq - | RangeDoubleGteLt GreaterThanEq LessThan - | RangeDoubleGtLte GreaterThan LessThanEq - deriving (Eq, Read, Show, Generic, Typeable) - -rangeValueToPair :: RangeValue -> [Pair] -rangeValueToPair rv = case rv of - RangeDateLte (LessThanEqD t) -> ["lte" .= t] - RangeDateGte (GreaterThanEqD t) -> ["gte" .= t] - RangeDateLt (LessThanD t) -> ["lt" .= t] - RangeDateGt (GreaterThanD t) -> ["gt" .= t] - RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g] - RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g] - RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g] - RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g] - RangeDoubleLte (LessThanEq t) -> ["lte" .= t] - RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t] - RangeDoubleLt (LessThan t) -> ["lt" .= t] - RangeDoubleGt (GreaterThan t) -> ["gt" .= t] - RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g] - RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g] - RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g] - RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g] - -data Term = Term { termField :: Text - , termValue :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -data BoolMatch = MustMatch Term Cache - | MustNotMatch Term Cache - | ShouldMatch [Term] Cache deriving (Eq, Read, Show, Generic, Typeable) - --- "memory" or "indexed" -data GeoFilterType = GeoFilterMemory - | GeoFilterIndexed deriving (Eq, Read, Show, Generic, Typeable) - -data LatLon = LatLon { lat :: Double - , lon :: Double } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoBoundingBox = - GeoBoundingBox { topLeft :: LatLon - , bottomRight :: LatLon } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoBoundingBoxConstraint = - GeoBoundingBoxConstraint { geoBBField :: FieldName - , constraintBox :: GeoBoundingBox - , bbConstraintcache :: Cache - , geoType :: GeoFilterType - } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoPoint = - GeoPoint { geoField :: FieldName - , latLon :: LatLon} deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceUnit = Miles - | Yards - | Feet - | Inches - | Kilometers - | Meters - | Centimeters - | Millimeters - | NauticalMiles deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceType = Arc - | SloppyArc -- doesn't exist <1.0 - | Plane deriving (Eq, Read, Show, Generic, Typeable) - -data OptimizeBbox 
= OptimizeGeoFilterType GeoFilterType - | NoOptimizeBbox deriving (Eq, Read, Show, Generic, Typeable) - -data Distance = - Distance { coefficient :: Double - , unit :: DistanceUnit } deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceRange = - DistanceRange { distanceFrom :: Distance - , distanceTo :: Distance } deriving (Eq, Read, Show, Generic, Typeable) - -type TemplateQueryKey = Text -type TemplateQueryValue = Text - -newtype TemplateQueryKeyValuePairs = TemplateQueryKeyValuePairs (HM.HashMap TemplateQueryKey TemplateQueryValue) - deriving (Eq, Read, Show, Generic, Typeable) - -instance ToJSON TemplateQueryKeyValuePairs where - toJSON (TemplateQueryKeyValuePairs x) = Object $ HM.map toJSON x - -instance FromJSON TemplateQueryKeyValuePairs where - parseJSON (Object o) = pure . TemplateQueryKeyValuePairs $ HM.mapMaybe getValue o - where getValue (String x) = Just x - getValue _ = Nothing - parseJSON _ = fail "error parsing TemplateQueryKeyValuePairs" - -data TemplateQueryInline = - TemplateQueryInline { inline :: Query - , params :: TemplateQueryKeyValuePairs - } - deriving (Eq, Read, Show, Generic, Typeable) - -instance ToJSON TemplateQueryInline where - toJSON TemplateQueryInline{..} = object [ "query" .= inline - , "params" .= params - ] - -instance FromJSON TemplateQueryInline where - parseJSON = withObject "TemplateQueryInline" parse - where parse o = TemplateQueryInline - <$> o .: "query" - <*> o .: "params" - -data SearchResult a = - SearchResult { took :: Int - , timedOut :: Bool - , shards :: ShardResult - , searchHits :: SearchHits a - , aggregations :: Maybe AggregationResults - , scrollId :: Maybe ScrollId - , suggest :: Maybe NamedSuggestionResponse -- ^ Only one Suggestion request / response per Search is supported. - } - deriving (Eq, Read, Show, Generic, Typeable) - -newtype ScrollId = ScrollId Text deriving (Eq, Read, Show, Generic, Ord, ToJSON, FromJSON) - -type Score = Maybe Double - -data SearchHits a = - SearchHits { hitsTotal :: Int - , maxScore :: Score - , hits :: [Hit a] } deriving (Eq, Read, Show, Generic, Typeable) - -instance Semigroup (SearchHits a) where - (SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb) - -instance Monoid (SearchHits a) where - mempty = SearchHits 0 Nothing mempty - mappend = (<>) - -data Hit a = - Hit { hitIndex :: IndexName - , hitType :: MappingName - , hitDocId :: DocId - , hitScore :: Score - , hitSource :: Maybe a - , hitHighlight :: Maybe HitHighlight } deriving (Eq, Read, Show, Generic, Typeable) - -data ShardResult = - ShardResult { shardTotal :: Int - , shardsSuccessful :: Int - , shardsFailed :: Int } deriving (Eq, Read, Show, Generic, Typeable) - -type HitHighlight = M.Map Text [Text] - -showText :: Show a => a -> Text -showText = T.pack . show - -readMay :: Read a => String -> Maybe a -readMay s = case reads s of - (a, ""):_ -> Just a - _ -> Nothing - -parseReadText :: Read a => Text -> Parser a -parseReadText = maybe mzero return . readMay . 
T.unpack - -type Aggregations = M.Map Text Aggregation - -emptyAggregations :: Aggregations -emptyAggregations = M.empty - -mkAggregations :: Text -> Aggregation -> Aggregations -mkAggregations name aggregation = M.insert name aggregation emptyAggregations - -data TermOrder = TermOrder{ termSortField :: Text - , termSortOrder :: SortOrder } deriving (Eq, Read, Show, Generic, Typeable) - -data TermInclusion = TermInclusion Text - | TermPattern Text Text deriving (Eq, Read, Show, Generic, Typeable) - -data CollectionMode = BreadthFirst - | DepthFirst deriving (Eq, Read, Show, Generic, Typeable) - -data ExecutionHint = Ordinals - | GlobalOrdinals - | GlobalOrdinalsHash - | GlobalOrdinalsLowCardinality - | Map deriving (Eq, Read, Show, Generic, Typeable) - -data TimeInterval = Weeks - | Days - | Hours - | Minutes - | Seconds deriving Eq - -data Interval = Year - | Quarter - | Month - | Week - | Day - | Hour - | Minute - | Second - | FractionalInterval Float TimeInterval deriving (Eq, Read, Show, Generic, Typeable) - -data Aggregation = TermsAgg TermsAggregation - | CardinalityAgg CardinalityAggregation - | DateHistogramAgg DateHistogramAggregation - | ValueCountAgg ValueCountAggregation - | FilterAgg FilterAggregation - | DateRangeAgg DateRangeAggregation - | MissingAgg MissingAggregation - | TopHitsAgg TopHitsAggregation - deriving (Eq, Read, Show, Generic, Typeable) - -data TopHitsAggregation = TopHitsAggregation - { taFrom :: Maybe From - , taSize :: Maybe Size - , taSort :: Maybe Sort - } deriving (Eq, Read, Show) - -data MissingAggregation = MissingAggregation - { maField :: Text - } deriving (Eq, Read, Show, Generic, Typeable) - -data TermsAggregation = TermsAggregation { term :: Either Text Text - , termInclude :: Maybe TermInclusion - , termExclude :: Maybe TermInclusion - , termOrder :: Maybe TermOrder - , termMinDocCount :: Maybe Int - , termSize :: Maybe Int - , termShardSize :: Maybe Int - , termCollectMode :: Maybe CollectionMode - , termExecutionHint :: Maybe ExecutionHint - , termAggs :: Maybe Aggregations - } deriving (Eq, Read, Show, Generic, Typeable) - -data CardinalityAggregation = CardinalityAggregation { cardinalityField :: FieldName, - precisionThreshold :: Maybe Int - } deriving (Eq, Read, Show, Generic, Typeable) - -data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName - , dateInterval :: Interval - , dateFormat :: Maybe Text - -- pre and post deprecated in 1.5 - , datePreZone :: Maybe Text - , datePostZone :: Maybe Text - , datePreOffset :: Maybe Text - , datePostOffset :: Maybe Text - , dateAggs :: Maybe Aggregations - } deriving (Eq, Read, Show, Generic, Typeable) - - -data DateRangeAggregation = DateRangeAggregation { draField :: FieldName - , draFormat :: Maybe Text - , draRanges :: NonEmpty DateRangeAggRange - } deriving (Eq, Read, Show, Generic, Typeable) - -data DateRangeAggRange = DateRangeFrom DateMathExpr - | DateRangeTo DateMathExpr - | DateRangeFromAndTo DateMathExpr DateMathExpr deriving (Eq, Read, Show, Generic, Typeable) - --- | See for more information. -data DateMathExpr = DateMathExpr DateMathAnchor [DateMathModifier] deriving (Eq, Read, Show, Generic, Typeable) - - --- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from. 
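-- A worked sketch (editor's note): the ES date-math string "now-3d/d",
-- i.e. three days ago rounded down to the day, corresponds to
--
-- > DateMathExpr DMNow [SubtractTime 3 DMDay, RoundDownTo DMDay]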
-data DateMathAnchor = DMNow - | DMDate Day deriving (Eq, Read, Show, Generic, Typeable) - -data DateMathModifier = AddTime Int DateMathUnit - | SubtractTime Int DateMathUnit - | RoundDownTo DateMathUnit deriving (Eq, Read, Show, Generic, Typeable) - -data DateMathUnit = DMYear - | DMMonth - | DMWeek - | DMDay - | DMHour - | DMMinute - | DMSecond deriving (Eq, Read, Show, Generic, Typeable) - --- | See for more information. -data ValueCountAggregation = FieldValueCount FieldName - | ScriptValueCount Script deriving (Eq, Read, Show, Generic, Typeable) - --- | Single-bucket filter aggregations. See for more information. -data FilterAggregation = FilterAggregation { faFilter :: Filter - , faAggs :: Maybe Aggregations} deriving (Eq, Read, Show, Generic, Typeable) - -mkTermsAggregation :: Text -> TermsAggregation -mkTermsAggregation t = TermsAggregation (Left t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing - -mkTermsScriptAggregation :: Text -> TermsAggregation -mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing - -mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation -mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing - -mkCardinalityAggregation :: FieldName -> CardinalityAggregation -mkCardinalityAggregation t = CardinalityAggregation t Nothing - -instance ToJSON Version where - toJSON Version {..} = object ["number" .= number - ,"build_hash" .= build_hash - ,"build_timestamp" .= build_timestamp - ,"build_snapshot" .= build_snapshot - ,"lucene_version" .= lucene_version] - -instance FromJSON Version where - parseJSON = withObject "Version" parse - where parse o = Version - <$> o .: "number" - <*> o .: "build_hash" - <*> o .: "build_timestamp" - <*> o .: "build_snapshot" - <*> o .: "lucene_version" - -instance ToJSON VersionNumber where - toJSON = toJSON . Vers.showVersion . versionNumber - -instance FromJSON VersionNumber where - parseJSON = withText "VersionNumber" (parse . T.unpack) - where - parse s = case filter (null . snd)(RP.readP_to_S Vers.parseVersion s) of - [(v, _)] -> pure (VersionNumber v) - [] -> fail ("Invalid version string " ++ s) - xs -> fail ("Ambiguous version string " ++ s ++ " (" ++ intercalate ", " (Vers.showVersion . 
fst <$> xs) ++ ")") - -instance ToJSON TermOrder where - toJSON (TermOrder termSortField termSortOrder) = object [termSortField .= termSortOrder] - -instance ToJSON TermInclusion where - toJSON (TermInclusion x) = toJSON x - toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern, - "flags" .= flags] - -instance ToJSON CollectionMode where - toJSON BreadthFirst = "breadth_first" - toJSON DepthFirst = "depth_first" - -instance ToJSON ExecutionHint where - toJSON Ordinals = "ordinals" - toJSON GlobalOrdinals = "global_ordinals" - toJSON GlobalOrdinalsHash = "global_ordinals_hash" - toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality" - toJSON Map = "map" - -instance ToJSON Interval where - toJSON Year = "year" - toJSON Quarter = "quarter" - toJSON Month = "month" - toJSON Week = "week" - toJSON Day = "day" - toJSON Hour = "hour" - toJSON Minute = "minute" - toJSON Second = "second" - toJSON (FractionalInterval fraction interval) = toJSON $ show fraction ++ show interval - -instance Show TimeInterval where - show Weeks = "w" - show Days = "d" - show Hours = "h" - show Minutes = "m" - show Seconds = "s" - -instance Read TimeInterval where - readPrec = f =<< TR.get - where - f 'w' = return Weeks - f 'd' = return Days - f 'h' = return Hours - f 'm' = return Minutes - f 's' = return Seconds - f _ = fail "TimeInterval expected one of w, d, h, m, s" - -instance ToJSON Aggregation where - toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) = - omitNulls ["terms" .= omitNulls [ toJSON' term, - "include" .= include, - "exclude" .= exclude, - "order" .= order, - "min_doc_count" .= minDocCount, - "size" .= size, - "shard_size" .= shardSize, - "collect_mode" .= collectMode, - "execution_hint" .= executionHint - ], - "aggs" .= termAggs ] - where - toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y } - - toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) = - object ["cardinality" .= omitNulls [ "field" .= field, - "precisionThreshold" .= precisionThreshold - ] - ] - - toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) = - omitNulls ["date_histogram" .= omitNulls [ "field" .= field, - "interval" .= interval, - "format" .= format, - "pre_zone" .= preZone, - "post_zone" .= postZone, - "pre_offset" .= preOffset, - "post_offset" .= postOffset - ], - "aggs" .= dateHistoAggs ] - toJSON (ValueCountAgg a) = object ["value_count" .= v] - where v = case a of - (FieldValueCount (FieldName n)) -> object ["field" .= n] - (ScriptValueCount (Script s)) -> object ["script" .= s] - toJSON (FilterAgg (FilterAggregation filt ags)) = - omitNulls [ "filter" .= filt - , "aggs" .= ags] - toJSON (DateRangeAgg a) = object [ "date_range" .= a - ] - toJSON (MissingAgg (MissingAggregation{..})) = - object ["missing" .= object ["field" .= maField]] - - toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) = - omitNulls ["top_hits" .= omitNulls [ "size" .= msize - , "from" .= mfrom - , "sort" .= msort - ] - ] - -instance ToJSON DateRangeAggregation where - toJSON DateRangeAggregation {..} = - omitNulls [ "field" .= draField - , "format" .= draFormat - , "ranges" .= toList draRanges - ] - -instance ToJSON DateRangeAggRange where - toJSON (DateRangeFrom e) = object [ "from" .= e ] - toJSON (DateRangeTo e) = object [ "to" .= e ] - toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] - -instance 
ToJSON DateMathExpr where - toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods)) - where fmtA DMNow = "now" - fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||" - fmtMod (AddTime n u) = "+" <> showText n <> fmtU u - fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u - fmtMod (RoundDownTo u) = "/" <> fmtU u - fmtU DMYear = "y" - fmtU DMMonth = "M" - fmtU DMWeek = "w" - fmtU DMDay = "d" - fmtU DMHour = "h" - fmtU DMMinute = "m" - fmtU DMSecond = "s" - - -type AggregationResults = M.Map Text Value - -class BucketAggregation a where - key :: a -> BucketValue - docCount :: a -> Int - aggs :: a -> Maybe AggregationResults - - -data Bucket a = Bucket { buckets :: [a]} deriving (Read, Show) - -data BucketValue = TextValue Text - | ScientificValue Scientific - | BoolValue Bool deriving (Read, Show) - -data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show) - -data TopHitResult a = TopHitResult { tarHits :: (SearchHits a) - } deriving Show - -data TermsResult = TermsResult { termKey :: BucketValue - , termsDocCount :: Int - , termsAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateHistogramResult = DateHistogramResult { dateKey :: Int - , dateKeyStr :: Maybe Text - , dateDocCount :: Int - , dateHistogramAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateRangeResult = DateRangeResult { dateRangeKey :: Text - , dateRangeFrom :: Maybe UTCTime - , dateRangeFromAsString :: Maybe Text - , dateRangeTo :: Maybe UTCTime - , dateRangeToAsString :: Maybe Text - , dateRangeDocCount :: Int - , dateRangeAggs :: Maybe AggregationResults } deriving (Read, Show, Eq, Generic, Typeable) - -toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult) -toTerms = toAggResult - -toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) -toDateHistogram = toAggResult - -toMissing :: Text -> AggregationResults -> Maybe MissingResult -toMissing = toAggResult - -toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) -toTopHits = toAggResult - -toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a -toAggResult t a = M.lookup t a >>= deserialize - where deserialize = parseMaybe parseJSON - -instance BucketAggregation TermsResult where - key = termKey - docCount = termsDocCount - aggs = termsAggs - -instance BucketAggregation DateHistogramResult where - key = TextValue . showText . dateKey - docCount = dateDocCount - aggs = dateHistogramAggs - -instance BucketAggregation DateRangeResult where - key = TextValue . dateRangeKey - docCount = dateRangeDocCount - aggs = dateRangeAggs - -instance (FromJSON a) => FromJSON (Bucket a) where - parseJSON (Object v) = Bucket <$> - v .: "buckets" - parseJSON _ = mempty - -instance FromJSON BucketValue where - parseJSON (String t) = return $ TextValue t - parseJSON (Number s) = return $ ScientificValue s - parseJSON (Bool b) = return $ BoolValue b - parseJSON _ = mempty - -instance FromJSON MissingResult where - parseJSON = withObject "MissingResult" parse - where parse v = MissingResult <$> v .: "doc_count" - -instance FromJSON TermsResult where - parseJSON (Object v) = TermsResult <$> - v .: "key" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v ["key", "doc_count"]) - parseJSON _ = mempty - -instance FromJSON DateHistogramResult where - parseJSON (Object v) = DateHistogramResult <$> - v .: "key" <*> - v .:? 
"key_as_string" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v [ "key" - , "doc_count" - , "key_as_string" - ] - ) - parseJSON _ = mempty - -instance FromJSON DateRangeResult where - parseJSON = withObject "DateRangeResult" parse - where parse v = DateRangeResult <$> - v .: "key" <*> - (fmap posixMS <$> v .:? "from") <*> - v .:? "from_as_string" <*> - (fmap posixMS <$> v .:? "to") <*> - v .:? "to_as_string" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v [ "key" - , "from" - , "from_as_string" - , "to" - , "to_as_string" - , "doc_count" - ] - ) - -instance FromJSON POSIXMS where - parseJSON = withScientific "POSIXMS" (return . parse) - where parse n = let n' = truncate n :: Integer - in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) - -instance (FromJSON a) => FromJSON (TopHitResult a) where - parseJSON (Object v) = TopHitResult <$> - v .: "hits" - parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" - -instance Semigroup Filter where - a <> b = AndFilter [a, b] defaultCache - -instance Monoid Filter where - mempty = IdentityFilter - mappend = (<>) - -instance Seminearring Filter where - a <||> b = OrFilter [a, b] defaultCache - -instance ToJSON Filter where - toJSON (AndFilter filters cache) = - object ["and" .= - object [ "filters" .= fmap toJSON filters - , "_cache" .= cache]] - - toJSON (OrFilter filters cache) = - object ["or" .= - object [ "filters" .= fmap toJSON filters - , "_cache" .= cache]] - - toJSON (NotFilter notFilter cache) = - object ["not" .= - object ["filter" .= notFilter - , "_cache" .= cache]] - - toJSON (IdentityFilter) = - object ["match_all" .= object []] - - toJSON (TermFilter (Term termFilterField termFilterValue) cache) = - object ["term" .= object base] - where base = [termFilterField .= termFilterValue, - "_cache" .= cache] - - toJSON (ExistsFilter (FieldName fieldName)) = - object ["exists" .= object - ["field" .= fieldName]] - - toJSON (BoolFilter boolMatch) = - object ["bool" .= boolMatch] - - toJSON (GeoBoundingBoxFilter bbConstraint) = - object ["geo_bounding_box" .= bbConstraint] - - toJSON (GeoDistanceFilter (GeoPoint (FieldName distanceGeoField) geoDistLatLon) - distance distanceType optimizeBbox cache) = - object ["geo_distance" .= - object ["distance" .= distance - , "distance_type" .= distanceType - , "optimize_bbox" .= optimizeBbox - , distanceGeoField .= geoDistLatLon - , "_cache" .= cache]] - - toJSON (GeoDistanceRangeFilter (GeoPoint (FieldName gddrField) drLatLon) - (DistanceRange geoDistRangeDistFrom drDistanceTo)) = - object ["geo_distance_range" .= - object ["from" .= geoDistRangeDistFrom - , "to" .= drDistanceTo - , gddrField .= drLatLon]] - - toJSON (GeoPolygonFilter (FieldName geoPolygonFilterField) latLons) = - object ["geo_polygon" .= - object [geoPolygonFilterField .= - object ["points" .= fmap toJSON latLons]]] - - toJSON (IdsFilter (MappingName mappingName) values) = - object ["ids" .= - object ["type" .= mappingName - , "values" .= fmap unpackId values]] - - toJSON (LimitFilter limit) = - object ["limit" .= object ["value" .= limit]] - - toJSON (MissingFilter (FieldName fieldName) (Existence existence) (NullValue nullValue)) = - object ["missing" .= - object [ "field" .= fieldName - , "existence" .= existence - , "null_value" .= nullValue]] - - toJSON (PrefixFilter (FieldName fieldName) fieldValue cache) = - object ["prefix" .= - object [fieldName .= fieldValue - , "_cache" .= cache]] - - toJSON (QueryFilter query False) = - object ["query" .= toJSON query ] - toJSON (QueryFilter query True) = - 
object ["fquery" .= - object [ "query" .= toJSON query - , "_cache" .= True ]] - - toJSON (RangeFilter (FieldName fieldName) rangeValue rangeExecution cache) = - object ["range" .= - object [ fieldName .= object (rangeValueToPair rangeValue) - , "execution" .= rangeExecution - , "_cache" .= cache]] - - toJSON (RegexpFilter (FieldName fieldName) - (Regexp regexText) flags (CacheName cacheName) cache (CacheKey cacheKey)) = - object ["regexp" .= - object [fieldName .= - object ["value" .= regexText - , "flags" .= flags] - , "_name" .= cacheName - , "_cache" .= cache - , "_cache_key" .= cacheKey]] - -instance FromJSON Filter where - parseJSON = withObject "Filter" parse - where parse o = andFilter `taggedWith` "and" - <|> orFilter `taggedWith` "or" - <|> notFilter `taggedWith` "not" - <|> identityFilter `taggedWith` "match_all" - <|> boolFilter `taggedWith` "bool" - <|> existsFilter `taggedWith` "exists" - <|> geoBoundingBoxFilter `taggedWith` "geo_bounding_box" - <|> geoDistanceFilter `taggedWith` "geo_distance" - <|> geoDistanceRangeFilter `taggedWith` "geo_distance_range" - <|> geoPolygonFilter `taggedWith` "geo_polygon" - <|> idsFilter `taggedWith` "ids" - <|> limitFilter `taggedWith` "limit" - <|> missingFilter `taggedWith` "missing" - <|> prefixFilter `taggedWith` "prefix" - <|> queryFilter `taggedWith` "query" - <|> fqueryFilter `taggedWith` "fquery" - <|> rangeFilter `taggedWith` "range" - <|> regexpFilter `taggedWith` "regexp" - <|> termFilter `taggedWith` "term" - where taggedWith parser k = parser =<< o .: k - andFilter o = AndFilter <$> o .: "filters" - <*> o .:? "_cache" .!= defaultCache - orFilter o = OrFilter <$> o .: "filters" - <*> o .:? "_cache" .!= defaultCache - notFilter o = NotFilter <$> o .: "filter" - <*> o .: "_cache" .!= defaultCache - identityFilter :: Object -> Parser Filter - identityFilter m - | HM.null m = pure IdentityFilter - | otherwise = fail ("Identityfilter expected empty object but got " <> show m) - boolFilter = pure . BoolFilter - existsFilter o = ExistsFilter <$> o .: "field" - geoBoundingBoxFilter = pure . GeoBoundingBoxFilter - geoDistanceFilter o = do - case HM.toList (deleteSeveral ["distance", "distance_type", "optimize_bbox", "_cache"] o) of - [(fn, v)] -> do - gp <- GeoPoint (FieldName fn) <$> parseJSON v - GeoDistanceFilter gp <$> o .: "distance" - <*> o .: "distance_type" - <*> o .: "optimize_bbox" - <*> o .:? "_cache" .!= defaultCache - _ -> fail "Could not find GeoDistanceFilter field name" - geoDistanceRangeFilter o = do - case HM.toList (deleteSeveral ["from", "to"] o) of - [(fn, v)] -> do - gp <- GeoPoint (FieldName fn) <$> parseJSON v - rng <- DistanceRange <$> o .: "from" <*> o .: "to" - return (GeoDistanceRangeFilter gp rng) - _ -> fail "Could not find GeoDistanceRangeFilter field name" - geoPolygonFilter = fieldTagged $ \fn o -> GeoPolygonFilter fn <$> o .: "points" - idsFilter o = IdsFilter <$> o .: "type" - <*> o .: "values" - limitFilter o = LimitFilter <$> o .: "value" - missingFilter o = MissingFilter <$> o .: "field" - <*> o .: "existence" - <*> o .: "null_value" - prefixFilter o = case HM.toList (HM.delete "_cache" o) of - [(fn, String v)] -> PrefixFilter (FieldName fn) v <$> o .:? "_cache" .!= defaultCache - _ -> fail "Could not parse PrefixFilter" - - queryFilter q = pure (QueryFilter q False) - fqueryFilter o = QueryFilter <$> o .: "query" <*> pure True - rangeFilter o = case HM.toList (deleteSeveral ["execution", "_cache"] o) of - [(fn, v)] -> RangeFilter (FieldName fn) - <$> parseJSON v - <*> o .: "execution" - <*> o .:? 
"_cache" .!= defaultCache - _ -> fail "Could not find field name for RangeFilter" - regexpFilter o = case HM.toList (deleteSeveral ["_name", "_cache", "_cache_key"] o) of - [(fn, Object o')] -> RegexpFilter (FieldName fn) - <$> o' .: "value" - <*> o' .: "flags" - <*> o .: "_name" - <*> o .:? "_cache" .!= defaultCache - <*> o .: "_cache_key" - _ -> fail "Could not find field name for RegexpFilter" - termFilter o = case HM.toList (HM.delete "_cache" o) of - [(termField, String termVal)] -> TermFilter (Term termField termVal) - <$> o .:? "_cache" .!= defaultCache - _ -> fail "Could not find term field for TermFilter" - -fieldTagged :: Monad m => (FieldName -> Object -> m a) -> Object -> m a -fieldTagged f o = case HM.toList o of - [(k, Object o')] -> f (FieldName k) o' - _ -> fail "Expected object with 1 field-named key" - --- Try to get an AggregationResults when we don't know the --- field name. We filter out the known keys to try to minimize the noise. -getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults -getNamedSubAgg o knownKeys = maggRes - where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o - maggRes - | HM.null unknownKeys = Nothing - | otherwise = Just . M.fromList $ HM.toList unknownKeys - -instance ToJSON GeoPoint where - toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = - object [ geoPointField .= geoPointLatLon ] - - -instance ToJSON Query where - toJSON (TermQuery (Term termQueryField termQueryValue) boost) = - object [ "term" .= - object [termQueryField .= object merged]] - where - base = [ "value" .= termQueryValue ] - boosted = maybe [] (return . ("boost" .=)) boost - merged = mappend base boosted - - toJSON (TermsQuery fieldName terms) = - object [ "terms" .= object conjoined ] - where conjoined = [fieldName .= terms] - - toJSON (IdsQuery idsQueryMappingName docIds) = - object [ "ids" .= object conjoined ] - where conjoined = [ "type" .= idsQueryMappingName - , "values" .= fmap toJSON docIds ] - - toJSON (QueryQueryStringQuery qQueryStringQuery) = - object [ "query_string" .= qQueryStringQuery ] - - toJSON (QueryMatchQuery matchQuery) = - object [ "match" .= matchQuery ] - - toJSON (QueryMultiMatchQuery multiMatchQuery) = - toJSON multiMatchQuery - - toJSON (QueryBoolQuery boolQuery) = - object [ "bool" .= boolQuery ] - - toJSON (QueryBoostingQuery boostingQuery) = - object [ "boosting" .= boostingQuery ] - - toJSON (QueryCommonTermsQuery commonTermsQuery) = - object [ "common" .= commonTermsQuery ] - - toJSON (ConstantScoreFilter csFilter boost) = - object ["constant_score" .= object ["filter" .= csFilter - , "boost" .= boost]] - - toJSON (ConstantScoreQuery query boost) = - object ["constant_score" .= object ["query" .= query - , "boost" .= boost]] - - toJSON (QueryDisMaxQuery disMaxQuery) = - object [ "dis_max" .= disMaxQuery ] - - toJSON (QueryFilteredQuery qFilteredQuery) = - object [ "filtered" .= qFilteredQuery ] - - toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) = - object [ "fuzzy_like_this" .= fuzzyQuery ] - - toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) = - object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ] - - toJSON (QueryFuzzyQuery fuzzyQuery) = - object [ "fuzzy" .= fuzzyQuery ] - - toJSON (QueryHasChildQuery childQuery) = - object [ "has_child" .= childQuery ] - - toJSON (QueryHasParentQuery parentQuery) = - object [ "has_parent" .= parentQuery ] - - toJSON (QueryIndicesQuery qIndicesQuery) = - object [ "indices" .= qIndicesQuery ] - - toJSON (MatchAllQuery boost) = - object [ "match_all" .= omitNulls [ "boost" .= 
boost ] ] - - toJSON (QueryMoreLikeThisQuery query) = - object [ "more_like_this" .= query ] - - toJSON (QueryMoreLikeThisFieldQuery query) = - object [ "more_like_this_field" .= query ] - - toJSON (QueryNestedQuery query) = - object [ "nested" .= query ] - - toJSON (QueryPrefixQuery query) = - object [ "prefix" .= query ] - - toJSON (QueryRangeQuery query) = - object [ "range" .= query ] - - toJSON (QueryRegexpQuery query) = - object [ "regexp" .= query ] - - toJSON (QuerySimpleQueryStringQuery query) = - object [ "simple_query_string" .= query ] - - toJSON (QueryTemplateQueryInline templateQuery) = - object [ "template" .= templateQuery ] - -instance FromJSON Query where - parseJSON v = withObject "Query" parse v - where parse o = termQuery `taggedWith` "term" - <|> termsQuery `taggedWith` "terms" - <|> idsQuery `taggedWith` "ids" - <|> queryQueryStringQuery `taggedWith` "query_string" - <|> queryMatchQuery `taggedWith` "match" - <|> queryMultiMatchQuery - <|> queryBoolQuery `taggedWith` "bool" - <|> queryBoostingQuery `taggedWith` "boosting" - <|> queryCommonTermsQuery `taggedWith` "common" - <|> constantScoreFilter `taggedWith` "constant_score" - <|> constantScoreQuery `taggedWith` "constant_score" - <|> queryDisMaxQuery `taggedWith` "dis_max" - <|> queryFilteredQuery `taggedWith` "filtered" - <|> queryFuzzyLikeThisQuery `taggedWith` "fuzzy_like_this" - <|> queryFuzzyLikeFieldQuery `taggedWith` "fuzzy_like_this_field" - <|> queryFuzzyQuery `taggedWith` "fuzzy" - <|> queryHasChildQuery `taggedWith` "has_child" - <|> queryHasParentQuery `taggedWith` "has_parent" - <|> queryIndicesQuery `taggedWith` "indices" - <|> matchAllQuery `taggedWith` "match_all" - <|> queryMoreLikeThisQuery `taggedWith` "more_like_this" - <|> queryMoreLikeThisFieldQuery `taggedWith` "more_like_this_field" - <|> queryNestedQuery `taggedWith` "nested" - <|> queryPrefixQuery `taggedWith` "prefix" - <|> queryRangeQuery `taggedWith` "range" - <|> queryRegexpQuery `taggedWith` "regexp" - <|> querySimpleQueryStringQuery `taggedWith` "simple_query_string" - <|> queryTemplateQueryInline `taggedWith` "template" - where taggedWith parser k = parser =<< o .: k - termQuery = fieldTagged $ \(FieldName fn) o -> - TermQuery <$> (Term fn <$> o .: "value") <*> o .:? "boost" - termsQuery o = case HM.toList o of - [(fn, vs)] -> do vals <- parseJSON vs - case vals of - x:xs -> return (TermsQuery fn (x :| xs)) - _ -> fail "Expected non empty list of values" - _ -> fail "Expected object with 1 field-named key" - idsQuery o = IdsQuery <$> o .: "type" - <*> o .: "values" - queryQueryStringQuery = pure . QueryQueryStringQuery - queryMatchQuery = pure . QueryMatchQuery - queryMultiMatchQuery = QueryMultiMatchQuery <$> parseJSON v - queryBoolQuery = pure . QueryBoolQuery - queryBoostingQuery = pure . QueryBoostingQuery - queryCommonTermsQuery = pure . QueryCommonTermsQuery - constantScoreFilter o = case HM.lookup "filter" o of - Just x -> ConstantScoreFilter <$> parseJSON x - <*> o .: "boost" - _ -> fail "Does not appear to be a ConstantScoreFilter" - constantScoreQuery o = case HM.lookup "query" o of - Just x -> ConstantScoreQuery <$> parseJSON x - <*> o .: "boost" - _ -> fail "Does not appear to be a ConstantScoreQuery" - queryDisMaxQuery = pure . QueryDisMaxQuery - queryFilteredQuery = pure . QueryFilteredQuery - queryFuzzyLikeThisQuery = pure . QueryFuzzyLikeThisQuery - queryFuzzyLikeFieldQuery = pure . QueryFuzzyLikeFieldQuery - queryFuzzyQuery = pure . QueryFuzzyQuery - queryHasChildQuery = pure . 
QueryHasChildQuery - queryHasParentQuery = pure . QueryHasParentQuery - queryIndicesQuery = pure . QueryIndicesQuery - matchAllQuery o = MatchAllQuery <$> o .:? "boost" - queryMoreLikeThisQuery = pure . QueryMoreLikeThisQuery - queryMoreLikeThisFieldQuery = pure . QueryMoreLikeThisFieldQuery - queryNestedQuery = pure . QueryNestedQuery - queryPrefixQuery = pure . QueryPrefixQuery - queryRangeQuery = pure . QueryRangeQuery - queryRegexpQuery = pure . QueryRegexpQuery - querySimpleQueryStringQuery = pure . QuerySimpleQueryStringQuery - queryTemplateQueryInline = pure . QueryTemplateQueryInline - - -omitNulls :: [(Text, Value)] -> Value -omitNulls = object . filter notNull where - notNull (_, Null) = False - notNull (_, Array a) = (not . V.null) a - notNull _ = True - - -instance ToJSON SimpleQueryStringQuery where - toJSON SimpleQueryStringQuery {..} = - omitNulls (base ++ maybeAdd) - where base = [ "query" .= simpleQueryStringQuery ] - maybeAdd = [ "fields" .= simpleQueryStringField - , "default_operator" .= simpleQueryStringOperator - , "analyzer" .= simpleQueryStringAnalyzer - , "flags" .= simpleQueryStringFlags - , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded - , "locale" .= simpleQueryStringLocale ] - -instance FromJSON SimpleQueryStringQuery where - parseJSON = withObject "SimpleQueryStringQuery" parse - where parse o = SimpleQueryStringQuery <$> o .: "query" - <*> o .:? "fields" - <*> o .:? "default_operator" - <*> o .:? "analyzer" - <*> (parseFlags <$> o .:? "flags") - <*> o .:? "lowercase_expanded_terms" - <*> o .:? "locale" - parseFlags (Just (x:xs)) = Just (x :| xs) - parseFlags _ = Nothing - -instance ToJSON FieldOrFields where - toJSON (FofField fieldName) = - toJSON fieldName - toJSON (FofFields fieldNames) = - toJSON fieldNames - -instance FromJSON FieldOrFields where - parseJSON v = FofField <$> parseJSON v - <|> FofFields <$> (parseNEJSON =<< parseJSON v) - -instance ToJSON SimpleQueryFlag where - toJSON SimpleQueryAll = "ALL" - toJSON SimpleQueryNone = "NONE" - toJSON SimpleQueryAnd = "AND" - toJSON SimpleQueryOr = "OR" - toJSON SimpleQueryPrefix = "PREFIX" - toJSON SimpleQueryPhrase = "PHRASE" - toJSON SimpleQueryPrecedence = "PRECEDENCE" - toJSON SimpleQueryEscape = "ESCAPE" - toJSON SimpleQueryWhitespace = "WHITESPACE" - toJSON SimpleQueryFuzzy = "FUZZY" - toJSON SimpleQueryNear = "NEAR" - toJSON SimpleQuerySlop = "SLOP" - -instance FromJSON SimpleQueryFlag where - parseJSON = withText "SimpleQueryFlag" parse - where parse "ALL" = pure SimpleQueryAll - parse "NONE" = pure SimpleQueryNone - parse "AND" = pure SimpleQueryAnd - parse "OR" = pure SimpleQueryOr - parse "PREFIX" = pure SimpleQueryPrefix - parse "PHRASE" = pure SimpleQueryPhrase - parse "PRECEDENCE" = pure SimpleQueryPrecedence - parse "ESCAPE" = pure SimpleQueryEscape - parse "WHITESPACE" = pure SimpleQueryWhitespace - parse "FUZZY" = pure SimpleQueryFuzzy - parse "NEAR" = pure SimpleQueryNear - parse "SLOP" = pure SimpleQuerySlop - parse f = fail ("Unexpected SimpleQueryFlag: " <> show f) - -instance ToJSON RegexpQuery where - toJSON (RegexpQuery (FieldName rqQueryField) - (Regexp regexpQueryQuery) rqQueryFlags - rqQueryBoost) = - object [ rqQueryField .= omitNulls base ] - where base = [ "value" .= regexpQueryQuery - , "flags" .= rqQueryFlags - , "boost" .= rqQueryBoost ] - -instance FromJSON RegexpQuery where - parseJSON = withObject "RegexpQuery" parse - where parse = fieldTagged $ \fn o -> - RegexpQuery fn - <$> o .: "value" - <*> o .: "flags" - <*> o .:? 
"boost" - -instance ToJSON QueryStringQuery where - toJSON (QueryStringQuery qsQueryString - qsDefaultField qsOperator - qsAnalyzer qsAllowWildcard - qsLowercaseExpanded qsEnablePositionIncrements - qsFuzzyMaxExpansions qsFuzziness - qsFuzzyPrefixLength qsPhraseSlop - qsBoost qsAnalyzeWildcard - qsGeneratePhraseQueries qsMinimumShouldMatch - qsLenient qsLocale) = - omitNulls base - where - base = [ "query" .= qsQueryString - , "default_field" .= qsDefaultField - , "default_operator" .= qsOperator - , "analyzer" .= qsAnalyzer - , "allow_leading_wildcard" .= qsAllowWildcard - , "lowercase_expanded_terms" .= qsLowercaseExpanded - , "enable_position_increments" .= qsEnablePositionIncrements - , "fuzzy_max_expansions" .= qsFuzzyMaxExpansions - , "fuzziness" .= qsFuzziness - , "fuzzy_prefix_length" .= qsFuzzyPrefixLength - , "phrase_slop" .= qsPhraseSlop - , "boost" .= qsBoost - , "analyze_wildcard" .= qsAnalyzeWildcard - , "auto_generate_phrase_queries" .= qsGeneratePhraseQueries - , "minimum_should_match" .= qsMinimumShouldMatch - , "lenient" .= qsLenient - , "locale" .= qsLocale ] - -instance FromJSON QueryStringQuery where - parseJSON = withObject "QueryStringQuery" parse - where parse o = QueryStringQuery - <$> o .: "query" - <*> o .:? "default_field" - <*> o .:? "default_operator" - <*> o .:? "analyzer" - <*> o .:? "allow_leading_wildcard" - <*> o .:? "lowercase_expanded_terms" - <*> o .:? "enable_position_increments" - <*> o .:? "fuzzy_max_expansions" - <*> o .:? "fuzziness" - <*> o .:? "fuzzy_prefix_length" - <*> o .:? "phrase_slop" - <*> o .:? "boost" - <*> o .:? "analyze_wildcard" - <*> o .:? "auto_generate_phrase_queries" - <*> o .:? "minimum_should_match" - <*> o .:? "lenient" - <*> o .:? "locale" - -instance ToJSON RangeQuery where - toJSON (RangeQuery (FieldName fieldName) range boost) = - object [ fieldName .= object conjoined ] - where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range) - -instance FromJSON RangeQuery where - parseJSON = withObject "RangeQuery" parse - where parse = fieldTagged $ \fn o -> - RangeQuery fn - <$> parseJSON (Object o) - <*> o .: "boost" - -instance FromJSON RangeValue where - parseJSON = withObject "RangeValue" parse - where parse o = parseDate o - <|> parseDouble o - parseDate o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? "gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDateGtLt (GreaterThanD b) (LessThanD a)) - (Just a, _, _, Just b)-> return (RangeDateGteLt (GreaterThanEqD b) (LessThanD a)) - (_, Just a, Just b, _)-> return (RangeDateGtLte (GreaterThanD b) (LessThanEqD a)) - (_, Just a, _, Just b)-> return (RangeDateGteLte (GreaterThanEqD b) (LessThanEqD a)) - (_, _, Just a, _)-> return (RangeDateGt (GreaterThanD a)) - (Just a, _, _, _)-> return (RangeDateLt (LessThanD a)) - (_, _, _, Just a)-> return (RangeDateGte (GreaterThanEqD a)) - (_, Just a, _, _)-> return (RangeDateLte (LessThanEqD a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - parseDouble o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? 
"gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDoubleGtLt (GreaterThan b) (LessThan a)) - (Just a, _, _, Just b)-> return (RangeDoubleGteLt (GreaterThanEq b) (LessThan a)) - (_, Just a, Just b, _)-> return (RangeDoubleGtLte (GreaterThan b) (LessThanEq a)) - (_, Just a, _, Just b)-> return (RangeDoubleGteLte (GreaterThanEq b) (LessThanEq a)) - (_, _, Just a, _)-> return (RangeDoubleGt (GreaterThan a)) - (Just a, _, _, _)-> return (RangeDoubleLt (LessThan a)) - (_, _, _, Just a)-> return (RangeDoubleGte (GreaterThanEq a)) - (_, Just a, _, _)-> return (RangeDoubleLte (LessThanEq a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - -instance ToJSON PrefixQuery where - toJSON (PrefixQuery (FieldName fieldName) queryValue boost) = - object [ fieldName .= omitNulls base ] - where base = [ "value" .= queryValue - , "boost" .= boost ] - -instance FromJSON PrefixQuery where - parseJSON = withObject "PrefixQuery" parse - where parse = fieldTagged $ \fn o -> - PrefixQuery fn - <$> o .: "value" - <*> o .:? "boost" - -instance ToJSON NestedQuery where - toJSON (NestedQuery nqPath nqScoreType nqQuery) = - object [ "path" .= nqPath - , "score_mode" .= nqScoreType - , "query" .= nqQuery ] - -instance FromJSON NestedQuery where - parseJSON = withObject "NestedQuery" parse - where parse o = NestedQuery - <$> o .: "path" - <*> o .: "score_mode" - <*> o .: "query" - -instance ToJSON MoreLikeThisFieldQuery where - toJSON (MoreLikeThisFieldQuery text (FieldName fieldName) - percent mtf mqt stopwords mindf maxdf - minwl maxwl boostTerms boost analyzer) = - object [ fieldName .= omitNulls base ] - where base = [ "like_text" .= text - , "percent_terms_to_match" .= percent - , "min_term_freq" .= mtf - , "max_query_terms" .= mqt - , "stop_words" .= stopwords - , "min_doc_freq" .= mindf - , "max_doc_freq" .= maxdf - , "min_word_length" .= minwl - , "max_word_length" .= maxwl - , "boost_terms" .= boostTerms - , "boost" .= boost - , "analyzer" .= analyzer ] - -instance FromJSON MoreLikeThisFieldQuery where - parseJSON = withObject "MoreLikeThisFieldQuery" parse - where parse = fieldTagged $ \fn o -> - MoreLikeThisFieldQuery - <$> o .: "like_text" - <*> pure fn - <*> o .:? "percent_terms_to_match" - <*> o .:? "min_term_freq" - <*> o .:? "max_query_terms" - -- <*> (optionalNE =<< o .:? "stop_words") - <*> o .:? "stop_words" - <*> o .:? "min_doc_freq" - <*> o .:? "max_doc_freq" - <*> o .:? "min_word_length" - <*> o .:? "max_word_length" - <*> o .:? "boost_terms" - <*> o .:? "boost" - <*> o .:? "analyzer" - -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) - -instance ToJSON MoreLikeThisQuery where - toJSON (MoreLikeThisQuery text fields percent - mtf mqt stopwords mindf maxdf - minwl maxwl boostTerms boost analyzer) = - omitNulls base - where base = [ "like_text" .= text - , "fields" .= fields - , "percent_terms_to_match" .= percent - , "min_term_freq" .= mtf - , "max_query_terms" .= mqt - , "stop_words" .= stopwords - , "min_doc_freq" .= mindf - , "max_doc_freq" .= maxdf - , "min_word_length" .= minwl - , "max_word_length" .= maxwl - , "boost_terms" .= boostTerms - , "boost" .= boost - , "analyzer" .= analyzer ] - -instance FromJSON MoreLikeThisQuery where - parseJSON = withObject "MoreLikeThisQuery" parse - where parse o = MoreLikeThisQuery - <$> o .: "like_text" - -- <*> (optionalNE =<< o .:? "fields") - <*> o .:? "fields" - <*> o .:? "percent_terms_to_match" - <*> o .:? "min_term_freq" - <*> o .:? "max_query_terms" - -- <*> (optionalNE =<< o .:? "stop_words") - <*> o .:? 
"stop_words" - <*> o .:? "min_doc_freq" - <*> o .:? "max_doc_freq" - <*> o .:? "min_word_length" - <*> o .:? "max_word_length" - <*> o .:? "boost_terms" - <*> o .:? "boost" - <*> o .:? "analyzer" - -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) - -instance ToJSON IndicesQuery where - toJSON (IndicesQuery indices query noMatch) = - omitNulls [ "indices" .= indices - , "no_match_query" .= noMatch - , "query" .= query ] - -instance FromJSON IndicesQuery where - parseJSON = withObject "IndicesQuery" parse - where parse o = IndicesQuery - <$> o .:? "indices" .!= [] - <*> o .: "query" - <*> o .:? "no_match_query" - -instance ToJSON HasParentQuery where - toJSON (HasParentQuery queryType query scoreType) = - omitNulls [ "parent_type" .= queryType - , "score_type" .= scoreType - , "query" .= query ] - -instance FromJSON HasParentQuery where - parseJSON = withObject "HasParentQuery" parse - where parse o = HasParentQuery - <$> o .: "parent_type" - <*> o .: "query" - <*> o .:? "score_type" - -instance ToJSON HasChildQuery where - toJSON (HasChildQuery queryType query scoreType) = - omitNulls [ "query" .= query - , "score_type" .= scoreType - , "type" .= queryType ] - -instance FromJSON HasChildQuery where - parseJSON = withObject "HasChildQuery" parse - where parse o = HasChildQuery - <$> o .: "type" - <*> o .: "query" - <*> o .:? "score_type" - -instance ToJSON FuzzyQuery where - toJSON (FuzzyQuery (FieldName fieldName) queryText - prefixLength maxEx fuzziness boost) = - object [ fieldName .= omitNulls base ] - where base = [ "value" .= queryText - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "boost" .= boost - , "max_expansions" .= maxEx ] - -instance FromJSON FuzzyQuery where - parseJSON = withObject "FuzzyQuery" parse - where parse = fieldTagged $ \fn o -> - FuzzyQuery fn - <$> o .: "value" - <*> o .: "prefix_length" - <*> o .: "max_expansions" - <*> o .: "fuzziness" - <*> o .:? "boost" - -instance ToJSON FuzzyLikeFieldQuery where - toJSON (FuzzyLikeFieldQuery (FieldName fieldName) - fieldText maxTerms ignoreFreq fuzziness prefixLength - boost analyzer) = - object [ fieldName .= - omitNulls [ "like_text" .= fieldText - , "max_query_terms" .= maxTerms - , "ignore_tf" .= ignoreFreq - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "analyzer" .= analyzer - , "boost" .= boost ]] - -instance FromJSON FuzzyLikeFieldQuery where - parseJSON = withObject "FuzzyLikeFieldQuery" parse - where parse = fieldTagged $ \fn o -> - FuzzyLikeFieldQuery fn - <$> o .: "like_text" - <*> o .: "max_query_terms" - <*> o .: "ignore_tf" - <*> o .: "fuzziness" - <*> o .: "prefix_length" - <*> o .: "boost" - <*> o .:? "analyzer" - -instance ToJSON FuzzyLikeThisQuery where - toJSON (FuzzyLikeThisQuery fields text maxTerms - ignoreFreq fuzziness prefixLength boost analyzer) = - omitNulls base - where base = [ "fields" .= fields - , "like_text" .= text - , "max_query_terms" .= maxTerms - , "ignore_tf" .= ignoreFreq - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "analyzer" .= analyzer - , "boost" .= boost ] - -instance FromJSON FuzzyLikeThisQuery where - parseJSON = withObject "FuzzyLikeThisQuery" parse - where parse o = FuzzyLikeThisQuery - <$> o .:? "fields" .!= [] - <*> o .: "like_text" - <*> o .: "max_query_terms" - <*> o .: "ignore_tf" - <*> o .: "fuzziness" - <*> o .: "prefix_length" - <*> o .: "boost" - <*> o .:? 
"analyzer" - -instance ToJSON FilteredQuery where - toJSON (FilteredQuery query fFilter) = - object [ "query" .= query - , "filter" .= fFilter ] - -instance FromJSON FilteredQuery where - parseJSON = withObject "FilteredQuery" parse - where parse o = FilteredQuery - <$> o .: "query" - <*> o .: "filter" - -instance ToJSON DisMaxQuery where - toJSON (DisMaxQuery queries tiebreaker boost) = - omitNulls base - where base = [ "queries" .= queries - , "boost" .= boost - , "tie_breaker" .= tiebreaker ] - -instance FromJSON DisMaxQuery where - parseJSON = withObject "DisMaxQuery" parse - where parse o = DisMaxQuery - <$> o .:? "queries" .!= [] - <*> o .: "tie_breaker" - <*> o .:? "boost" - -instance ToJSON CommonTermsQuery where - toJSON (CommonTermsQuery (FieldName fieldName) - (QueryString query) cf lfo hfo msm - boost analyzer disableCoord) = - object [fieldName .= omitNulls base ] - where base = [ "query" .= query - , "cutoff_frequency" .= cf - , "low_freq_operator" .= lfo - , "minimum_should_match" .= msm - , "boost" .= boost - , "analyzer" .= analyzer - , "disable_coord" .= disableCoord - , "high_freq_operator" .= hfo ] - -instance FromJSON CommonTermsQuery where - parseJSON = withObject "CommonTermsQuery" parse - where parse = fieldTagged $ \fn o -> - CommonTermsQuery fn - <$> o .: "query" - <*> o .: "cutoff_frequency" - <*> o .: "low_freq_operator" - <*> o .: "high_freq_operator" - <*> o .:? "minimum_should_match" - <*> o .:? "boost" - <*> o .:? "analyzer" - <*> o .:? "disable_coord" - -instance ToJSON CommonMinimumMatch where - toJSON (CommonMinimumMatch mm) = toJSON mm - toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) = - object [ "low_freq" .= lowF - , "high_freq" .= highF ] - -instance FromJSON CommonMinimumMatch where - parseJSON v = parseMinimum v - <|> parseMinimumHighLow v - where parseMinimum = fmap CommonMinimumMatch . parseJSON - parseMinimumHighLow = fmap CommonMinimumMatchHighLow . withObject "CommonMinimumMatchHighLow" (\o -> - MinimumMatchHighLow - <$> o .: "low_freq" - <*> o .: "high_freq") - - -instance ToJSON BoostingQuery where - toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) = - object [ "positive" .= bqPositiveQuery - , "negative" .= bqNegativeQuery - , "negative_boost" .= bqNegativeBoost ] - -instance FromJSON BoostingQuery where - parseJSON = withObject "BoostingQuery" parse - where parse o = BoostingQuery - <$> o .: "positive" - <*> o .: "negative" - <*> o .: "negative_boost" - -instance ToJSON BoolQuery where - toJSON (BoolQuery mustM notM shouldM bqMin boost disableCoord) = - omitNulls base - where base = [ "must" .= mustM - , "must_not" .= notM - , "should" .= shouldM - , "minimum_should_match" .= bqMin - , "boost" .= boost - , "disable_coord" .= disableCoord ] - -instance FromJSON BoolQuery where - parseJSON = withObject "BoolQuery" parse - where parse o = BoolQuery - <$> o .:? "must" .!= [] - <*> o .:? "must_not" .!= [] - <*> o .:? "should" .!= [] - <*> o .:? "minimum_should_match" - <*> o .:? "boost" - <*> o .:? 
"disable_coord" - -instance ToJSON MatchQuery where - toJSON (MatchQuery (FieldName fieldName) - (QueryString mqQueryString) booleanOperator - zeroTermsQuery cutoffFrequency matchQueryType - analyzer maxExpansions lenient boost) = - object [ fieldName .= omitNulls base ] - where base = [ "query" .= mqQueryString - , "operator" .= booleanOperator - , "zero_terms_query" .= zeroTermsQuery - , "cutoff_frequency" .= cutoffFrequency - , "type" .= matchQueryType - , "analyzer" .= analyzer - , "max_expansions" .= maxExpansions - , "lenient" .= lenient - , "boost" .= boost ] - -instance FromJSON MatchQuery where - parseJSON = withObject "MatchQuery" parse - where parse = fieldTagged $ \fn o -> - MatchQuery fn - <$> o .: "query" - <*> o .: "operator" - <*> o .: "zero_terms_query" - <*> o .:? "cutoff_frequency" - <*> o .:? "type" - <*> o .:? "analyzer" - <*> o .:? "max_expansions" - <*> o .:? "lenient" - <*> o .:? "boost" - -instance ToJSON MultiMatchQuery where - toJSON (MultiMatchQuery fields (QueryString query) boolOp - ztQ tb mmqt cf analyzer maxEx lenient) = - object ["multi_match" .= omitNulls base] - where base = [ "fields" .= fmap toJSON fields - , "query" .= query - , "operator" .= boolOp - , "zero_terms_query" .= ztQ - , "tie_breaker" .= tb - , "type" .= mmqt - , "cutoff_frequency" .= cf - , "analyzer" .= analyzer - , "max_expansions" .= maxEx - , "lenient" .= lenient ] - -instance FromJSON MultiMatchQuery where - parseJSON = withObject "MultiMatchQuery" parse - where parse raw = do o <- raw .: "multi_match" - MultiMatchQuery - <$> o .:? "fields" .!= [] - <*> o .: "query" - <*> o .: "operator" - <*> o .: "zero_terms_query" - <*> o .:? "tie_breaker" - <*> o .:? "type" - <*> o .:? "cutoff_frequency" - <*> o .:? "analyzer" - <*> o .:? "max_expansions" - <*> o .:? 
"lenient" - -instance ToJSON MultiMatchQueryType where - toJSON MultiMatchBestFields = "best_fields" - toJSON MultiMatchMostFields = "most_fields" - toJSON MultiMatchCrossFields = "cross_fields" - toJSON MultiMatchPhrase = "phrase" - toJSON MultiMatchPhrasePrefix = "phrase_prefix" - -instance FromJSON MultiMatchQueryType where - parseJSON = withText "MultiMatchPhrasePrefix" parse - where parse "best_fields" = pure MultiMatchBestFields - parse "most_fields" = pure MultiMatchMostFields - parse "cross_fields" = pure MultiMatchCrossFields - parse "phrase" = pure MultiMatchPhrase - parse "phrase_prefix" = pure MultiMatchPhrasePrefix - parse t = fail ("Unexpected MultiMatchPhrasePrefix: " <> show t) - -instance ToJSON BooleanOperator where - toJSON And = String "and" - toJSON Or = String "or" - -instance FromJSON BooleanOperator where - parseJSON = withText "BooleanOperator" parse - where parse "and" = pure And - parse "or" = pure Or - parse o = fail ("Unexpected BooleanOperator: " <> show o) - -instance ToJSON ZeroTermsQuery where - toJSON ZeroTermsNone = String "none" - toJSON ZeroTermsAll = String "all" - -instance FromJSON ZeroTermsQuery where - parseJSON = withText "ZeroTermsQuery" parse - where parse "none" = pure ZeroTermsNone - parse "all" = pure ZeroTermsAll - parse q = fail ("Unexpected ZeroTermsQuery: " <> show q) - -instance ToJSON MatchQueryType where - toJSON MatchPhrase = "phrase" - toJSON MatchPhrasePrefix = "phrase_prefix" - -instance FromJSON MatchQueryType where - parseJSON = withText "MatchQueryType" parse - where parse "phrase" = pure MatchPhrase - parse "phrase_prefix" = pure MatchPhrasePrefix - parse t = fail ("Unexpected MatchQueryType: " <> show t) - -instance FromJSON Status where - parseJSON (Object v) = Status <$> - v .:? "ok" <*> - (v .:? 
"status" .!= 200) <*> - v .: "name" <*> - v .: "version" <*> - v .: "tagline" - parseJSON _ = empty - - -instance ToJSON IndexSettings where - toJSON (IndexSettings s r) = object ["settings" .= - object ["index" .= - object ["number_of_shards" .= s, "number_of_replicas" .= r] - ] - ] - -instance FromJSON IndexSettings where - parseJSON = withObject "IndexSettings" parse - where parse o = do s <- o .: "settings" - i <- s .: "index" - IndexSettings <$> i .: "number_of_shards" - <*> i .: "number_of_replicas" - -instance ToJSON UpdatableIndexSetting where - toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x - toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x - toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) - toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x - toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x - toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x - toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x - toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) - toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x - toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x - toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) - toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) - toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) - toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) - toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) - toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x - toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x - toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x - toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) - toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x - toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x - toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x - toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x - toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x - toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x - toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x - toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x - toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x - -instance FromJSON UpdatableIndexSetting where - parseJSON = withObject "UpdatableIndexSetting" parse - where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] - <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] - <|> refreshInterval `taggedAt` ["index", "refresh_interval"] - <|> indexConcurrency `taggedAt` ["index", "concurrency"] - <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] - <|> translogFlushThresholdOps `taggedAt` ["index", 
"translog", "flush_threshold_ops"] - <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] - <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] - <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] - <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] - <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] - <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] - <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] - <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] - <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] - <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] - <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] - <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] - <|> gcDeletes `taggedAt` ["index", "gc_deletes"] - <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] - <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] - <|> compoundFormat `taggedAt` ["index", "compound_format"] - <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] - <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] - <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] - <|> blocksRead `taggedAt` ["blocks", "read"] - <|> blocksWrite `taggedAt` ["blocks", "write"] - <|> blocksMetaData `taggedAt` ["blocks", "metadata"] - where taggedAt f ks = taggedAt' f (Object o) ks - taggedAt' f v [] = f =<< (parseJSON v <|> (parseJSON (unStringlyTypeJSON v))) - taggedAt' f v (k:ks) = withObject "Object" (\o -> do v' <- o .: k - taggedAt' f v' ks) v - numberOfReplicas = pure . NumberOfReplicas - autoExpandReplicas = pure . AutoExpandReplicas - refreshInterval = pure . RefreshInterval . ndtJSON - indexConcurrency = pure . IndexConcurrency - failOnMergeFailure = pure . FailOnMergeFailure - translogFlushThresholdOps = pure . TranslogFlushThresholdOps - translogFlushThresholdSize = pure . TranslogFlushThresholdSize - translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON - translogDisableFlush = pure . TranslogDisableFlush - cacheFilterMaxSize = pure . CacheFilterMaxSize - cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON - gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON - routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter - routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter - routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter - routingAllocationEnable = pure . RoutingAllocationEnable - routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode - recoveryInitialShards = pure . RecoveryInitialShards - gcDeletes = pure . GCDeletes . ndtJSON - ttlDisablePurge = pure . TTLDisablePurge - translogFSType = pure . TranslogFSType - compoundFormat = pure . IndexCompoundFormat - compoundOnFlush = pure . IndexCompoundOnFlush - warmerEnabled = pure . WarmerEnabled - blocksReadOnly = pure . BlocksReadOnly - blocksRead = pure . BlocksRead - blocksWrite = pure . BlocksWrite - blocksMetaData = pure . 
BlocksMetaData - -instance FromJSON IndexSettingsSummary where - parseJSON = withObject "IndexSettingsSummary" parse - where parse o = case HM.toList o of - [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn) - <$> parseJSON v - <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings") - _ -> fail "Expected single-key object with index name" - redundant (NumberOfReplicas _) = True - redundant _ = False - --- | For some reason in several settings APIs, all leaf values get returned --- as strings. This function attempts to recover from this for all --- non-recursive JSON types. If nothing can be done, the value is left alone. -unStringlyTypeJSON :: Value -> Value -unStringlyTypeJSON (String "true") = Bool True -unStringlyTypeJSON (String "false") = Bool False -unStringlyTypeJSON (String "null") = Null -unStringlyTypeJSON v@(String t) = case readMay (T.unpack t) of - Just n -> Number n - Nothing -> v -unStringlyTypeJSON v = v - - -parseSettings :: Object -> Parser [UpdatableIndexSetting] -parseSettings o = do - o' <- o .: "index" - -- slice the index object into singleton hashmaps and try to parse each - parses <- forM (HM.toList o') $ \(k, v) -> do - -- blocks are now nested into the "index" key, which is not how they're serialized - let atRoot = Object (HM.singleton k v) - let atIndex = Object (HM.singleton "index" atRoot) - optional (parseJSON atRoot <|> parseJSON atIndex) - return (catMaybes parses) - -oPath :: ToJSON a => NonEmpty Text -> a -> Value -oPath (k :| []) v = object [k .= v] -oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v] - -attrFilterJSON :: NonEmpty NodeAttrFilter -> Value -attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs) - | NodeAttrFilter (NodeAttrName n) vs <- toList fs] - -parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter) -parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse - where parse o = case HM.toList o of - [] -> fail "Expected non-empty list of NodeAttrFilters" - x:xs -> DT.mapM (uncurry parse') (x :| xs) - parse' n = withText "Text" $ \t -> - case T.splitOn "," t of - fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs)) - [] -> fail "Expected non-empty list of filter values" - -instance ToJSON ReplicaBounds where - toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b) - toJSON (ReplicasLowerBounded a) = String (showText a <> "-all") - toJSON ReplicasUnbounded = Bool False - -instance FromJSON ReplicaBounds where - parseJSON v = withText "ReplicaBounds" parseText v - <|> withBool "ReplicaBounds" parseBool v - where parseText t = case T.splitOn "-" t of - [a, "all"] -> ReplicasLowerBounded <$> parseReadText a - [a, b] -> ReplicasBounded <$> parseReadText a - <*> parseReadText b - _ -> fail ("Could not parse ReplicaBounds: " <> show t) - parseBool False = pure ReplicasUnbounded - parseBool _ = fail "ReplicasUnbounded cannot be represented with True" - -instance ToJSON AllocationPolicy where - toJSON AllocAll = String "all" - toJSON AllocPrimaries = String "primaries" - toJSON AllocNewPrimaries = String "new_primaries" - toJSON AllocNone = String "none" - -instance FromJSON AllocationPolicy where - parseJSON = withText "AllocationPolicy" parse - where parse "all" = pure AllocAll - parse "primaries" = pure AllocPrimaries - parse "new_primaries" = pure AllocNewPrimaries - parse "none" = pure AllocNone - parse t = fail ("Invalid AllocationPolicy: " <> show t) - -instance ToJSON InitialShardCount where - toJSON QuorumShards = String "quorum" - toJSON QuorumMinus1Shards = 
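-- oPath (defined above) nests one object per path segment, which is how the
-- dotted setting names in UpdatableIndexSetting become JSON. A sketch
-- (oPathSketch is illustrative):
oPathSketch :: Value
oPathSketch = oPath ("index" :| ["translog", "disable_flush"]) True
-- expected: object ["index" .= object ["translog" .= object ["disable_flush" .= True]]]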
String "quorum-1" - toJSON FullShards = String "full" - toJSON FullMinus1Shards = String "full-1" - toJSON (ExplicitShards x) = toJSON x - -instance FromJSON InitialShardCount where - parseJSON v = withText "InitialShardCount" parseText v - <|> ExplicitShards <$> parseJSON v - where parseText "quorum" = pure QuorumShards - parseText "quorum-1" = pure QuorumMinus1Shards - parseText "full" = pure FullShards - parseText "full-1" = pure FullMinus1Shards - parseText _ = mzero - -instance ToJSON FSType where - toJSON FSSimple = "simple" - toJSON FSBuffered = "buffered" - -instance FromJSON FSType where - parseJSON = withText "FSType" parse - where parse "simple" = pure FSSimple - parse "buffered" = pure FSBuffered - parse t = fail ("Invalid FSType: " <> show t) - -instance ToJSON CompoundFormat where - toJSON (CompoundFileFormat x) = Bool x - toJSON (MergeSegmentVsTotalIndex x) = toJSON x - -instance FromJSON CompoundFormat where - parseJSON v = CompoundFileFormat <$> parseJSON v - <|> MergeSegmentVsTotalIndex <$> parseJSON v - -instance ToJSON NominalDiffTimeJSON where - toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") - -instance FromJSON NominalDiffTimeJSON where - parseJSON = withText "NominalDiffTime" parse - where parse t = case T.takeEnd 1 t of - "s" -> NominalDiffTimeJSON . fromInteger <$> parseReadText (T.dropEnd 1 t) - _ -> fail "Invalid or missing NominalDiffTime unit (expected s)" - -instance ToJSON IndexTemplate where - toJSON (IndexTemplate p s m) = merge - (object [ "template" .= p - , "mappings" .= foldl' merge (object []) m - ]) - (toJSON s) - where - merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2 - merge o Null = o - merge _ _ = undefined - -instance (FromJSON a) => FromJSON (EsResult a) where - parseJSON jsonVal@(Object v) = do - found <- v .:? "found" .!= False - fr <- if found - then parseJSON jsonVal - else return Nothing - EsResult <$> v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - pure fr - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (EsResultFound a) where - parseJSON (Object v) = EsResultFound <$> - v .: "_version" <*> - v .: "_source" - parseJSON _ = empty - -instance FromJSON EsError where - parseJSON (Object v) = EsError <$> - v .: "status" <*> - (v .: "error" <|> (v .: "error" >>= (.: "reason"))) - parseJSON _ = empty - -instance FromJSON IndexAliasesSummary where - parseJSON = withObject "IndexAliasesSummary" parse - where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) - go ixn = withObject "index aliases" $ \ia -> do - aliases <- ia .:? "aliases" .!= mempty - forM (HM.toList aliases) $ \(aName, v) -> do - let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) - IndexAliasSummary indexAlias <$> parseJSON v - - -instance ToJSON IndexAliasAction where - toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] - where Object iaObj = toJSON ia - Object optsObj = toJSON opts - toJSON (RemoveAlias ia) = object ["remove" .= iaObj] - where Object iaObj = toJSON ia - -instance ToJSON IndexAlias where - toJSON IndexAlias {..} = object ["index" .= srcIndex - , "alias" .= indexAlias - ] - -instance ToJSON IndexAliasCreate where - toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) - where filterObj = maybe mempty (HM.singleton "filter" . 
toJSON) aliasCreateFilter - Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting - -instance ToJSON AliasRouting where - toJSON (AllAliasRouting v) = object ["routing" .= v] - toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) - where prs = [("search_routing" .=) <$> srch - ,("index_routing" .=) <$> idx] - -instance FromJSON AliasRouting where - parseJSON = withObject "AliasRouting" parse - where parse o = parseAll o <|> parseGranular o - parseAll o = AllAliasRouting <$> o .: "routing" - parseGranular o = do - sr <- o .:? "search_routing" - ir <- o .:? "index_routing" - if isNothing sr && isNothing ir - then fail "Both search_routing and index_routing can't be blank" - else return (GranularAliasRouting sr ir) - -instance FromJSON IndexAliasCreate where - parseJSON v = withObject "IndexAliasCreate" parse v - where parse o = IndexAliasCreate <$> optional (parseJSON v) - <*> o .:? "filter" - -instance ToJSON SearchAliasRouting where - toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) - -instance FromJSON SearchAliasRouting where - parseJSON = withText "SearchAliasRouting" parse - where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) + deriving (Eq, Show) instance ToJSON Search where toJSON (Search query sFilter sort searchAggs highlight sTrackSortScores sFrom sSize _ sFields sSource sSuggest) = @@ -3484,346 +440,6 @@ instance ToJSON Search where , "suggest" .= sSuggest] -instance ToJSON Source where - toJSON NoSource = toJSON False - toJSON (SourcePatterns patterns) = toJSON patterns - toJSON (SourceIncludeExclude incl excl) = object [ "include" .= incl, "exclude" .= excl ] - -instance ToJSON PatternOrPatterns where - toJSON (PopPattern pattern) = toJSON pattern - toJSON (PopPatterns patterns) = toJSON patterns - -instance ToJSON Include where - toJSON (Include patterns) = toJSON patterns - -instance ToJSON Exclude where - toJSON (Exclude patterns) = toJSON patterns - -instance ToJSON Pattern where - toJSON (Pattern pattern) = toJSON pattern - - -instance ToJSON FieldHighlight where - toJSON (FieldHighlight (FieldName fName) (Just fSettings)) = - object [ fName .= fSettings ] - toJSON (FieldHighlight (FieldName fName) Nothing) = - object [ fName .= emptyObject ] - -instance ToJSON Highlights where - toJSON (Highlights global fields) = - omitNulls (("fields" .= fields) - : highlightSettingsPairs global) - -instance ToJSON HighlightSettings where - toJSON hs = omitNulls (highlightSettingsPairs (Just hs)) - -highlightSettingsPairs :: Maybe HighlightSettings -> [Pair] -highlightSettingsPairs Nothing = [] -highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh) -highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph) -highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh) - - -plainHighPairs :: Maybe PlainHighlight -> [Pair] -plainHighPairs Nothing = [] -plainHighPairs (Just (PlainHighlight plCom plNonPost)) = - [ "type" .= String "plain"] - ++ commonHighlightPairs plCom - ++ nonPostingsToPairs plNonPost - -postHighPairs :: Maybe PostingsHighlight -> [Pair] -postHighPairs Nothing = [] -postHighPairs (Just (PostingsHighlight pCom)) = - ("type" .= String "postings") - : commonHighlightPairs pCom - -fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair] -fastVectorHighPairs Nothing = [] -fastVectorHighPairs (Just - (FastVectorHighlight fvCom fvNonPostSettings fvBoundChars - fvBoundMaxScan fvFragOff fvMatchedFields - fvPhraseLim)) = - [ "type" .= 
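-- A field with no per-field settings still needs a key under "fields", hence
-- the emptyObject case in the FieldHighlight instance above. A sketch
-- (fieldHighlightSketch is illustrative):
fieldHighlightSketch :: Value
fieldHighlightSketch = toJSON (FieldHighlight (FieldName "body") Nothing)
-- expected: object ["body" .= object []]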
String "fvh" - , "boundary_chars" .= fvBoundChars - , "boundary_max_scan" .= fvBoundMaxScan - , "fragment_offset" .= fvFragOff - , "matched_fields" .= fvMatchedFields - , "phraseLimit" .= fvPhraseLim] - ++ commonHighlightPairs fvCom - ++ nonPostingsToPairs fvNonPostSettings - -deleteSeveral :: (Eq k, Hashable k) => [k] -> HM.HashMap k v -> HM.HashMap k v -deleteSeveral ks hm = foldr HM.delete hm ks - -commonHighlightPairs :: Maybe CommonHighlight -> [Pair] -commonHighlightPairs Nothing = [] -commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder - chNoMatchSize chHighlightQuery - chRequireFieldMatch)) = - [ "order" .= chScore - , "force_source" .= chForceSource - , "encoder" .= chEncoder - , "no_match_size" .= chNoMatchSize - , "highlight_query" .= chHighlightQuery - , "require_fieldMatch" .= chRequireFieldMatch] - ++ highlightTagToPairs chTag - - -nonPostingsToPairs :: Maybe NonPostings -> [Pair] -nonPostingsToPairs Nothing = [] -nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = - [ "fragment_size" .= npFragSize - , "number_of_fragments" .= npNumOfFrags] - -parseNEJSON :: (FromJSON a) => [Value] -> Parser (NonEmpty a) -parseNEJSON [] = fail "Expected non-empty list" -parseNEJSON (x:xs) = DT.mapM parseJSON (x :| xs) - - -instance ToJSON HighlightEncoder where - toJSON DefaultEncoder = String "default" - toJSON HTMLEncoder = String "html" - -highlightTagToPairs :: Maybe HighlightTag -> [Pair] -highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"] -highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre - , "post_tags" .= post] -highlightTagToPairs Nothing = [] - -instance ToJSON SortSpec where - toJSON (DefaultSortSpec - (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped - dsSortMode dsMissingSort dsNestedFilter)) = - object [dsSortFieldName .= omitNulls base] where - base = [ "order" .= dsSortOrder - , "ignore_unmapped" .= dsIgnoreUnmapped - , "mode" .= dsSortMode - , "missing" .= dsMissingSort - , "nested_filter" .= dsNestedFilter ] - - toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = - object [ "unit" .= units - , field .= gdsLatLon - , "order" .= gdsSortOrder ] - - -instance ToJSON SortOrder where - toJSON Ascending = String "asc" - toJSON Descending = String "desc" - - -instance ToJSON SortMode where - toJSON SortMin = String "min" - toJSON SortMax = String "max" - toJSON SortSum = String "sum" - toJSON SortAvg = String "avg" - - -instance ToJSON Missing where - toJSON LastMissing = String "_last" - toJSON FirstMissing = String "_first" - toJSON (CustomMissing txt) = String txt - - -instance ToJSON ScoreType where - toJSON ScoreTypeMax = "max" - toJSON ScoreTypeAvg = "avg" - toJSON ScoreTypeSum = "sum" - toJSON ScoreTypeNone = "none" - -instance FromJSON ScoreType where - parseJSON = withText "ScoreType" parse - where parse "max" = pure ScoreTypeMax - parse "avg" = pure ScoreTypeAvg - parse "sum" = pure ScoreTypeSum - parse "none" = pure ScoreTypeNone - parse t = fail ("Unexpected ScoreType: " <> show t) - -instance ToJSON Distance where - toJSON (Distance dCoefficient dUnit) = - String boltedTogether where - coefText = showText dCoefficient - (String unitText) = toJSON dUnit - boltedTogether = mappend coefText unitText - -instance FromJSON Distance where - parseJSON = withText "Distance" parse - where parse t = Distance <$> parseCoeff nT - <*> parseJSON (String unitT) - where (nT, unitT) = T.span validForNumber t - -- may be a better way to do 
this - validForNumber '-' = True - validForNumber '.' = True - validForNumber 'e' = True - validForNumber c = isNumber c - parseCoeff "" = fail "Empty string cannot be parsed as number" - parseCoeff s = return (read (T.unpack s)) - -instance ToJSON DistanceUnit where - toJSON Miles = String "mi" - toJSON Yards = String "yd" - toJSON Feet = String "ft" - toJSON Inches = String "in" - toJSON Kilometers = String "km" - toJSON Meters = String "m" - toJSON Centimeters = String "cm" - toJSON Millimeters = String "mm" - toJSON NauticalMiles = String "nmi" - - -instance FromJSON DistanceUnit where - parseJSON = withText "DistanceUnit" parse - where parse "mi" = pure Miles - parse "yd" = pure Yards - parse "ft" = pure Feet - parse "in" = pure Inches - parse "km" = pure Kilometers - parse "m" = pure Meters - parse "cm" = pure Centimeters - parse "mm" = pure Millimeters - parse "nmi" = pure NauticalMiles - parse u = fail ("Unrecognized DistanceUnit: " <> show u) - -instance ToJSON DistanceType where - toJSON Arc = String "arc" - toJSON SloppyArc = String "sloppy_arc" - toJSON Plane = String "plane" - -instance FromJSON DistanceType where - parseJSON = withText "DistanceType" parse - where parse "arc" = pure Arc - parse "sloppy_arc" = pure SloppyArc - parse "plane" = pure Plane - parse t = fail ("Unrecognized DistanceType: " <> show t) - - -instance ToJSON OptimizeBbox where - toJSON NoOptimizeBbox = String "none" - toJSON (OptimizeGeoFilterType gft) = toJSON gft - -instance FromJSON OptimizeBbox where - parseJSON v = withText "NoOptimizeBbox" parseNoOptimize v - <|> parseOptimize v - where parseNoOptimize "none" = pure NoOptimizeBbox - parseNoOptimize _ = mzero - parseOptimize = fmap OptimizeGeoFilterType . parseJSON - -instance ToJSON GeoBoundingBoxConstraint where - toJSON (GeoBoundingBoxConstraint - (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') = - object [gbbcGeoBBField .= gbbcConstraintBox - , "_cache" .= cache - , "type" .= type'] - -instance FromJSON GeoBoundingBoxConstraint where - parseJSON = withObject "GeoBoundingBoxConstraint" parse - where parse o = case HM.toList (deleteSeveral ["type", "_cache"] o) of - [(fn, v)] -> GeoBoundingBoxConstraint (FieldName fn) - <$> parseJSON v - <*> o .:? 
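-- Distance above bolts the coefficient and unit into one string, matching how
-- ES renders distances. A sketch (distanceSketch is illustrative; assumes
-- showText renders Doubles via show):
distanceSketch :: Value
distanceSketch = toJSON (Distance 5 Miles)
-- expected: String "5.0mi"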
"_cache" .!= defaultCache - <*> o .: "type" - _ -> fail "Could not find field name for GeoBoundingBoxConstraint" - -instance ToJSON GeoFilterType where - toJSON GeoFilterMemory = String "memory" - toJSON GeoFilterIndexed = String "indexed" - -instance FromJSON GeoFilterType where - parseJSON = withText "GeoFilterType" parse - where parse "memory" = pure GeoFilterMemory - parse "indexed" = pure GeoFilterIndexed - parse t = fail ("Unrecognized GeoFilterType: " <> show t) - -instance ToJSON GeoBoundingBox where - toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) = - object ["top_left" .= gbbTopLeft - , "bottom_right" .= gbbBottomRight] - -instance FromJSON GeoBoundingBox where - parseJSON = withObject "GeoBoundingBox" parse - where parse o = GeoBoundingBox - <$> o .: "top_left" - <*> o .: "bottom_right" - -instance ToJSON LatLon where - toJSON (LatLon lLat lLon) = - object ["lat" .= lLat - , "lon" .= lLon] - -instance FromJSON LatLon where - parseJSON = withObject "LatLon" parse - where parse o = LatLon <$> o .: "lat" - <*> o .: "lon" - --- index for smaller ranges, fielddata for longer ranges -instance ToJSON RangeExecution where - toJSON RangeExecutionIndex = "index" - toJSON RangeExecutionFielddata = "fielddata" - - -instance FromJSON RangeExecution where - parseJSON = withText "RangeExecution" parse - where parse "index" = pure RangeExecutionIndex - parse "fielddata" = pure RangeExecutionFielddata - parse t = error ("Unrecognized RangeExecution " <> show t) - -instance ToJSON RegexpFlags where - toJSON AllRegexpFlags = String "ALL" - toJSON NoRegexpFlags = String "NONE" - toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs - where flagStrs = map flagStr . nub $ h:fs - flagStr AnyString = "ANYSTRING" - flagStr Automaton = "AUTOMATON" - flagStr Complement = "COMPLEMENT" - flagStr Empty = "EMPTY" - flagStr Intersection = "INTERSECTION" - flagStr Interval = "INTERVAL" - -instance FromJSON RegexpFlags where - parseJSON = withText "RegexpFlags" parse - where parse "ALL" = pure AllRegexpFlags - parse "NONE" = pure NoRegexpFlags - parse t = SomeRegexpFlags <$> parseNEJSON (String <$> T.splitOn "|" t) - -instance FromJSON RegexpFlag where - parseJSON = withText "RegexpFlag" parse - where parse "ANYSTRING" = pure AnyString - parse "AUTOMATON" = pure Automaton - parse "COMPLEMENT" = pure Complement - parse "EMPTY" = pure Empty - parse "INTERSECTION" = pure Intersection - parse "INTERVAL" = pure Interval - parse f = fail ("Unknown RegexpFlag: " <> show f) - -instance ToJSON Term where - toJSON (Term field value) = object ["term" .= object - [field .= value]] - -instance FromJSON Term where - parseJSON = withObject "Term" parse - where parse o = do termObj <- o .: "term" - case HM.toList termObj of - [(fn, v)] -> Term fn <$> parseJSON v - _ -> fail "Expected object with 1 field-named key" - -instance ToJSON BoolMatch where - toJSON (MustMatch term cache) = object ["must" .= term, - "_cache" .= cache] - toJSON (MustNotMatch term cache) = object ["must_not" .= term, - "_cache" .= cache] - toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms, - "_cache" .= cache] - -instance FromJSON BoolMatch where - parseJSON = withObject "BoolMatch" parse - where parse o = mustMatch `taggedWith` "must" - <|> mustNotMatch `taggedWith` "must_not" - <|> shouldMatch `taggedWith` "should" - where taggedWith parser k = parser =<< o .: k - mustMatch t = MustMatch t <$> o .:? "_cache" .!= defaultCache - mustNotMatch t = MustNotMatch t <$> o .:? 
"_cache" .!= defaultCache - shouldMatch t = ShouldMatch t <$> o .:? "_cache" .!= defaultCache - instance (FromJSON a) => FromJSON (SearchResult a) where parseJSON (Object v) = SearchResult <$> v .: "took" <*> @@ -3834,1747 +450,3 @@ instance (FromJSON a) => FromJSON (SearchResult a) where v .:? "_scroll_id" <*> v .:? "suggest" parseJSON _ = empty - -instance (FromJSON a) => FromJSON (SearchHits a) where - parseJSON (Object v) = SearchHits <$> - v .: "total" <*> - v .: "max_score" <*> - v .: "hits" - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (Hit a) where - parseJSON (Object v) = Hit <$> - v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - v .: "_score" <*> - v .:? "_source" <*> - v .:? "highlight" - parseJSON _ = empty - -instance FromJSON ShardResult where - parseJSON (Object v) = ShardResult <$> - v .: "total" <*> - v .: "successful" <*> - v .: "failed" - parseJSON _ = empty - - -instance FromJSON DocVersion where - parseJSON v = do - i <- parseJSON v - maybe (fail "DocVersion out of range") return $ mkDocVersion i - --- This insanity is because ES *sometimes* returns Replica/Shard counts as strings -instance FromJSON ReplicaCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ReplicaCount . parseJSON - parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) - -instance FromJSON ShardCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ShardCount . parseJSON - parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) - -instance Bounded DocVersion where - minBound = DocVersion 1 - maxBound = DocVersion 9200000000000000000 -- 9.2e+18 - -instance Enum DocVersion where - succ x - | x /= maxBound = DocVersion (succ $ docVersionNumber x) - | otherwise = succError "DocVersion" - pred x - | x /= minBound = DocVersion (pred $ docVersionNumber x) - | otherwise = predError "DocVersion" - toEnum i = - fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i - fromEnum = docVersionNumber - enumFrom = boundedEnumFrom - enumFromThen = boundedEnumFromThen - --- | Username type used for HTTP Basic authentication. See 'basicAuthHook'. -newtype EsUsername = EsUsername { esUsername :: Text } deriving (Read, Show, Eq) - --- | Password type used for HTTP Basic authentication. See 'basicAuthHook'. -newtype EsPassword = EsPassword { esPassword :: Text } deriving (Read, Show, Eq) - - -data SnapshotRepoSelection = SnapshotRepoList (NonEmpty SnapshotRepoPattern) - | AllSnapshotRepos deriving (Eq, Generic, Show, Typeable) - - --- | Either specifies an exact repo name or one with globs in it, --- e.g. @RepoPattern "foo*"@ __NOTE__: Patterns are not supported on ES < 1.7 -data SnapshotRepoPattern = ExactRepo SnapshotRepoName - | RepoPattern Text - deriving (Eq, Generic, Show, Typeable) - --- | The unique name of a snapshot repository. -newtype SnapshotRepoName = SnapshotRepoName { snapshotRepoName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, ToJSON, FromJSON) - - --- | A generic representation of a snapshot repo. This is what gets --- sent to and parsed from the server. For repo types enabled by --- plugins that aren't exported by this library, consider making a --- custom type which implements 'SnapshotRepo'. 
If it is a common repo --- type, consider submitting a pull request to have it included in the --- library proper -data GenericSnapshotRepo = GenericSnapshotRepo { - gSnapshotRepoName :: SnapshotRepoName - , gSnapshotRepoType :: SnapshotRepoType - , gSnapshotRepoSettings :: GenericSnapshotRepoSettings - } deriving (Eq, Generic, Show, Typeable) - - -instance SnapshotRepo GenericSnapshotRepo where - toGSnapshotRepo = id - fromGSnapshotRepo = Right - - -newtype SnapshotRepoType = SnapshotRepoType { snapshotRepoType :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, ToJSON, FromJSON) - - --- | Opaque representation of snapshot repo settings. Instances of --- 'SnapshotRepo' will produce this. -newtype GenericSnapshotRepoSettings = GenericSnapshotRepoSettings { gSnapshotRepoSettingsObject :: Object } - deriving (Eq, Generic, Show, Typeable, ToJSON) - - - -- Regardless of whether you send strongly typed json, my version of - -- ES sends back stringly typed json in the settings, e.g. booleans - -- as strings, so we'll try to convert them. -instance FromJSON GenericSnapshotRepoSettings where - parseJSON = fmap (GenericSnapshotRepoSettings . fmap unStringlyTypeJSON). parseJSON - --- | The result of running 'verifySnapshotRepo'. -newtype SnapshotVerification = SnapshotVerification { snapshotNodeVerifications :: [SnapshotNodeVerification] } - deriving (Eq, Generic, Show, Typeable) - - -instance FromJSON SnapshotVerification where - parseJSON = withObject "SnapshotVerification" parse - where - parse o = do - o2 <- o .: "nodes" - SnapshotVerification <$> mapM (uncurry parse') (HM.toList o2) - parse' rawFullId = withObject "SnapshotNodeVerification" $ \o -> - SnapshotNodeVerification (FullNodeId rawFullId) <$> o .: "name" - - --- | A node that has verified a snapshot -data SnapshotNodeVerification = SnapshotNodeVerification { - snvFullId :: FullNodeId - , snvNodeName :: NodeName - } deriving (Eq, Generic, Show, Typeable) - - --- | Unique, automatically-generated name assigned to nodes that are --- usually returned in node-oriented APIs. -newtype FullNodeId = FullNodeId { fullNodeId :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - - --- | A human-readable node name that is supplied by the user in the --- node config or automatically generated by Elasticsearch. 
-newtype NodeName = NodeName { nodeName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -newtype ClusterName = ClusterName { clusterName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodesInfo = NodesInfo { - nodesInfo :: [NodeInfo] - , nodesClusterName :: ClusterName - } deriving (Eq, Show, Generic, Typeable) - -data NodesStats = NodesStats { - nodesStats :: [NodeStats] - , nodesStatsClusterName :: ClusterName - } deriving (Eq, Show, Generic, Typeable) - -data NodeStats = NodeStats { - nodeStatsName :: NodeName - , nodeStatsFullId :: FullNodeId - , nodeStatsBreakersStats :: Maybe NodeBreakersStats - , nodeStatsHTTP :: NodeHTTPStats - , nodeStatsTransport :: NodeTransportStats - , nodeStatsFS :: NodeFSStats - , nodeStatsNetwork :: NodeNetworkStats - , nodeStatsThreadPool :: NodeThreadPoolsStats - , nodeStatsJVM :: NodeJVMStats - , nodeStatsProcess :: NodeProcessStats - , nodeStatsOS :: NodeOSStats - , nodeStatsIndices :: NodeIndicesStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeBreakersStats = NodeBreakersStats { - nodeStatsParentBreaker :: NodeBreakerStats - , nodeStatsRequestBreaker :: NodeBreakerStats - , nodeStatsFieldDataBreaker :: NodeBreakerStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeBreakerStats = NodeBreakerStats { - nodeBreakersTripped :: Int - , nodeBreakersOverhead :: Double - , nodeBreakersEstSize :: Bytes - , nodeBreakersLimitSize :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeHTTPStats = NodeHTTPStats { - nodeHTTPTotalOpened :: Int - , nodeHTTPCurrentOpen :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeTransportStats = NodeTransportStats { - nodeTransportTXSize :: Bytes - , nodeTransportCount :: Int - , nodeTransportRXSize :: Bytes - , nodeTransportRXCount :: Int - , nodeTransportServerOpen :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeFSStats = NodeFSStats { - nodeFSDataPaths :: [NodeDataPathStats] - , nodeFSTotal :: NodeFSTotalStats - , nodeFSTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data NodeDataPathStats = NodeDataPathStats { - nodeDataPathDiskServiceTime :: Maybe Double - , nodeDataPathDiskQueue :: Maybe Double - , nodeDataPathIOSize :: Maybe Bytes - , nodeDataPathWriteSize :: Maybe Bytes - , nodeDataPathReadSize :: Maybe Bytes - , nodeDataPathIOOps :: Maybe Int - , nodeDataPathWrites :: Maybe Int - , nodeDataPathReads :: Maybe Int - , nodeDataPathAvailable :: Bytes - , nodeDataPathFree :: Bytes - , nodeDataPathTotal :: Bytes - , nodeDataPathType :: Maybe Text - , nodeDataPathDevice :: Text - , nodeDataPathMount :: Text - , nodeDataPathPath :: Text - } deriving (Eq, Show, Generic, Typeable) - -data NodeFSTotalStats = NodeFSTotalStats { - nodeFSTotalDiskServiceTime :: Maybe Double - , nodeFSTotalDiskQueue :: Maybe Double - , nodeFSTotalIOSize :: Maybe Bytes - , nodeFSTotalWriteSize :: Maybe Bytes - , nodeFSTotalReadSize :: Maybe Bytes - , nodeFSTotalIOOps :: Maybe Int - , nodeFSTotalWrites :: Maybe Int - , nodeFSTotalReads :: Maybe Int - , nodeFSTotalAvailable :: Bytes - , nodeFSTotalFree :: Bytes - , nodeFSTotalTotal :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeNetworkStats = NodeNetworkStats { - nodeNetTCPOutRSTs :: Int - , nodeNetTCPInErrs :: Int - , nodeNetTCPAttemptFails :: Int - , nodeNetTCPEstabResets :: Int - , nodeNetTCPRetransSegs :: Int - , nodeNetTCPOutSegs :: Int - , nodeNetTCPInSegs :: Int - , nodeNetTCPCurrEstab :: Int - , nodeNetTCPPassiveOpens :: Int - , nodeNetTCPActiveOpens :: Int - } 
deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolsStats = NodeThreadPoolsStats { - nodeThreadPoolsStatsSnapshot :: NodeThreadPoolStats - , nodeThreadPoolsStatsBulk :: NodeThreadPoolStats - , nodeThreadPoolsStatsMerge :: NodeThreadPoolStats - , nodeThreadPoolsStatsGet :: NodeThreadPoolStats - , nodeThreadPoolsStatsManagement :: NodeThreadPoolStats - , nodeThreadPoolsStatsFetchShardStore :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsOptimize :: NodeThreadPoolStats - , nodeThreadPoolsStatsFlush :: NodeThreadPoolStats - , nodeThreadPoolsStatsSearch :: NodeThreadPoolStats - , nodeThreadPoolsStatsWarmer :: NodeThreadPoolStats - , nodeThreadPoolsStatsGeneric :: NodeThreadPoolStats - , nodeThreadPoolsStatsSuggest :: NodeThreadPoolStats - , nodeThreadPoolsStatsRefresh :: NodeThreadPoolStats - , nodeThreadPoolsStatsIndex :: NodeThreadPoolStats - , nodeThreadPoolsStatsListener :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsFetchShardStarted :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsPercolate :: NodeThreadPoolStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolStats = NodeThreadPoolStats { - nodeThreadPoolCompleted :: Int - , nodeThreadPoolLargest :: Int - , nodeThreadPoolRejected :: Int - , nodeThreadPoolActive :: Int - , nodeThreadPoolQueue :: Int - , nodeThreadPoolThreads :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeJVMStats = NodeJVMStats { - nodeJVMStatsMappedBufferPool :: JVMBufferPoolStats - , nodeJVMStatsDirectBufferPool :: JVMBufferPoolStats - , nodeJVMStatsGCOldCollector :: JVMGCStats - , nodeJVMStatsGCYoungCollector :: JVMGCStats - , nodeJVMStatsPeakThreadsCount :: Int - , nodeJVMStatsThreadsCount :: Int - , nodeJVMStatsOldPool :: JVMPoolStats - , nodeJVMStatsSurvivorPool :: JVMPoolStats - , nodeJVMStatsYoungPool :: JVMPoolStats - , nodeJVMStatsNonHeapCommitted :: Bytes - , nodeJVMStatsNonHeapUsed :: Bytes - , nodeJVMStatsHeapMax :: Bytes - , nodeJVMStatsHeapCommitted :: Bytes - , nodeJVMStatsHeapUsedPercent :: Int - , nodeJVMStatsHeapUsed :: Bytes - , nodeJVMStatsUptime :: NominalDiffTime - , nodeJVMStatsTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data JVMBufferPoolStats = JVMBufferPoolStats { - jvmBufferPoolStatsTotalCapacity :: Bytes - , jvmBufferPoolStatsUsed :: Bytes - , jvmBufferPoolStatsCount :: Int - } deriving (Eq, Show, Generic, Typeable) - -data JVMGCStats = JVMGCStats { - jvmGCStatsCollectionTime :: NominalDiffTime - , jvmGCStatsCollectionCount :: Int - } deriving (Eq, Show, Generic, Typeable) - -data JVMPoolStats = JVMPoolStats { - jvmPoolStatsPeakMax :: Bytes - , jvmPoolStatsPeakUsed :: Bytes - , jvmPoolStatsMax :: Bytes - , jvmPoolStatsUsed :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeProcessStats = NodeProcessStats { - nodeProcessMemTotalVirtual :: Bytes - , nodeProcessMemShare :: Bytes - , nodeProcessMemResident :: Bytes - , nodeProcessCPUTotal :: NominalDiffTime - , nodeProcessCPUUser :: NominalDiffTime - , nodeProcessCPUSys :: NominalDiffTime - , nodeProcessCPUPercent :: Int - , nodeProcessOpenFDs :: Int - , nodeProcessTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data NodeOSStats = NodeOSStats { - nodeOSSwapFree :: Bytes - , nodeOSSwapUsed :: Bytes - , nodeOSMemActualUsed :: Bytes - , nodeOSMemActualFree :: Bytes - , nodeOSMemUsedPercent :: Int - , nodeOSMemFreePercent :: Int - , nodeOSMemUsed :: Bytes - , nodeOSMemFree :: Bytes - , nodeOSCPUStolen :: Int - , nodeOSCPUUsage :: Int - , nodeOSCPUIdle :: Int - , nodeOSCPUUser :: Int - , 
nodeOSCPUSys :: Int - , nodeOSLoad :: Maybe LoadAvgs - , nodeOSUptime :: NominalDiffTime - , nodeOSTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data LoadAvgs = LoadAvgs { - loadAvg1Min :: Double - , loadAvg5Min :: Double - , loadAvg15Min :: Double - } deriving (Eq, Show, Generic, Typeable) - -data NodeIndicesStats = NodeIndicesStats { - nodeIndicesStatsRecoveryThrottleTime :: Maybe NominalDiffTime - , nodeIndicesStatsRecoveryCurrentAsTarget :: Maybe Int - , nodeIndicesStatsRecoveryCurrentAsSource :: Maybe Int - , nodeIndicesStatsQueryCacheMisses :: Maybe Int - , nodeIndicesStatsQueryCacheHits :: Maybe Int - , nodeIndicesStatsQueryCacheEvictions :: Maybe Int - , nodeIndicesStatsQueryCacheSize :: Maybe Bytes - , nodeIndicesStatsSuggestCurrent :: Int - , nodeIndicesStatsSuggestTime :: NominalDiffTime - , nodeIndicesStatsSuggestTotal :: Int - , nodeIndicesStatsTranslogSize :: Bytes - , nodeIndicesStatsTranslogOps :: Int - , nodeIndicesStatsSegFixedBitSetMemory :: Maybe Bytes - , nodeIndicesStatsSegVersionMapMemory :: Bytes - , nodeIndicesStatsSegIndexWriterMaxMemory :: Maybe Bytes - , nodeIndicesStatsSegIndexWriterMemory :: Bytes - , nodeIndicesStatsSegMemory :: Bytes - , nodeIndicesStatsSegCount :: Int - , nodeIndicesStatsCompletionSize :: Bytes - , nodeIndicesStatsPercolateQueries :: Int - , nodeIndicesStatsPercolateMemory :: Bytes - , nodeIndicesStatsPercolateCurrent :: Int - , nodeIndicesStatsPercolateTime :: NominalDiffTime - , nodeIndicesStatsPercolateTotal :: Int - , nodeIndicesStatsFieldDataEvictions :: Int - , nodeIndicesStatsFieldDataMemory :: Bytes - , nodeIndicesStatsIDCacheMemory :: Bytes - , nodeIndicesStatsFilterCacheEvictions :: Int - , nodeIndicesStatsFilterCacheMemory :: Bytes - , nodeIndicesStatsWarmerTotalTime :: NominalDiffTime - , nodeIndicesStatsWarmerTotal :: Int - , nodeIndicesStatsWarmerCurrent :: Int - , nodeIndicesStatsFlushTotalTime :: NominalDiffTime - , nodeIndicesStatsFlushTotal :: Int - , nodeIndicesStatsRefreshTotalTime :: NominalDiffTime - , nodeIndicesStatsRefreshTotal :: Int - , nodeIndicesStatsMergesTotalSize :: Bytes - , nodeIndicesStatsMergesTotalDocs :: Int - , nodeIndicesStatsMergesTotalTime :: NominalDiffTime - , nodeIndicesStatsMergesTotal :: Int - , nodeIndicesStatsMergesCurrentSize :: Bytes - , nodeIndicesStatsMergesCurrentDocs :: Int - , nodeIndicesStatsMergesCurrent :: Int - , nodeIndicesStatsSearchFetchCurrent :: Int - , nodeIndicesStatsSearchFetchTime :: NominalDiffTime - , nodeIndicesStatsSearchFetchTotal :: Int - , nodeIndicesStatsSearchQueryCurrent :: Int - , nodeIndicesStatsSearchQueryTime :: NominalDiffTime - , nodeIndicesStatsSearchQueryTotal :: Int - , nodeIndicesStatsSearchOpenContexts :: Int - , nodeIndicesStatsGetCurrent :: Int - , nodeIndicesStatsGetMissingTime :: NominalDiffTime - , nodeIndicesStatsGetMissingTotal :: Int - , nodeIndicesStatsGetExistsTime :: NominalDiffTime - , nodeIndicesStatsGetExistsTotal :: Int - , nodeIndicesStatsGetTime :: NominalDiffTime - , nodeIndicesStatsGetTotal :: Int - , nodeIndicesStatsIndexingThrottleTime :: Maybe NominalDiffTime - , nodeIndicesStatsIndexingIsThrottled :: Maybe Bool - , nodeIndicesStatsIndexingNoopUpdateTotal :: Maybe Int - , nodeIndicesStatsIndexingDeleteCurrent :: Int - , nodeIndicesStatsIndexingDeleteTime :: NominalDiffTime - , nodeIndicesStatsIndexingDeleteTotal :: Int - , nodeIndicesStatsIndexingIndexCurrent :: Int - , nodeIndicesStatsIndexingIndexTime :: NominalDiffTime - , nodeIndicesStatsIndexingTotal :: Int - , nodeIndicesStatsStoreThrottleTime :: 
NominalDiffTime - , nodeIndicesStatsStoreSize :: Bytes - , nodeIndicesStatsDocsDeleted :: Int - , nodeIndicesStatsDocsCount :: Int - } deriving (Eq, Show, Generic, Typeable) - --- | A quirky address format used throughout Elasticsearch. An example --- would be inet[/1.1.1.1:9200]. inet may be a placeholder for a --- . -newtype EsAddress = EsAddress { esAddress :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - --- | Typically a 7 character hex string. -newtype BuildHash = BuildHash { buildHash :: Text } - deriving (Eq, Ord, Generic, Read, Show, Typeable, FromJSON, ToJSON) - -newtype PluginName = PluginName { pluginName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodeInfo = NodeInfo { - nodeInfoHTTPAddress :: EsAddress - , nodeInfoBuild :: BuildHash - , nodeInfoESVersion :: VersionNumber - , nodeInfoIP :: Server - , nodeInfoHost :: Server - , nodeInfoTransportAddress :: EsAddress - , nodeInfoName :: NodeName - , nodeInfoFullId :: FullNodeId - , nodeInfoPlugins :: [NodePluginInfo] - , nodeInfoHTTP :: NodeHTTPInfo - , nodeInfoTransport :: NodeTransportInfo - , nodeInfoNetwork :: NodeNetworkInfo - , nodeInfoThreadPool :: NodeThreadPoolsInfo - , nodeInfoJVM :: NodeJVMInfo - , nodeInfoProcess :: NodeProcessInfo - , nodeInfoOS :: NodeOSInfo - , nodeInfoSettings :: Object - -- ^ The members of the settings objects are not consistent, - -- dependent on plugins, etc. - } deriving (Eq, Show, Generic, Typeable) - -data NodePluginInfo = NodePluginInfo { - nodePluginSite :: Bool - -- ^ Is this a site plugin? - , nodePluginJVM :: Bool - -- ^ Is this plugin running on the JVM - , nodePluginDescription :: Text - , nodePluginVersion :: MaybeNA VersionNumber - , nodePluginName :: PluginName - } deriving (Eq, Show, Generic, Typeable) - -data NodeHTTPInfo = NodeHTTPInfo { - nodeHTTPMaxContentLength :: Bytes - , nodeHTTPTransportAddress :: BoundTransportAddress - } deriving (Eq, Show, Generic, Typeable) - -data NodeTransportInfo = NodeTransportInfo { - nodeTransportProfiles :: [BoundTransportAddress] - , nodeTransportAddress :: BoundTransportAddress - } deriving (Eq, Show, Generic, Typeable) - -data BoundTransportAddress = BoundTransportAddress { - publishAddress :: EsAddress - , boundAddress :: EsAddress - } deriving (Eq, Show, Generic, Typeable) - -data NodeNetworkInfo = NodeNetworkInfo { - nodeNetworkPrimaryInterface :: NodeNetworkInterface - , nodeNetworkRefreshInterval :: NominalDiffTime - } deriving (Eq, Show, Generic, Typeable) - -newtype MacAddress = MacAddress { macAddress :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -newtype NetworkInterfaceName = NetworkInterfaceName { networkInterfaceName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodeNetworkInterface = NodeNetworkInterface { - nodeNetIfaceMacAddress :: MacAddress - , nodeNetIfaceName :: NetworkInterfaceName - , nodeNetIfaceAddress :: Server - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolsInfo = NodeThreadPoolsInfo { - nodeThreadPoolsRefresh :: NodeThreadPoolInfo - , nodeThreadPoolsManagement :: NodeThreadPoolInfo - , nodeThreadPoolsPercolate :: NodeThreadPoolInfo - , nodeThreadPoolsListener :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsFetchShardStarted :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsSearch :: NodeThreadPoolInfo - , nodeThreadPoolsFlush :: NodeThreadPoolInfo - , nodeThreadPoolsWarmer :: NodeThreadPoolInfo - , nodeThreadPoolsOptimize :: NodeThreadPoolInfo - , nodeThreadPoolsBulk :: NodeThreadPoolInfo - , 
nodeThreadPoolsSuggest :: NodeThreadPoolInfo - , nodeThreadPoolsMerge :: NodeThreadPoolInfo - , nodeThreadPoolsSnapshot :: NodeThreadPoolInfo - , nodeThreadPoolsGet :: NodeThreadPoolInfo - , nodeThreadPoolsFetchShardStore :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsIndex :: NodeThreadPoolInfo - , nodeThreadPoolsGeneric :: NodeThreadPoolInfo - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolInfo = NodeThreadPoolInfo { - nodeThreadPoolQueueSize :: ThreadPoolSize - , nodeThreadPoolKeepalive :: Maybe NominalDiffTime - , nodeThreadPoolMin :: Maybe Int - , nodeThreadPoolMax :: Maybe Int - , nodeThreadPoolType :: ThreadPoolType - } deriving (Eq, Show, Generic, Typeable) - -data ThreadPoolSize = ThreadPoolBounded Int - | ThreadPoolUnbounded - deriving (Eq, Show, Generic, Typeable) - -data ThreadPoolType = ThreadPoolScaling - | ThreadPoolFixed - | ThreadPoolCached - deriving (Eq, Show, Generic, Typeable) - -data NodeJVMInfo = NodeJVMInfo { - nodeJVMInfoMemoryPools :: [JVMMemoryPool] - , nodeJVMInfoMemoryPoolsGCCollectors :: [JVMGCCollector] - , nodeJVMInfoMemoryInfo :: JVMMemoryInfo - , nodeJVMInfoStartTime :: UTCTime - , nodeJVMInfoVMVendor :: Text - , nodeJVMVMVersion :: VersionNumber - -- ^ JVM doesn't seem to follow normal version conventions - , nodeJVMVMName :: Text - , nodeJVMVersion :: VersionNumber - , nodeJVMPID :: PID - } deriving (Eq, Show, Generic, Typeable) - --- | Handles quirks in the way JVM versions are rendered (1.7.0_101 -> 1.7.0.101) -newtype JVMVersion = JVMVersion { unJVMVersion :: VersionNumber } - -data JVMMemoryInfo = JVMMemoryInfo { - jvmMemoryInfoDirectMax :: Bytes - , jvmMemoryInfoNonHeapMax :: Bytes - , jvmMemoryInfoNonHeapInit :: Bytes - , jvmMemoryInfoHeapMax :: Bytes - , jvmMemoryInfoHeapInit :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -newtype JVMMemoryPool = JVMMemoryPool { - jvmMemoryPool :: Text - } deriving (Eq, Show, Generic, Typeable, FromJSON) - -newtype JVMGCCollector = JVMGCCollector { - jvmGCCollector :: Text - } deriving (Eq, Show, Generic, Typeable, FromJSON) - -newtype PID = PID { - pid :: Int - } deriving (Eq, Show, Generic, Typeable, FromJSON) - -data NodeOSInfo = NodeOSInfo { - nodeOSSwap :: Bytes - , nodeOSMem :: Bytes - , nodeOSCPUInfo :: CPUInfo - , nodeOSAvailableProcessors :: Int - , nodeOSRefreshInterval :: NominalDiffTime - } deriving (Eq, Show, Generic, Typeable) - -data CPUInfo = CPUInfo { - cpuCacheSize :: Bytes - , cpuCoresPerSocket :: Int - , cpuTotalSockets :: Int - , cpuTotalCores :: Int - , cpuMHZ :: Int - , cpuModel :: Text - , cpuVendor :: Text - } deriving (Eq, Show, Generic, Typeable) - -data NodeProcessInfo = NodeProcessInfo { - nodeProcessMLockAll :: Bool - -- ^ See the Elasticsearch docs on @mlockall@. - , nodeProcessMaxFileDescriptors :: Int - , nodeProcessId :: PID - , nodeProcessRefreshInterval :: NominalDiffTime - } deriving (Eq, Show, Generic, Typeable) - -data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings { - repoUpdateVerify :: Bool - -- ^ After creation/update, synchronously check that nodes can - -- write to this repo. Defaults to True. You may use False if you - -- need a faster response and plan on verifying manually later - -- with 'verifySnapshotRepo'. - } deriving (Eq, Show, Generic, Typeable) - - --- | Reasonable defaults for repo creation/update --- --- * repoUpdateVerify True -defaultSnapshotRepoUpdateSettings :: SnapshotRepoUpdateSettings -defaultSnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings True - - --- | A filesystem-based snapshot repo that ships with --- Elasticsearch. 
This is an instance of 'SnapshotRepo' so it can be --- used with 'updateSnapshotRepo' -data FsSnapshotRepo = FsSnapshotRepo { - fsrName :: SnapshotRepoName - , fsrLocation :: FilePath - , fsrCompressMetadata :: Bool - , fsrChunkSize :: Maybe Bytes - -- ^ Size by which to split large files during snapshotting. - , fsrMaxRestoreBytesPerSec :: Maybe Bytes - -- ^ Throttle node restore rate. If not supplied, defaults to 40mb/sec - , fsrMaxSnapshotBytesPerSec :: Maybe Bytes - -- ^ Throttle node snapshot rate. If not supplied, defaults to 40mb/sec - } deriving (Eq, Generic, Show, Typeable) - - -instance SnapshotRepo FsSnapshotRepo where - toGSnapshotRepo FsSnapshotRepo {..} = - GenericSnapshotRepo fsrName fsRepoType (GenericSnapshotRepoSettings settings) - where - Object settings = object $ [ "location" .= fsrLocation - , "compress" .= fsrCompressMetadata - ] ++ optionalPairs - optionalPairs = catMaybes [ ("chunk_size" .=) <$> fsrChunkSize - , ("max_restore_bytes_per_sec" .=) <$> fsrMaxRestoreBytesPerSec - , ("max_snapshot_bytes_per_sec" .=) <$> fsrMaxSnapshotBytesPerSec - ] - fromGSnapshotRepo GenericSnapshotRepo {..} - | gSnapshotRepoType == fsRepoType = do - let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings - parseRepo $ do - FsSnapshotRepo gSnapshotRepoName <$> o .: "location" - <*> o .:? "compress" .!= False - <*> o .:? "chunk_size" - <*> o .:? "max_restore_bytes_per_sec" - <*> o .:? "max_snapshot_bytes_per_sec" - | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType) - - -parseRepo :: Parser a -> Either SnapshotRepoConversionError a -parseRepo parser = case parseEither (const parser) () of - Left e -> Left (OtherRepoConversionError (T.pack e)) - Right a -> Right a - - -fsRepoType :: SnapshotRepoType -fsRepoType = SnapshotRepoType "fs" - --- | Law: fromGSnapshotRepo (toGSnapshotRepo r) == Right r -class SnapshotRepo r where - toGSnapshotRepo :: r -> GenericSnapshotRepo - fromGSnapshotRepo :: GenericSnapshotRepo -> Either SnapshotRepoConversionError r - - -data SnapshotRepoConversionError = RepoTypeMismatch SnapshotRepoType SnapshotRepoType - -- ^ Expected type and actual type - | OtherRepoConversionError Text - deriving (Show, Eq, Generic, Typeable) - - -instance Exception SnapshotRepoConversionError - - -data SnapshotCreateSettings = SnapshotCreateSettings { - snapWaitForCompletion :: Bool - -- ^ Should the API call return immediately after initializing - -- the snapshot or wait until completed? Note that if this is - -- enabled it could wait a long time, so you should adjust your - -- 'ManagerSettings' accordingly to set long timeouts or - -- explicitly handle timeouts. - , snapIndices :: Maybe IndexSelection - -- ^ Nothing will snapshot all indices. Just [] is permissible and - -- will essentially be a no-op snapshot. - , snapIgnoreUnavailable :: Bool - -- ^ If set to True, any matched indices that don't exist will be - -- ignored. Otherwise it will be an error and the snapshot will fail. - , snapIncludeGlobalState :: Bool - , snapPartial :: Bool - -- ^ If some indices failed to snapshot (e.g. if not all primary - -- shards are available), should the process proceed? 
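-- An illustrative sketch, not part of the original diff: all names here come
-- from this module, and 'defaultSnapshotCreateSettings' is defined just
-- below. A record update lets a snapshot proceed despite missing primaries:
--
-- @
-- partialSnap :: SnapshotCreateSettings
-- partialSnap = defaultSnapshotCreateSettings { snapPartial = True }
-- @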
- } deriving (Eq, Generic, Show, Typeable) - - --- | Reasonable defaults for snapshot creation --- --- * snapWaitForCompletion False --- * snapIndices Nothing --- * snapIgnoreUnavailable False --- * snapIncludeGlobalState True --- * snapPartial False -defaultSnapshotCreateSettings :: SnapshotCreateSettings -defaultSnapshotCreateSettings = SnapshotCreateSettings { - snapWaitForCompletion = False - , snapIndices = Nothing - , snapIgnoreUnavailable = False - , snapIncludeGlobalState = True - , snapPartial = False - } - - -data SnapshotSelection = SnapshotList (NonEmpty SnapshotPattern) - | AllSnapshots deriving (Eq, Generic, Show, Typeable) - - --- | Either specifies an exact snapshot name or one with globs in it, --- e.g. @SnapPattern "foo*"@ __NOTE__: Patterns are not supported on --- ES < 1.7 -data SnapshotPattern = ExactSnap SnapshotName - | SnapPattern Text - deriving (Eq, Generic, Show, Typeable) - - --- | General information about the state of a snapshot. Has some --- redundancies with 'SnapshotStatus' -data SnapshotInfo = SnapshotInfo { - snapInfoShards :: ShardResult - , snapInfoFailures :: [SnapshotShardFailure] - , snapInfoDuration :: NominalDiffTime - , snapInfoEndTime :: UTCTime - , snapInfoStartTime :: UTCTime - , snapInfoState :: SnapshotState - , snapInfoIndices :: [IndexName] - , snapInfoName :: SnapshotName - } deriving (Eq, Generic, Show, Typeable) - - -instance FromJSON SnapshotInfo where - parseJSON = withObject "SnapshotInfo" parse - where - parse o = SnapshotInfo <$> o .: "shards" - <*> o .: "failures" - <*> (unMS <$> o .: "duration_in_millis") - <*> (posixMS <$> o .: "end_time_in_millis") - <*> (posixMS <$> o .: "start_time_in_millis") - <*> o .: "state" - <*> o .: "indices" - <*> o .: "snapshot" - -data SnapshotShardFailure = SnapshotShardFailure { - snapShardFailureIndex :: IndexName - , snapShardFailureNodeId :: Maybe NodeName -- I'm not 100% sure this isn't actually 'FullNodeId' - , snapShardFailureReason :: Text - , snapShardFailureShardId :: ShardId - } deriving (Eq, Show, Generic, Typeable) - - -instance FromJSON SnapshotShardFailure where - parseJSON = withObject "SnapshotShardFailure" parse - where - parse o = SnapshotShardFailure <$> o .: "index" - <*> o .:? "node_id" - <*> o .: "reason" - <*> o .: "shard_id" - - -newtype ShardId = ShardId { shardId :: Int } - deriving (Eq, Show, Generic, Typeable, FromJSON) - --- | Milliseconds -newtype MS = MS NominalDiffTime - - --- keeps the unexported constructor warnings at bay -unMS :: MS -> NominalDiffTime -unMS (MS t) = t - - -instance FromJSON MS where - parseJSON = withScientific "MS" (return . MS . 
parse) - where - parse n = fromInteger ((truncate n) * 1000) - - -data SnapshotState = SnapshotInit - | SnapshotStarted - | SnapshotSuccess - | SnapshotFailed - | SnapshotAborted - | SnapshotMissing - | SnapshotWaiting - deriving (Show, Eq, Generic, Typeable) - -instance FromJSON SnapshotState where - parseJSON = withText "SnapshotState" parse - where - parse "INIT" = return SnapshotInit - parse "STARTED" = return SnapshotStarted - parse "SUCCESS" = return SnapshotSuccess - parse "FAILED" = return SnapshotFailed - parse "ABORTED" = return SnapshotAborted - parse "MISSING" = return SnapshotMissing - parse "WAITING" = return SnapshotWaiting - parse t = fail ("Invalid snapshot state " <> T.unpack t) - - -newtype SnapshotName = SnapshotName { snapshotName :: Text } - deriving (Show, Eq, Ord, Generic, Typeable, ToJSON, FromJSON) - - -data SnapshotRestoreSettings = SnapshotRestoreSettings { - snapRestoreWaitForCompletion :: Bool - -- ^ Should the API call return immediately after initializing - -- the restore or wait until completed? Note that if this is - -- enabled, it could wait a long time, so you should adjust your - -- 'ManagerSettings' accordingly to set long timeouts or - -- explicitly handle timeouts. - , snapRestoreIndices :: Maybe IndexSelection - -- ^ Nothing will restore all indices in the snapshot. Just [] is - -- permissible and will essentially be a no-op restore. - , snapRestoreIgnoreUnavailable :: Bool - -- ^ If set to True, any indices that do not exist will be ignored - -- during the restore rather than failing it. - , snapRestoreIncludeGlobalState :: Bool - -- ^ If set to false, will ignore any global state in the snapshot - -- and will not restore it. - , snapRestoreRenamePattern :: Maybe RestoreRenamePattern - -- ^ A regex pattern for matching indices. Used with - -- 'snapRestoreRenameReplacement', the restore can reference the - -- matched index and create a new index name upon restore. - , snapRestoreRenameReplacement :: Maybe (NonEmpty RestoreRenameToken) - -- ^ Expression of how index renames should be constructed. - , snapRestorePartial :: Bool - -- ^ If some indices fail to restore, should the process proceed? - , snapRestoreIncludeAliases :: Bool - -- ^ Should the restore also restore the aliases captured in the - -- snapshot. - , snapRestoreIndexSettingsOverrides :: Maybe RestoreIndexSettings - -- ^ Settings to apply during the restore process. __NOTE:__ This - -- option is not supported in ES < 1.5 and should be set to - -- Nothing in that case. - , snapRestoreIgnoreIndexSettings :: Maybe (NonEmpty Text) - -- ^ This type could be richer but it isn't clear which - -- settings are allowed to be ignored during restore, so we're - -- going with including this feature in a basic form rather than - -- omitting it. One example here would be - -- "index.refresh_interval". Any setting specified here will - -- revert to the server default during the restore process. - } deriving (Eq, Generic, Show, Typeable) - --- | Regex-style pattern, e.g. "index_(.+)" to match index names -newtype RestoreRenamePattern = RestoreRenamePattern { rrPattern :: Text } - deriving (Show, Eq, Generic, Typeable, Ord, ToJSON) - - --- | A single token in an index renaming scheme for a restore. These --- are concatenated into a string before being sent to --- Elasticsearch. Check out the Java regex @Matcher@ docs --- to find out more if you're into that sort of thing. -data RestoreRenameToken = RRTLit Text - -- ^ Just a literal string of characters - | RRSubWholeMatch - -- ^ Equivalent to $0. 
The entire matched pattern, not any subgroup - | RRSubGroup RRGroupRefNum - -- ^ A specific reference to a group number - deriving (Show, Eq, Generic, Typeable) - - --- | A group number for regex matching. Only values from 1-9 are --- supported. Construct with 'mkRRGroupRefNum' -newtype RRGroupRefNum = RRGroupRefNum { rrGroupRefNum :: Int } - deriving (Show, Eq, Generic, Typeable, Ord) - -instance Bounded RRGroupRefNum where - minBound = RRGroupRefNum 1 - maxBound = RRGroupRefNum 9 - - --- | Only allows valid group number references (1-9). -mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum -mkRRGroupRefNum i - | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) = - Just $ RRGroupRefNum i - | otherwise = Nothing - - --- | Reasonable defaults for snapshot restores --- --- * snapRestoreWaitForCompletion False --- * snapRestoreIndices Nothing --- * snapRestoreIgnoreUnavailable False --- * snapRestoreIncludeGlobalState True --- * snapRestoreRenamePattern Nothing --- * snapRestoreRenameReplacement Nothing --- * snapRestorePartial False --- * snapRestoreIncludeAliases True --- * snapRestoreIndexSettingsOverrides Nothing --- * snapRestoreIgnoreIndexSettings Nothing -defaultSnapshotRestoreSettings :: SnapshotRestoreSettings -defaultSnapshotRestoreSettings = SnapshotRestoreSettings { - snapRestoreWaitForCompletion = False - , snapRestoreIndices = Nothing - , snapRestoreIgnoreUnavailable = False - , snapRestoreIncludeGlobalState = True - , snapRestoreRenamePattern = Nothing - , snapRestoreRenameReplacement = Nothing - , snapRestorePartial = False - , snapRestoreIncludeAliases = True - , snapRestoreIndexSettingsOverrides = Nothing - , snapRestoreIgnoreIndexSettings = Nothing - } - - --- | Index settings that can be overridden. The docs only mention you --- can update number of replicas, but there may be more. You --- definitely cannot override shard count. 
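-- An illustrative sketch, not part of the original diff: overriding the one
-- documented setting. 'ReplicaCount' is assumed to be this library's replica
-- count newtype.
--
-- @
-- noReplicas :: RestoreIndexSettings
-- noReplicas = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
-- @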
-data RestoreIndexSettings = RestoreIndexSettings { - restoreOverrideReplicas :: Maybe ReplicaCount - } deriving (Show, Eq, Generic, Typeable) - - -instance ToJSON RestoreIndexSettings where - toJSON RestoreIndexSettings {..} = object prs - where - prs = catMaybes [("index.number_of_replicas" .=) <$> restoreOverrideReplicas] - - -instance FromJSON NodesInfo where - parseJSON = withObject "NodesInfo" parse - where - parse o = do - nodes <- o .: "nodes" - infos <- forM (HM.toList nodes) $ \(fullNID, v) -> do - node <- parseJSON v - parseNodeInfo (FullNodeId fullNID) node - cn <- o .: "cluster_name" - return (NodesInfo infos cn) - -instance FromJSON NodesStats where - parseJSON = withObject "NodesStats" parse - where - parse o = do - nodes <- o .: "nodes" - stats <- forM (HM.toList nodes) $ \(fullNID, v) -> do - node <- parseJSON v - parseNodeStats (FullNodeId fullNID) node - cn <- o .: "cluster_name" - return (NodesStats stats cn) - -instance FromJSON NodeBreakerStats where - parseJSON = withObject "NodeBreakerStats" parse - where - parse o = NodeBreakerStats <$> o .: "tripped" - <*> o .: "overhead" - <*> o .: "estimated_size_in_bytes" - <*> o .: "limit_size_in_bytes" - -instance FromJSON NodeHTTPStats where - parseJSON = withObject "NodeHTTPStats" parse - where - parse o = NodeHTTPStats <$> o .: "total_opened" - <*> o .: "current_open" - -instance FromJSON NodeTransportStats where - parseJSON = withObject "NodeTransportStats" parse - where - parse o = NodeTransportStats <$> o .: "tx_size_in_bytes" - <*> o .: "tx_count" - <*> o .: "rx_size_in_bytes" - <*> o .: "rx_count" - <*> o .: "server_open" - -instance FromJSON NodeFSStats where - parseJSON = withObject "NodeFSStats" parse - where - parse o = NodeFSStats <$> o .: "data" - <*> o .: "total" - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON NodeDataPathStats where - parseJSON = withObject "NodeDataPathStats" parse - where - parse o = - NodeDataPathStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") - <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") - <*> o .:? "disk_io_size_in_bytes" - <*> o .:? "disk_write_size_in_bytes" - <*> o .:? "disk_read_size_in_bytes" - <*> o .:? "disk_io_op" - <*> o .:? "disk_writes" - <*> o .:? "disk_reads" - <*> o .: "available_in_bytes" - <*> o .: "free_in_bytes" - <*> o .: "total_in_bytes" - <*> o .:? "type" - <*> o .: "dev" - <*> o .: "mount" - <*> o .: "path" - -newtype StringlyTypedDouble = StringlyTypedDouble { unStringlyTypedDouble :: Double } - - -instance FromJSON StringlyTypedDouble where - parseJSON = fmap StringlyTypedDouble . parseJSON . unStringlyTypeJSON - - -instance FromJSON NodeFSTotalStats where - parseJSON = withObject "NodeFSTotalStats" parse - where - parse o = NodeFSTotalStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") - <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") - <*> o .:? "disk_io_size_in_bytes" - <*> o .:? "disk_write_size_in_bytes" - <*> o .:? "disk_read_size_in_bytes" - <*> o .:? "disk_io_op" - <*> o .:? "disk_writes" - <*> o .:? 
"disk_reads" - <*> o .: "available_in_bytes" - <*> o .: "free_in_bytes" - <*> o .: "total_in_bytes" - -instance FromJSON NodeNetworkStats where - parseJSON = withObject "NodeNetworkStats" parse - where - parse o = do - tcp <- o .: "tcp" - NodeNetworkStats <$> tcp .: "out_rsts" - <*> tcp .: "in_errs" - <*> tcp .: "attempt_fails" - <*> tcp .: "estab_resets" - <*> tcp .: "retrans_segs" - <*> tcp .: "out_segs" - <*> tcp .: "in_segs" - <*> tcp .: "curr_estab" - <*> tcp .: "passive_opens" - <*> tcp .: "active_opens" - -instance FromJSON NodeThreadPoolsStats where - parseJSON = withObject "NodeThreadPoolsStats" parse - where - parse o = NodeThreadPoolsStats <$> o .: "snapshot" - <*> o .: "bulk" - <*> o .: "merge" - <*> o .: "get" - <*> o .: "management" - <*> o .:? "fetch_shard_store" - <*> o .: "optimize" - <*> o .: "flush" - <*> o .: "search" - <*> o .: "warmer" - <*> o .: "generic" - <*> o .: "suggest" - <*> o .: "refresh" - <*> o .: "index" - <*> o .:? "listener" - <*> o .:? "fetch_shard_started" - <*> o .: "percolate" -instance FromJSON NodeThreadPoolStats where - parseJSON = withObject "NodeThreadPoolStats" parse - where - parse o = NodeThreadPoolStats <$> o .: "completed" - <*> o .: "largest" - <*> o .: "rejected" - <*> o .: "active" - <*> o .: "queue" - <*> o .: "threads" - -instance FromJSON NodeJVMStats where - parseJSON = withObject "NodeJVMStats" parse - where - parse o = do - bufferPools <- o .: "buffer_pools" - mapped <- bufferPools .: "mapped" - direct <- bufferPools .: "direct" - gc <- o .: "gc" - collectors <- gc .: "collectors" - oldC <- collectors .: "old" - youngC <- collectors .: "young" - threads <- o .: "threads" - mem <- o .: "mem" - pools <- mem .: "pools" - oldM <- pools .: "old" - survivorM <- pools .: "survivor" - youngM <- pools .: "young" - NodeJVMStats <$> pure mapped - <*> pure direct - <*> pure oldC - <*> pure youngC - <*> threads .: "peak_count" - <*> threads .: "count" - <*> pure oldM - <*> pure survivorM - <*> pure youngM - <*> mem .: "non_heap_committed_in_bytes" - <*> mem .: "non_heap_used_in_bytes" - <*> mem .: "heap_max_in_bytes" - <*> mem .: "heap_committed_in_bytes" - <*> mem .: "heap_used_percent" - <*> mem .: "heap_used_in_bytes" - <*> (unMS <$> o .: "uptime_in_millis") - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON JVMBufferPoolStats where - parseJSON = withObject "JVMBufferPoolStats" parse - where - parse o = JVMBufferPoolStats <$> o .: "total_capacity_in_bytes" - <*> o .: "used_in_bytes" - <*> o .: "count" - -instance FromJSON JVMGCStats where - parseJSON = withObject "JVMGCStats" parse - where - parse o = JVMGCStats <$> (unMS <$> o .: "collection_time_in_millis") - <*> o .: "collection_count" - -instance FromJSON JVMPoolStats where - parseJSON = withObject "JVMPoolStats" parse - where - parse o = JVMPoolStats <$> o .: "peak_max_in_bytes" - <*> o .: "peak_used_in_bytes" - <*> o .: "max_in_bytes" - <*> o .: "used_in_bytes" - -instance FromJSON NodeProcessStats where - parseJSON = withObject "NodeProcessStats" parse - where - parse o = do - mem <- o .: "mem" - cpu <- o .: "cpu" - NodeProcessStats <$> mem .: "total_virtual_in_bytes" - <*> mem .: "share_in_bytes" - <*> mem .: "resident_in_bytes" - <*> (unMS <$> cpu .: "total_in_millis") - <*> (unMS <$> cpu .: "user_in_millis") - <*> (unMS <$> cpu .: "sys_in_millis") - <*> cpu .: "percent" - <*> o .: "open_file_descriptors" - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON NodeOSStats where - parseJSON = withObject "NodeOSStats" parse - where - parse o = do - swap <- o .: "swap" - mem 
<- o .: "mem" - cpu <- o .: "cpu" - load <- o .:? "load_average" - NodeOSStats <$> swap .: "free_in_bytes" - <*> swap .: "used_in_bytes" - <*> mem .: "actual_used_in_bytes" - <*> mem .: "actual_free_in_bytes" - <*> mem .: "used_percent" - <*> mem .: "free_percent" - <*> mem .: "used_in_bytes" - <*> mem .: "free_in_bytes" - <*> cpu .: "stolen" - <*> cpu .: "usage" - <*> cpu .: "idle" - <*> cpu .: "user" - <*> cpu .: "sys" - <*> pure load - <*> (unMS <$> o .: "uptime_in_millis") - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON LoadAvgs where - parseJSON = withArray "LoadAvgs" parse - where - parse v = case V.toList v of - [one, five, fifteen] -> LoadAvgs <$> parseJSON one - <*> parseJSON five - <*> parseJSON fifteen - _ -> fail "Expecting a triple of Doubles" - -instance FromJSON NodeIndicesStats where - parseJSON = withObject "NodeIndicesStats" parse - where - parse o = do - let (.::) mv k = case mv of - Just v -> Just <$> v .: k - Nothing -> pure Nothing - mRecovery <- o .:? "recovery" - mQueryCache <- o .:? "query_cache" - suggest <- o .: "suggest" - translog <- o .: "translog" - segments <- o .: "segments" - completion <- o .: "completion" - percolate <- o .: "percolate" - fielddata <- o .: "fielddata" - idCache <- o .: "id_cache" - filterCache <- o .: "filter_cache" - warmer <- o .: "warmer" - flush <- o .: "flush" - refresh <- o .: "refresh" - merges <- o .: "merges" - search <- o .: "search" - getStats <- o .: "get" - indexing <- o .: "indexing" - store <- o .: "store" - docs <- o .: "docs" - NodeIndicesStats <$> (fmap unMS <$> mRecovery .:: "throttle_time_in_millis") - <*> mRecovery .:: "current_as_target" - <*> mRecovery .:: "current_as_source" - <*> mQueryCache .:: "miss_count" - <*> mQueryCache .:: "hit_count" - <*> mQueryCache .:: "evictions" - <*> mQueryCache .:: "memory_size_in_bytes" - <*> suggest .: "current" - <*> (unMS <$> suggest .: "time_in_millis") - <*> suggest .: "total" - <*> translog .: "size_in_bytes" - <*> translog .: "operations" - <*> segments .:? "fixed_bit_set_memory_in_bytes" - <*> segments .: "version_map_memory_in_bytes" - <*> segments .:? 
"index_writer_max_memory_in_bytes" - <*> segments .: "index_writer_memory_in_bytes" - <*> segments .: "memory_in_bytes" - <*> segments .: "count" - <*> completion .: "size_in_bytes" - <*> percolate .: "queries" - <*> percolate .: "memory_size_in_bytes" - <*> percolate .: "current" - <*> (unMS <$> percolate .: "time_in_millis") - <*> percolate .: "total" - <*> fielddata .: "evictions" - <*> fielddata .: "memory_size_in_bytes" - <*> idCache .: "memory_size_in_bytes" - <*> filterCache .: "evictions" - <*> filterCache .: "memory_size_in_bytes" - <*> (unMS <$> warmer .: "total_time_in_millis") - <*> warmer .: "total" - <*> warmer .: "current" - <*> (unMS <$> flush .: "total_time_in_millis") - <*> flush .: "total" - <*> (unMS <$> refresh .: "total_time_in_millis") - <*> refresh .: "total" - <*> merges .: "total_size_in_bytes" - <*> merges .: "total_docs" - <*> (unMS <$> merges .: "total_time_in_millis") - <*> merges .: "total" - <*> merges .: "current_size_in_bytes" - <*> merges .: "current_docs" - <*> merges .: "current" - <*> search .: "fetch_current" - <*> (unMS <$> search .: "fetch_time_in_millis") - <*> search .: "fetch_total" - <*> search .: "query_current" - <*> (unMS <$> search .: "query_time_in_millis") - <*> search .: "query_total" - <*> search .: "open_contexts" - <*> getStats .: "current" - <*> (unMS <$> getStats .: "missing_time_in_millis") - <*> getStats .: "missing_total" - <*> (unMS <$> getStats .: "exists_time_in_millis") - <*> getStats .: "exists_total" - <*> (unMS <$> getStats .: "time_in_millis") - <*> getStats .: "total" - <*> (fmap unMS <$> indexing .:? "throttle_time_in_millis") - <*> indexing .:? "is_throttled" - <*> indexing .:? "noop_update_total" - <*> indexing .: "delete_current" - <*> (unMS <$> indexing .: "delete_time_in_millis") - <*> indexing .: "delete_total" - <*> indexing .: "index_current" - <*> (unMS <$> indexing .: "index_time_in_millis") - <*> indexing .: "index_total" - <*> (unMS <$> store .: "throttle_time_in_millis") - <*> store .: "size_in_bytes" - <*> docs .: "deleted" - <*> docs .: "count" - -instance FromJSON NodeBreakersStats where - parseJSON = withObject "NodeBreakersStats" parse - where - parse o = NodeBreakersStats <$> o .: "parent" - <*> o .: "request" - <*> o .: "fielddata" - -parseNodeStats :: FullNodeId -> Object -> Parser NodeStats -parseNodeStats fnid o = do - NodeStats <$> o .: "name" - <*> pure fnid - <*> o .:? 
"breakers" - <*> o .: "http" - <*> o .: "transport" - <*> o .: "fs" - <*> o .: "network" - <*> o .: "thread_pool" - <*> o .: "jvm" - <*> o .: "process" - <*> o .: "os" - <*> o .: "indices" - -parseNodeInfo :: FullNodeId -> Object -> Parser NodeInfo -parseNodeInfo nid o = - NodeInfo <$> o .: "http_address" - <*> o .: "build" - <*> o .: "version" - <*> o .: "ip" - <*> o .: "host" - <*> o .: "transport_address" - <*> o .: "name" - <*> pure nid - <*> o .: "plugins" - <*> o .: "http" - <*> o .: "transport" - <*> o .: "network" - <*> o .: "thread_pool" - <*> o .: "jvm" - <*> o .: "process" - <*> o .: "os" - <*> o .: "settings" - -instance FromJSON NodePluginInfo where - parseJSON = withObject "NodePluginInfo" parse - where - parse o = NodePluginInfo <$> o .: "site" - <*> o .: "jvm" - <*> o .: "description" - <*> o .: "version" - <*> o .: "name" - -instance FromJSON NodeHTTPInfo where - parseJSON = withObject "NodeHTTPInfo" parse - where - parse o = NodeHTTPInfo <$> o .: "max_content_length_in_bytes" - <*> parseJSON (Object o) - -instance FromJSON BoundTransportAddress where - parseJSON = withObject "BoundTransportAddress" parse - where - parse o = BoundTransportAddress <$> o .: "publish_address" - <*> o .: "bound_address" - -instance FromJSON NodeOSInfo where - parseJSON = withObject "NodeOSInfo" parse - where - parse o = do - swap <- o .: "swap" - mem <- o .: "mem" - NodeOSInfo <$> swap .: "total_in_bytes" - <*> mem .: "total_in_bytes" - <*> o .: "cpu" - <*> o .: "available_processors" - <*> (unMS <$> o .: "refresh_interval_in_millis") - - -instance FromJSON CPUInfo where - parseJSON = withObject "CPUInfo" parse - where - parse o = CPUInfo <$> o .: "cache_size_in_bytes" - <*> o .: "cores_per_socket" - <*> o .: "total_sockets" - <*> o .: "total_cores" - <*> o .: "mhz" - <*> o .: "model" - <*> o .: "vendor" - -instance FromJSON NodeProcessInfo where - parseJSON = withObject "NodeProcessInfo" parse - where - parse o = NodeProcessInfo <$> o .: "mlockall" - <*> o .: "max_file_descriptors" - <*> o .: "id" - <*> (unMS <$> o .: "refresh_interval_in_millis") - -instance FromJSON NodeJVMInfo where - parseJSON = withObject "NodeJVMInfo" parse - where - parse o = NodeJVMInfo <$> o .: "memory_pools" - <*> o .: "gc_collectors" - <*> o .: "mem" - <*> (posixMS <$> o .: "start_time_in_millis") - <*> o .: "vm_vendor" - <*> o .: "vm_version" - <*> o .: "vm_name" - <*> (unJVMVersion <$> o .: "version") - <*> o .: "pid" - -instance FromJSON JVMVersion where - parseJSON (String t) = - JVMVersion <$> parseJSON (String (T.replace "_" "." t)) - parseJSON v = JVMVersion <$> parseJSON v - -instance FromJSON JVMMemoryInfo where - parseJSON = withObject "JVMMemoryInfo" parse - where - parse o = JVMMemoryInfo <$> o .: "direct_max_in_bytes" - <*> o .: "non_heap_max_in_bytes" - <*> o .: "non_heap_init_in_bytes" - <*> o .: "heap_max_in_bytes" - <*> o .: "heap_init_in_bytes" - -instance FromJSON NodeThreadPoolsInfo where - parseJSON = withObject "NodeThreadPoolsInfo" parse - where - parse o = NodeThreadPoolsInfo <$> o .: "refresh" - <*> o .: "management" - <*> o .: "percolate" - <*> o .:? "listener" - <*> o .:? "fetch_shard_started" - <*> o .: "search" - <*> o .: "flush" - <*> o .: "warmer" - <*> o .: "optimize" - <*> o .: "bulk" - <*> o .: "suggest" - <*> o .: "merge" - <*> o .: "snapshot" - <*> o .: "get" - <*> o .:? 
"fetch_shard_store" - <*> o .: "index" - <*> o .: "generic" - -instance FromJSON NodeThreadPoolInfo where - parseJSON = withObject "NodeThreadPoolInfo" parse - where - parse o = do - ka <- maybe (return Nothing) (fmap Just . parseStringInterval) =<< o .:? "keep_alive" - NodeThreadPoolInfo <$> (parseJSON . unStringlyTypeJSON =<< o .: "queue_size") - <*> pure ka - <*> o .:? "min" - <*> o .:? "max" - <*> o .: "type" - -parseStringInterval :: (Monad m) => String -> m NominalDiffTime -parseStringInterval s = case span isNumber s of - ("", _) -> fail "Invalid interval" - (nS, unitS) -> case (readMay nS, readMay unitS) of - (Just n, Just unit) -> return (fromInteger (n * unitNDT unit)) - (Nothing, _) -> fail "Invalid interval number" - (_, Nothing) -> fail "Invalid interval unit" - where - unitNDT Seconds = 1 - unitNDT Minutes = 60 - unitNDT Hours = 60 * 60 - unitNDT Days = 24 * 60 * 60 - unitNDT Weeks = 7 * 24 * 60 * 60 - -instance FromJSON ThreadPoolSize where - parseJSON v = parseAsNumber v <|> parseAsString v - where - parseAsNumber = parseAsInt <=< parseJSON - parseAsInt (-1) = return ThreadPoolUnbounded - parseAsInt n - | n >= 0 = return (ThreadPoolBounded n) - | otherwise = fail "Thread pool size must be >= -1." - parseAsString = withText "ThreadPoolSize" $ \t -> - case first (readMay . T.unpack) (T.span isNumber t) of - (Just n, "k") -> return (ThreadPoolBounded (n * 1000)) - (Just n, "") -> return (ThreadPoolBounded n) - _ -> fail ("Invalid thread pool size " <> T.unpack t) - -instance FromJSON ThreadPoolType where - parseJSON = withText "ThreadPoolType" parse - where - parse "scaling" = return ThreadPoolScaling - parse "fixed" = return ThreadPoolFixed - parse "cached" = return ThreadPoolCached - parse e = fail ("Unexpected thread pool type" <> T.unpack e) - -instance FromJSON NodeTransportInfo where - parseJSON = withObject "NodeTransportInfo" parse - where - parse o = NodeTransportInfo <$> (maybe (return mempty) parseProfiles =<< o .:? "profiles") - <*> parseJSON (Object o) - parseProfiles (Object o) | HM.null o = return [] - parseProfiles v@(Array _) = parseJSON v - parseProfiles Null = return [] - parseProfiles _ = fail "Could not parse profiles" - -instance FromJSON NodeNetworkInfo where - parseJSON = withObject "NodeNetworkInfo" parse - where - parse o = NodeNetworkInfo <$> o .: "primary_interface" - <*> (unMS <$> o .: "refresh_interval_in_millis") - - -instance FromJSON NodeNetworkInterface where - parseJSON = withObject "NodeNetworkInterface" parse - where - parse o = NodeNetworkInterface <$> o .: "mac_address" - <*> o .: "name" - <*> o .: "address" - -newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a } - deriving (Show, Eq) - -instance FromJSON a => FromJSON (MaybeNA a) where - parseJSON (String "NA") = pure $ MaybeNA Nothing - parseJSON o = MaybeNA . 
Just <$> parseJSON o - - -data Suggest = Suggest { suggestText :: Text - , suggestName :: Text - , suggestType :: SuggestType - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON Suggest where - toJSON Suggest{..} = object [ "text" .= suggestText - , suggestName .= suggestType - ] - -instance FromJSON Suggest where - parseJSON (Object o) = do - suggestText' <- o .: "text" - let dropTextList = HM.toList $ HM.filterWithKey (\x _ -> x /= "text") o - suggestName' <- case dropTextList of - [(x, _)] -> return x - _ -> fail "error parsing Suggest field name" - suggestType' <- o .: suggestName' - return $ Suggest suggestText' suggestName' suggestType' - parseJSON x = typeMismatch "Suggest" x - -data SuggestType = SuggestTypePhraseSuggester PhraseSuggester - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON SuggestType where - toJSON (SuggestTypePhraseSuggester x) = object ["phrase" .= x] - -instance FromJSON SuggestType where - parseJSON = withObject "SuggestType" parse - where parse o = phraseSuggester `taggedWith` "phrase" - where taggedWith parser k = parser =<< o .: k - phraseSuggester = pure . SuggestTypePhraseSuggester - -data PhraseSuggester = - PhraseSuggester { phraseSuggesterField :: FieldName - , phraseSuggesterGramSize :: Maybe Int - , phraseSuggesterRealWordErrorLikelihood :: Maybe Int - , phraseSuggesterConfidence :: Maybe Int - , phraseSuggesterMaxErrors :: Maybe Int - , phraseSuggesterSeparator :: Maybe Text - , phraseSuggesterSize :: Maybe Size - , phraseSuggesterAnalyzer :: Maybe Analyzer - , phraseSuggesterShardSize :: Maybe Int - , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter - , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate - , phraseSuggesterCandidateGenerators :: [DirectGenerators] - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON PhraseSuggester where - toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField - , "gram_size" .= phraseSuggesterGramSize - , "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood - , "confidence" .= phraseSuggesterConfidence - , "max_errors" .= phraseSuggesterMaxErrors - , "separator" .= phraseSuggesterSeparator - , "size" .= phraseSuggesterSize - , "analyzer" .= phraseSuggesterAnalyzer - , "shard_size" .= phraseSuggesterShardSize - , "highlight" .= phraseSuggesterHighlight - , "collate" .= phraseSuggesterCollate - , "direct_generator" .= phraseSuggesterCandidateGenerators - ] - -instance FromJSON PhraseSuggester where - parseJSON = withObject "PhraseSuggester" parse - where parse o = PhraseSuggester - <$> o .: "field" - <*> o .:? "gram_size" - <*> o .:? "real_word_error_likelihood" - <*> o .:? "confidence" - <*> o .:? "max_errors" - <*> o .:? "separator" - <*> o .:? "size" - <*> o .:? "analyzer" - <*> o .:? "shard_size" - <*> o .:? "highlight" - <*> o .:? "collate" - <*> o .:? 
"direct_generator" .!= [] - -mkPhraseSuggester :: FieldName -> PhraseSuggester -mkPhraseSuggester fName = - PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing [] - -data PhraseSuggesterHighlighter = - PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text - , phraseSuggesterHighlighterPostTag :: Text - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON PhraseSuggesterHighlighter where - toJSON PhraseSuggesterHighlighter{..} = - object [ "pre_tag" .= phraseSuggesterHighlighterPreTag - , "post_tag" .= phraseSuggesterHighlighterPostTag - ] - -instance FromJSON PhraseSuggesterHighlighter where - parseJSON = withObject "PhraseSuggesterHighlighter" parse - where parse o = PhraseSuggesterHighlighter - <$> o .: "pre_tag" - <*> o .: "post_tag" - -data PhraseSuggesterCollate = - PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline - , phraseSuggesterCollatePrune :: Bool - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON PhraseSuggesterCollate where - toJSON PhraseSuggesterCollate{..} = object [ "query" .= object - [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) - ] - , "params" .= (params phraseSuggesterCollateTemplateQuery) - , "prune" .= phraseSuggesterCollatePrune - ] - -instance FromJSON PhraseSuggesterCollate where - parseJSON (Object o) = do - query' <- o .: "query" - inline' <- query' .: "inline" - params' <- o .: "params" - prune' <- o .:? "prune" .!= False - return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune' - parseJSON x = typeMismatch "PhraseSuggesterCollate" x - -data SuggestOptions = - SuggestOptions { suggestOptionsText :: Text - , suggestOptionsScore :: Double - , suggestOptionsFreq :: Maybe Int - , suggestOptionsHighlighted :: Maybe Text - } - deriving (Eq, Read, Show) - -instance FromJSON SuggestOptions where - parseJSON = withObject "SuggestOptions" parse - where parse o = SuggestOptions - <$> o .: "text" - <*> o .: "score" - <*> o .:? "freq" - <*> o .:? 
"highlighted" - -data SuggestResponse = - SuggestResponse { suggestResponseText :: Text - , suggestResponseOffset :: Int - , suggestResponseLength :: Int - , suggestResponseOptions :: [SuggestOptions] - } - deriving (Eq, Read, Show) - -instance FromJSON SuggestResponse where - parseJSON = withObject "SuggestResponse" parse - where parse o = SuggestResponse - <$> o .: "text" - <*> o .: "offset" - <*> o .: "length" - <*> o .: "options" - -data NamedSuggestionResponse = - NamedSuggestionResponse { nsrName :: Text - , nsrResponses :: [SuggestResponse] - } - deriving (Eq, Read, Show) - -instance FromJSON NamedSuggestionResponse where - parseJSON (Object o) = do - suggestionName' <- case HM.toList o of - [(x, _)] -> return x - _ -> fail "error parsing NamedSuggestionResponse name" - suggestionResponses' <- o .: suggestionName' - return $ NamedSuggestionResponse suggestionName' suggestionResponses' - - parseJSON x = typeMismatch "NamedSuggestionResponse" x - -data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing - | DirectGeneratorSuggestModePopular - | DirectGeneratorSuggestModeAlways - deriving (Show, Eq, Read, Generic, Typeable) - -instance ToJSON DirectGeneratorSuggestModeTypes where - toJSON DirectGeneratorSuggestModeMissing = "missing" - toJSON DirectGeneratorSuggestModePopular = "popular" - toJSON DirectGeneratorSuggestModeAlways = "always" - -instance FromJSON DirectGeneratorSuggestModeTypes where - parseJSON = withText "DirectGeneratorSuggestModeTypes" parse - where parse "missing" = pure DirectGeneratorSuggestModeMissing - parse "popular" = pure DirectGeneratorSuggestModePopular - parse "always" = pure DirectGeneratorSuggestModeAlways - parse f = fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f) - -data DirectGenerators = DirectGenerators - { directGeneratorsField :: FieldName - , directGeneratorsSize :: Maybe Int - , directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes - , directGeneratorMaxEdits :: Maybe Double - , directGeneratorPrefixLength :: Maybe Int - , directGeneratorMinWordLength :: Maybe Int - , directGeneratorMaxInspections :: Maybe Int - , directGeneratorMinDocFreq :: Maybe Double - , directGeneratorMaxTermFreq :: Maybe Double - , directGeneratorPreFilter :: Maybe Text - , directGeneratorPostFilter :: Maybe Text - } - deriving (Show, Eq, Read, Generic, Typeable) - - -instance ToJSON DirectGenerators where - toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField - , "size" .= directGeneratorsSize - , "suggest_mode" .= directGeneratorSuggestMode - , "max_edits" .= directGeneratorMaxEdits - , "prefix_length" .= directGeneratorPrefixLength - , "min_word_length" .= directGeneratorMinWordLength - , "max_inspections" .= directGeneratorMaxInspections - , "min_doc_freq" .= directGeneratorMinDocFreq - , "max_term_freq" .= directGeneratorMaxTermFreq - , "pre_filter" .= directGeneratorPreFilter - , "post_filter" .= directGeneratorPostFilter - ] - -instance FromJSON DirectGenerators where - parseJSON = withObject "DirectGenerators" parse - where parse o = DirectGenerators - <$> o .: "field" - <*> o .:? "size" - <*> o .: "suggest_mode" - <*> o .:? "max_edits" - <*> o .:? "prefix_length" - <*> o .:? "min_word_length" - <*> o .:? "max_inspections" - <*> o .:? "min_doc_freq" - <*> o .:? "max_term_freq" - <*> o .:? "pre_filter" - <*> o .:? 
"post_filter" - -mkDirectGenerators :: FieldName -> DirectGenerators -mkDirectGenerators fn = DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing diff --git a/src/Database/V1/Bloodhound/Types/Internal.hs b/src/Database/V1/Bloodhound/Types/Internal.hs deleted file mode 100644 index 626ef43..0000000 --- a/src/Database/V1/Bloodhound/Types/Internal.hs +++ /dev/null @@ -1,57 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -------------------------------------------------------------------------------- --- | --- Module : Database.Bloodhound.Types.Internal --- Copyright : (C) 2014 Chris Allen --- License : BSD-style (see the file LICENSE) --- Maintainer : Chris Allen --- Stability : provisional --- Portability : DeriveGeneric, RecordWildCards --- --- Internal data types for Bloodhound. These types may change without --- notice so import at your own risk. -------------------------------------------------------------------------------- -module Database.V1.Bloodhound.Types.Internal - ( BHEnv(..) - , Server(..) - , MonadBH(..) - ) where - - -import Control.Applicative as A -import Control.Monad.Reader -import Data.Aeson -import Data.Text (Text) -import Data.Typeable (Typeable) -import GHC.Generics (Generic) -import Network.HTTP.Client - -{-| Common environment for Elasticsearch calls. Connections will be - pipelined according to the provided HTTP connection manager. --} -data BHEnv = BHEnv { bhServer :: Server - , bhManager :: Manager - , bhRequestHook :: Request -> IO Request - -- ^ Low-level hook that is run before every request is sent. Used to implement custom authentication strategies. Defaults to 'return' with 'mkBHEnv'. - } - -instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where - getBHEnv = ask - -{-| 'Server' is used with the client functions to point at the ES instance --} -newtype Server = Server Text deriving (Eq, Show, Generic, Typeable, FromJSON) - -{-| All API calls to Elasticsearch operate within - MonadBH - . The idea is that it can be easily embedded in your - own monad transformer stack. A default instance for a ReaderT and - alias 'BH' is provided for the simple case. --} -class (Functor m, A.Applicative m, MonadIO m) => MonadBH m where - getBHEnv :: m BHEnv - diff --git a/src/Database/V5/Bloodhound/Client.hs b/src/Database/V5/Bloodhound/Client.hs index 6b2bd0e..5488f5c 100644 --- a/src/Database/V5/Bloodhound/Client.hs +++ b/src/Database/V5/Bloodhound/Client.hs @@ -5,7 +5,7 @@ ------------------------------------------------------------------------------- -- | -- Module : Database.Bloodhound.Client --- Copyright : (C) 2014 Chris Allen +-- Copyright : (C) 2014, 2018 Chris Allen -- License : BSD-style (see the file LICENSE) -- Maintainer : Chris Allen -- Stability : provisional @@ -216,6 +216,7 @@ dispatch dMethod url body = do $ setRequestIgnoreStatus $ initReq { method = dMethod , requestHeaders = + -- "application/x-ndjson" for bulk ("Content-Type", "application/json") : requestHeaders initReq , requestBody = reqBody } -- req <- liftIO $ reqHook $ setRequestIgnoreStatus $ initReq { method = dMethod @@ -316,7 +317,7 @@ instance FromJSON GSRs where parseJSON = withObject "Collection of GenericSnapshotRepo" parse where parse = fmap GSRs . mapM (uncurry go) . 
HM.toList - go rawName = withObject "GenericSnapshotRepo" $ \o -> do + go rawName = withObject "GenericSnapshotRepo" $ \o -> GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type" <*> o .: "settings" @@ -554,16 +555,18 @@ deleteIndex (IndexName indexName) = updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply updateIndexSettings updates (IndexName indexName) = bindM2 put url (return body) - where url = joinPath [indexName, "_settings"] - body = Just (encode jsonBody) - jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) + where + url = joinPath [indexName, "_settings"] + body = Just (encode jsonBody) + jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates]) getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName -> m (Either EsError IndexSettingsSummary) -getIndexSettings (IndexName indexName) = do +getIndexSettings (IndexName indexName) = parseEsResponse =<< get =<< url - where url = joinPath [indexName, "_settings"] + where + url = joinPath [indexName, "_settings"] -- | 'forceMergeIndex' -- @@ -703,7 +706,7 @@ listIndices = url = joinPath ["_cat/indices?format=json"] parse body = maybe (throwM (EsProtocolException body)) return $ do vals <- decode body - forM vals $ \val -> do + forM vals $ \val -> case val of Object obj -> do indexVal <- HM.lookup "index" obj @@ -860,9 +863,11 @@ deleteDocument (IndexName indexName) -- >>> _ <- runBH' $ bulk stream -- >>> _ <- runBH' $ refreshIndex testIndex bulk :: MonadBH m => V.Vector BulkOperation -> m Reply -bulk bulkOps = bindM2 post url (return body) - where url = joinPath ["_bulk"] - body = Just $ encodeBulkOperations bulkOps +bulk bulkOps = + bindM2 post url (return body) + where + url = joinPath ["_bulk"] + body = Just $ encodeBulkOperations bulkOps -- | 'encodeBulkOperations' is a convenience function for dumping a vector of 'BulkOperation' -- into an 'L.ByteString' @@ -872,12 +877,15 @@ bulk bulkOps = bindM2 post url (return body) -- "\n{\"index\":{\"_type\":\"tweet\",\"_id\":\"2\",\"_index\":\"twitter\"}}\n{\"name\":\"blah\"}\n" encodeBulkOperations :: V.Vector BulkOperation -> L.ByteString encodeBulkOperations stream = collapsed where - blobs = fmap encodeBulkOperation stream - mashedTaters = mash (mempty :: Builder) blobs - collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n") + blobs = + fmap encodeBulkOperation stream + mashedTaters = + mash (mempty :: Builder) blobs + collapsed = + toLazyByteString $ mappend mashedTaters (byteString "\n") mash :: Builder -> V.Vector L.ByteString -> Builder -mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x)) +mash = V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x) mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value mkBulkStreamValue operation indexName mappingName docId = @@ -886,6 +894,12 @@ mkBulkStreamValue operation indexName mappingName docId = , "_type" .= mappingName , "_id" .= docId]] +mkBulkStreamValueAuto :: Text -> Text -> Text -> Value +mkBulkStreamValueAuto operation indexName mappingName = + object [operation .= + object [ "_index" .= indexName + , "_type" .= mappingName]] + -- | 'encodeBulkOperation' is a convenience function for dumping a single 'BulkOperation' -- into an 'L.ByteString' -- @@ -899,6 +913,18 @@ encodeBulkOperation (BulkIndex (IndexName indexName) where metadata = mkBulkStreamValue "index" indexName mappingName docId blob = encode metadata `mappend` "\n" `mappend` encode value +encodeBulkOperation (BulkIndexAuto 
(IndexName indexName) + (MappingName mappingName) + value) = blob + where metadata = mkBulkStreamValueAuto "index" indexName mappingName + blob = encode metadata `mappend` "\n" `mappend` encode value + +encodeBulkOperation (BulkIndexEncodingAuto (IndexName indexName) + (MappingName mappingName) + encoding) = toLazyByteString blob + where metadata = toEncoding (mkBulkStreamValueAuto "index" indexName mappingName) + blob = fromEncoding metadata <> "\n" <> fromEncoding encoding + encodeBulkOperation (BulkCreate (IndexName indexName) (MappingName mappingName) (DocId docId) value) = blob @@ -1099,7 +1125,7 @@ scanSearch indexName mappingName search = do -- >>> mkSearch (Just query) Nothing -- Search {queryBody = Just (TermQuery (Term {termField = "user", termValue = "bitemyapp"}) Nothing), filterBody = Nothing, sortBody = Nothing, aggBody = Nothing, highlight = Nothing, trackSortScores = False, from = From 0, size = Size 10, searchType = SearchTypeQueryThenFetch, fields = Nothing, source = Nothing} mkSearch :: Maybe Query -> Maybe Filter -> Search -mkSearch query filter = Search query filter Nothing Nothing Nothing False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing +mkSearch query filter = Search query filter Nothing Nothing Nothing False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing Nothing -- | 'mkAggregateSearch' is a helper function that defaults everything in a 'Search' except for -- the 'Query' and the 'Aggregation'. @@ -1109,7 +1135,7 @@ mkSearch query filter = Search query filter Nothing Nothing Nothing False (From -- TermsAgg (TermsAggregation {term = Left "user", termInclude = Nothing, termExclude = Nothing, termOrder = Nothing, termMinDocCount = Nothing, termSize = Nothing, termShardSize = Nothing, termCollectMode = Just BreadthFirst, termExecutionHint = Nothing, termAggs = Nothing}) -- >>> let myAggregation = mkAggregateSearch Nothing $ mkAggregations "users" terms mkAggregateSearch :: Maybe Query -> Aggregations -> Search -mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSearchAggs) Nothing False (From 0) (Size 0) SearchTypeQueryThenFetch Nothing Nothing Nothing +mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSearchAggs) Nothing False (From 0) (Size 0) SearchTypeQueryThenFetch Nothing Nothing Nothing Nothing -- | 'mkHighlightSearch' is a helper function that defaults everything in a 'Search' except for -- the 'Query' and the 'Aggregation'. @@ -1118,7 +1144,7 @@ mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSear -- >>> let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing] -- >>> let search = mkHighlightSearch (Just query) testHighlight mkHighlightSearch :: Maybe Query -> Highlights -> Search -mkHighlightSearch query searchHighlights = Search query Nothing Nothing Nothing (Just searchHighlights) False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing +mkHighlightSearch query searchHighlights = Search query Nothing Nothing Nothing (Just searchHighlights) False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing Nothing -- | 'pageSearch' is a helper function that takes a search and assigns the from -- and size fields for the search. 
The from parameter defines the offset diff --git a/src/Database/V5/Bloodhound/Internal/Aggregation.hs b/src/Database/V5/Bloodhound/Internal/Aggregation.hs new file mode 100644 index 0000000..f6fc1ab --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Aggregation.hs @@ -0,0 +1,467 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Database.V5.Bloodhound.Internal.Aggregation where + +import Bloodhound.Import + +import qualified Data.HashMap.Strict as HM +import qualified Data.Map.Strict as M +import qualified Data.Text as T + +import Database.V5.Bloodhound.Internal.Client +import Database.V5.Bloodhound.Internal.Highlight (HitHighlight) +import Database.V5.Bloodhound.Internal.Newtypes +import Database.V5.Bloodhound.Internal.Query +import Database.V5.Bloodhound.Internal.Sort + +type Aggregations = M.Map Text Aggregation + +emptyAggregations :: Aggregations +emptyAggregations = M.empty + +mkAggregations :: Text -> Aggregation -> Aggregations +mkAggregations name aggregation = M.insert name aggregation emptyAggregations + +data Aggregation = TermsAgg TermsAggregation + | CardinalityAgg CardinalityAggregation + | DateHistogramAgg DateHistogramAggregation + | ValueCountAgg ValueCountAggregation + | FilterAgg FilterAggregation + | DateRangeAgg DateRangeAggregation + | MissingAgg MissingAggregation + | TopHitsAgg TopHitsAggregation + | StatsAgg StatisticsAggregation + deriving (Eq, Show) + +instance ToJSON Aggregation where + toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) = + omitNulls ["terms" .= omitNulls [ toJSON' term, + "include" .= include, + "exclude" .= exclude, + "order" .= order, + "min_doc_count" .= minDocCount, + "size" .= size, + "shard_size" .= shardSize, + "collect_mode" .= collectMode, + "execution_hint" .= executionHint + ], + "aggs" .= termAggs ] + where + toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y } + + toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) = + object ["cardinality" .= omitNulls [ "field" .= field, + "precisionThreshold" .= precisionThreshold + ] + ] + + toJSON (DateHistogramAgg + (DateHistogramAggregation field interval format + preZone postZone preOffset postOffset dateHistoAggs)) = + omitNulls ["date_histogram" .= omitNulls [ "field" .= field, + "interval" .= interval, + "format" .= format, + "pre_zone" .= preZone, + "post_zone" .= postZone, + "pre_offset" .= preOffset, + "post_offset" .= postOffset + ], + "aggs" .= dateHistoAggs ] + toJSON (ValueCountAgg a) = object ["value_count" .= v] + where v = case a of + (FieldValueCount (FieldName n)) -> + object ["field" .= n] + (ScriptValueCount s) -> + object ["script" .= s] + toJSON (FilterAgg (FilterAggregation filt ags)) = + omitNulls [ "filter" .= filt + , "aggs" .= ags] + toJSON (DateRangeAgg a) = object [ "date_range" .= a + ] + toJSON (MissingAgg (MissingAggregation{..})) = + object ["missing" .= object ["field" .= maField]] + + toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) = + omitNulls ["top_hits" .= omitNulls [ "size" .= msize + , "from" .= mfrom + , "sort" .= msort + ] + ] + + toJSON (StatsAgg (StatisticsAggregation typ field)) = + object [stType .= omitNulls [ "field" .= field ]] + where + stType | typ == Basic = "stats" + | otherwise = "extended_stats" + +data TopHitsAggregation = TopHitsAggregation + { taFrom :: Maybe From + , taSize :: Maybe Size + , taSort :: Maybe Sort + } deriving 
(Eq, Show) + +data MissingAggregation = MissingAggregation + { maField :: Text + } deriving (Eq, Show) + +data TermsAggregation = TermsAggregation + { term :: Either Text Text + , termInclude :: Maybe TermInclusion + , termExclude :: Maybe TermInclusion + , termOrder :: Maybe TermOrder + , termMinDocCount :: Maybe Int + , termSize :: Maybe Int + , termShardSize :: Maybe Int + , termCollectMode :: Maybe CollectionMode + , termExecutionHint :: Maybe ExecutionHint + , termAggs :: Maybe Aggregations + } deriving (Eq, Show) + +data CardinalityAggregation = CardinalityAggregation + { cardinalityField :: FieldName, + precisionThreshold :: Maybe Int + } deriving (Eq, Show) + +data DateHistogramAggregation = DateHistogramAggregation + { dateField :: FieldName + , dateInterval :: Interval + , dateFormat :: Maybe Text + -- pre and post deprecated in 1.5 + , datePreZone :: Maybe Text + , datePostZone :: Maybe Text + , datePreOffset :: Maybe Text + , datePostOffset :: Maybe Text + , dateAggs :: Maybe Aggregations + } deriving (Eq, Show) + +data DateRangeAggregation = DateRangeAggregation + { draField :: FieldName + , draFormat :: Maybe Text + , draRanges :: NonEmpty DateRangeAggRange + } deriving (Eq, Show) + +instance ToJSON DateRangeAggregation where + toJSON DateRangeAggregation {..} = + omitNulls [ "field" .= draField + , "format" .= draFormat + , "ranges" .= toList draRanges + ] + +data DateRangeAggRange = + DateRangeFrom DateMathExpr + | DateRangeTo DateMathExpr + | DateRangeFromAndTo DateMathExpr DateMathExpr + deriving (Eq, Show) + +instance ToJSON DateRangeAggRange where + toJSON (DateRangeFrom e) = object [ "from" .= e ] + toJSON (DateRangeTo e) = object [ "to" .= e ] + toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] + +-- | See the Elasticsearch value count aggregation docs for more information. +data ValueCountAggregation = + FieldValueCount FieldName + | ScriptValueCount Script + deriving (Eq, Show) + +-- | Single-bucket filter aggregations. See the Elasticsearch filter aggregation docs for more information. 
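-- An illustrative sketch, not part of the original diff: counting only
-- documents matching a term. It assumes V5's 'Filter' newtype over 'Query'
-- and the 'Term' constructor from this library.
--
-- @
-- activeUsers :: Aggregation
-- activeUsers =
--   FilterAgg (FilterAggregation (Filter (TermQuery (Term "user" "bitemyapp") Nothing)) Nothing)
-- @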
+data FilterAggregation = FilterAggregation + { faFilter :: Filter + , faAggs :: Maybe Aggregations } + deriving (Eq, Show) + +data StatisticsAggregation = StatisticsAggregation + { statsType :: StatsType + , statsField :: FieldName } + deriving (Eq, Show) + +data StatsType + = Basic + | Extended + deriving (Eq, Show) + +mkTermsAggregation :: Text -> TermsAggregation +mkTermsAggregation t = + TermsAggregation (Left t) + Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing + +mkTermsScriptAggregation :: Text -> TermsAggregation +mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing + +mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation +mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing + +mkCardinalityAggregation :: FieldName -> CardinalityAggregation +mkCardinalityAggregation t = CardinalityAggregation t Nothing + +mkStatsAggregation :: FieldName -> StatisticsAggregation +mkStatsAggregation = StatisticsAggregation Basic + +mkExtendedStatsAggregation :: FieldName -> StatisticsAggregation +mkExtendedStatsAggregation = StatisticsAggregation Extended + +type AggregationResults = M.Map Text Value + +class BucketAggregation a where + key :: a -> BucketValue + docCount :: a -> Int + aggs :: a -> Maybe AggregationResults + +data Bucket a = Bucket + { buckets :: [a] + } deriving (Read, Show) + +instance (FromJSON a) => FromJSON (Bucket a) where + parseJSON (Object v) = Bucket <$> + v .: "buckets" + parseJSON _ = mempty + +data BucketValue = TextValue Text + | ScientificValue Scientific + | BoolValue Bool deriving (Read, Show) + +instance FromJSON BucketValue where + parseJSON (String t) = return $ TextValue t + parseJSON (Number s) = return $ ScientificValue s + parseJSON (Bool b) = return $ BoolValue b + parseJSON _ = mempty + +data TermInclusion = TermInclusion Text + | TermPattern Text Text deriving (Eq, Show) + +instance ToJSON TermInclusion where + toJSON (TermInclusion x) = toJSON x + toJSON (TermPattern pattern flags) = + omitNulls [ "pattern" .= pattern + , "flags" .= flags] + +data TermOrder = TermOrder + { termSortField :: Text + , termSortOrder :: SortOrder } deriving (Eq, Show) + +instance ToJSON TermOrder where + toJSON (TermOrder termSortField termSortOrder) = + object [termSortField .= termSortOrder] + +data CollectionMode = BreadthFirst + | DepthFirst deriving (Eq, Show) + +instance ToJSON CollectionMode where + toJSON BreadthFirst = "breadth_first" + toJSON DepthFirst = "depth_first" + +data ExecutionHint = Ordinals + | GlobalOrdinals + | GlobalOrdinalsHash + | GlobalOrdinalsLowCardinality + | Map deriving (Eq, Show) + +instance ToJSON ExecutionHint where + toJSON Ordinals = "ordinals" + toJSON GlobalOrdinals = "global_ordinals" + toJSON GlobalOrdinalsHash = "global_ordinals_hash" + toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality" + toJSON Map = "map" + +-- | See the Elasticsearch date math docs for more information. 
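-- For instance, per the 'ToJSON' rendering defined below, an anchor plus
-- modifiers serializes to a single date-math string:
--
-- @
-- toJSON (DateMathExpr DMNow [SubtractTime 1 DMDay, RoundDownTo DMDay])
-- -- => String "now-1d/d"
-- @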
+data DateMathExpr = + DateMathExpr DateMathAnchor [DateMathModifier] + deriving (Eq, Show) + +instance ToJSON DateMathExpr where + toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods)) + where fmtA DMNow = "now" + fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||" + fmtMod (AddTime n u) = "+" <> showText n <> fmtU u + fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u + fmtMod (RoundDownTo u) = "/" <> fmtU u + fmtU DMYear = "y" + fmtU DMMonth = "M" + fmtU DMWeek = "w" + fmtU DMDay = "d" + fmtU DMHour = "h" + fmtU DMMinute = "m" + fmtU DMSecond = "s" + +-- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from. +data DateMathAnchor = + DMNow + | DMDate Day + deriving (Eq, Show) + +data DateMathModifier = + AddTime Int DateMathUnit + | SubtractTime Int DateMathUnit + | RoundDownTo DateMathUnit + deriving (Eq, Show) + +data DateMathUnit = + DMYear + | DMMonth + | DMWeek + | DMDay + | DMHour + | DMMinute + | DMSecond + deriving (Eq, Show) + +data TermsResult = TermsResult + { termKey :: BucketValue + , termsDocCount :: Int + , termsAggs :: Maybe AggregationResults + } deriving (Read, Show) + +instance FromJSON TermsResult where + parseJSON (Object v) = TermsResult <$> + v .: "key" <*> + v .: "doc_count" <*> + (pure $ getNamedSubAgg v ["key", "doc_count"]) + parseJSON _ = mempty + +instance BucketAggregation TermsResult where + key = termKey + docCount = termsDocCount + aggs = termsAggs + +data DateHistogramResult = DateHistogramResult + { dateKey :: Int + , dateKeyStr :: Maybe Text + , dateDocCount :: Int + , dateHistogramAggs :: Maybe AggregationResults + } deriving (Show) + +instance FromJSON DateHistogramResult where + parseJSON (Object v) = DateHistogramResult <$> + v .: "key" <*> + v .:? "key_as_string" <*> + v .: "doc_count" <*> + (pure $ getNamedSubAgg v [ "key" + , "doc_count" + , "key_as_string" + ] + ) + parseJSON _ = mempty + +instance BucketAggregation DateHistogramResult where + key = TextValue . showText . dateKey + docCount = dateDocCount + aggs = dateHistogramAggs + +data DateRangeResult = DateRangeResult + { dateRangeKey :: Text + , dateRangeFrom :: Maybe UTCTime + , dateRangeFromAsString :: Maybe Text + , dateRangeTo :: Maybe UTCTime + , dateRangeToAsString :: Maybe Text + , dateRangeDocCount :: Int + , dateRangeAggs :: Maybe AggregationResults + } deriving (Eq, Show) + +instance FromJSON DateRangeResult where + parseJSON = withObject "DateRangeResult" parse + where parse v = DateRangeResult <$> + v .: "key" <*> + (fmap posixMS <$> v .:? "from") <*> + v .:? "from_as_string" <*> + (fmap posixMS <$> v .:? "to") <*> + v .:? "to_as_string" <*> + v .: "doc_count" <*> + (pure $ getNamedSubAgg v [ "key" + , "from" + , "from_as_string" + , "to" + , "to_as_string" + , "doc_count" + ] + ) + +instance BucketAggregation DateRangeResult where + key = TextValue . 
dateRangeKey + docCount = dateRangeDocCount + aggs = dateRangeAggs + +toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult) +toTerms = toAggResult + +toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) +toDateHistogram = toAggResult + +toMissing :: Text -> AggregationResults -> Maybe MissingResult +toMissing = toAggResult + +toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) +toTopHits = toAggResult + +toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a +toAggResult t a = M.lookup t a >>= deserialize + where deserialize = parseMaybe parseJSON + +-- Try to get an AggregationResults when we don't know the +-- field name. We filter out the known keys to try to minimize the noise. +getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults +getNamedSubAgg o knownKeys = maggRes + where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o + maggRes + | HM.null unknownKeys = Nothing + | otherwise = Just . M.fromList $ HM.toList unknownKeys + +data MissingResult = MissingResult + { missingDocCount :: Int + } deriving (Show) + +instance FromJSON MissingResult where + parseJSON = withObject "MissingResult" parse + where parse v = MissingResult <$> v .: "doc_count" + +data TopHitResult a = TopHitResult + { tarHits :: (SearchHits a) + } deriving Show + +instance (FromJSON a) => FromJSON (TopHitResult a) where + parseJSON (Object v) = TopHitResult <$> + v .: "hits" + parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" + +data SearchHits a = + SearchHits { hitsTotal :: Int + , maxScore :: Score + , hits :: [Hit a] } deriving (Eq, Show) + + +instance (FromJSON a) => FromJSON (SearchHits a) where + parseJSON (Object v) = SearchHits <$> + v .: "total" <*> + v .: "max_score" <*> + v .: "hits" + parseJSON _ = empty + +instance Semigroup (SearchHits a) where + (SearchHits ta ma ha) <> (SearchHits tb mb hb) = + SearchHits (ta + tb) (max ma mb) (ha <> hb) + +instance Monoid (SearchHits a) where + mempty = SearchHits 0 Nothing mempty + mappend = (<>) + + +data Hit a = + Hit { hitIndex :: IndexName + , hitType :: MappingName + , hitDocId :: DocId + , hitScore :: Score + , hitSource :: Maybe a + , hitFields :: Maybe HitFields + , hitHighlight :: Maybe HitHighlight } deriving (Eq, Show) + +instance (FromJSON a) => FromJSON (Hit a) where + parseJSON (Object v) = Hit <$> + v .: "_index" <*> + v .: "_type" <*> + v .: "_id" <*> + v .: "_score" <*> + v .:? "_source" <*> + v .:? "fields" <*> + v .:? 
"highlight" + parseJSON _ = empty diff --git a/src/Database/V5/Bloodhound/Internal/Analysis.hs b/src/Database/V5/Bloodhound/Internal/Analysis.hs new file mode 100644 index 0000000..481a042 --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Analysis.hs @@ -0,0 +1,312 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} + +module Database.V5.Bloodhound.Internal.Analysis where + +import Bloodhound.Import + +import qualified Data.Map.Strict as M +import qualified Data.Text as T + +import Database.V5.Bloodhound.Internal.Newtypes +import Database.V5.Bloodhound.Internal.StringlyTyped + +data Analysis = Analysis + { analysisAnalyzer :: M.Map Text AnalyzerDefinition + , analysisTokenizer :: M.Map Text TokenizerDefinition + , analysisTokenFilter :: M.Map Text TokenFilterDefinition + } deriving (Eq, Show) + +instance ToJSON Analysis where + toJSON (Analysis analyzer tokenizer tokenFilter) = object + [ "analyzer" .= analyzer + , "tokenizer" .= tokenizer + , "filter" .= tokenFilter + ] + +instance FromJSON Analysis where + parseJSON = withObject "Analysis" $ \m -> Analysis + <$> m .: "analyzer" + <*> m .:? "tokenizer" .!= M.empty + <*> m .:? "filter" .!= M.empty + +newtype Tokenizer = + Tokenizer Text + deriving (Eq, Show, ToJSON, FromJSON) + +data AnalyzerDefinition = AnalyzerDefinition + { analyzerDefinitionTokenizer :: Maybe Tokenizer + , analyzerDefinitionFilter :: [TokenFilter] + } deriving (Eq,Show) + +instance ToJSON AnalyzerDefinition where + toJSON (AnalyzerDefinition tokenizer tokenFilter) = object $ catMaybes + [ fmap ("tokenizer" .=) tokenizer + , Just $ "filter" .= tokenFilter + ] + +instance FromJSON AnalyzerDefinition where + parseJSON = withObject "AnalyzerDefinition" $ \m -> AnalyzerDefinition + <$> m .:? "tokenizer" + <*> m .:? "filter" .!= [] + +newtype TokenizerDefinition = + TokenizerDefinitionNgram Ngram + deriving (Eq,Show) + +instance ToJSON TokenizerDefinition where + toJSON x = case x of + TokenizerDefinitionNgram (Ngram minGram maxGram tokenChars) -> object + [ "type" .= ("ngram" :: Text) + , "min_gram" .= minGram + , "max_gram" .= maxGram + , "token_chars" .= tokenChars + ] + +instance FromJSON TokenizerDefinition where + parseJSON = withObject "TokenizerDefinition" $ \m -> do + typ <- m .: "type" :: Parser Text + case typ of + "ngram" -> fmap TokenizerDefinitionNgram $ Ngram + <$> fmap unStringlyTypedInt (m .: "min_gram") + <*> fmap unStringlyTypedInt (m .: "max_gram") + <*> m .: "token_chars" + _ -> fail "invalid TokenizerDefinition" + +data Ngram = Ngram + { ngramMinGram :: Int + , ngramMaxGram :: Int + , ngramTokenChars :: [TokenChar] + } deriving (Eq,Show) + +data TokenChar = + TokenLetter + | TokenDigit + | TokenWhitespace + | TokenPunctuation + | TokenSymbol + deriving (Eq,Show) + +instance ToJSON TokenChar where + toJSON t = String $ case t of + TokenLetter -> "letter" + TokenDigit -> "digit" + TokenWhitespace -> "whitespace" + TokenPunctuation -> "punctuation" + TokenSymbol -> "symbol" + +instance FromJSON TokenChar where + parseJSON = withText "TokenChar" $ \t -> case t of + "letter" -> return TokenLetter + "digit" -> return TokenDigit + "whitespace" -> return TokenWhitespace + "punctuation" -> return TokenPunctuation + "symbol" -> return TokenSymbol + _ -> fail "invalid TokenChar" + +-- | Token filters are used to create custom analyzers. 
+data TokenFilterDefinition
+  = TokenFilterDefinitionLowercase (Maybe Language)
+  | TokenFilterDefinitionUppercase (Maybe Language)
+  | TokenFilterDefinitionApostrophe
+  | TokenFilterDefinitionReverse
+  | TokenFilterDefinitionSnowball Language
+  | TokenFilterDefinitionShingle Shingle
+  deriving (Eq, Show)
+
+instance ToJSON TokenFilterDefinition where
+  toJSON x = case x of
+    TokenFilterDefinitionLowercase mlang -> object $ catMaybes
+      [ Just $ "type" .= ("lowercase" :: Text)
+      , fmap (\lang -> "language" .= languageToText lang) mlang
+      ]
+    TokenFilterDefinitionUppercase mlang -> object $ catMaybes
+      [ Just $ "type" .= ("uppercase" :: Text)
+      , fmap (\lang -> "language" .= languageToText lang) mlang
+      ]
+    TokenFilterDefinitionApostrophe -> object
+      [ "type" .= ("apostrophe" :: Text)
+      ]
+    TokenFilterDefinitionReverse -> object
+      [ "type" .= ("reverse" :: Text)
+      ]
+    TokenFilterDefinitionSnowball lang -> object
+      [ "type" .= ("snowball" :: Text)
+      , "language" .= languageToText lang
+      ]
+    TokenFilterDefinitionShingle s -> object
+      [ "type" .= ("shingle" :: Text)
+      , "max_shingle_size" .= shingleMaxSize s
+      , "min_shingle_size" .= shingleMinSize s
+      , "output_unigrams" .= shingleOutputUnigrams s
+      , "output_unigrams_if_no_shingles" .= shingleOutputUnigramsIfNoShingles s
+      , "token_separator" .= shingleTokenSeparator s
+      , "filler_token" .= shingleFillerToken s
+      ]
+
+instance FromJSON TokenFilterDefinition where
+  parseJSON = withObject "TokenFilterDefinition" $ \m -> do
+    t <- m .: "type"
+    case (t :: Text) of
+      "reverse" -> return TokenFilterDefinitionReverse
+      "apostrophe" -> return TokenFilterDefinitionApostrophe
+      "lowercase" -> TokenFilterDefinitionLowercase
+        <$> m .:? "language"
+      "uppercase" -> TokenFilterDefinitionUppercase
+        <$> m .:? "language"
+      "snowball" -> TokenFilterDefinitionSnowball
+        <$> m .: "language"
+      "shingle" -> fmap TokenFilterDefinitionShingle $ Shingle
+        <$> (fmap.fmap) unStringlyTypedInt (m .:? "max_shingle_size") .!= 2
+        <*> (fmap.fmap) unStringlyTypedInt (m .:? "min_shingle_size") .!= 2
+        <*> (fmap.fmap) unStringlyTypedBool (m .:? "output_unigrams") .!= True
+        <*> (fmap.fmap) unStringlyTypedBool (m .:? "output_unigrams_if_no_shingles") .!= False
+        <*> m .:? "token_separator" .!= " "
+        <*> m .:? "filler_token" .!= "_"
+      _ -> fail ("unrecognized token filter type: " ++ T.unpack t)
+
+-- | The set of languages that can be passed to various analyzers,
+-- filters, etc. in ElasticSearch. Most data types in this module
+-- that have a 'Language' field are actually only able to handle a
+-- subset of these languages. Consult the official ElasticSearch
+-- documentation to see what is actually supported.
+data Language
+  = Arabic
+  | Armenian
+  | Basque
+  | Bengali
+  | Brazilian
+  | Bulgarian
+  | Catalan
+  | Cjk
+  | Czech
+  | Danish
+  | Dutch
+  | English
+  | Finnish
+  | French
+  | Galician
+  | German
+  | German2
+  | Greek
+  | Hindi
+  | Hungarian
+  | Indonesian
+  | Irish
+  | Italian
+  | Kp
+  | Latvian
+  | Lithuanian
+  | Lovins
+  | Norwegian
+  | Persian
+  | Porter
+  | Portuguese
+  | Romanian
+  | Russian
+  | Sorani
+  | Spanish
+  | Swedish
+  | Thai
+  | Turkish
+  deriving (Eq, Show)
+
+instance ToJSON Language where
+  toJSON = String .
languageToText + +instance FromJSON Language where + parseJSON = withText "Language" $ \t -> case languageFromText t of + Nothing -> fail "not a supported ElasticSearch language" + Just lang -> return lang + +languageToText :: Language -> Text +languageToText x = case x of + Arabic -> "arabic" + Armenian -> "armenian" + Basque -> "basque" + Bengali -> "bengali" + Brazilian -> "brazilian" + Bulgarian -> "bulgarian" + Catalan -> "catalan" + Cjk -> "cjk" + Czech -> "czech" + Danish -> "danish" + Dutch -> "dutch" + English -> "english" + Finnish -> "finnish" + French -> "french" + Galician -> "galician" + German -> "german" + German2 -> "german2" + Greek -> "greek" + Hindi -> "hindi" + Hungarian -> "hungarian" + Indonesian -> "indonesian" + Irish -> "irish" + Italian -> "italian" + Kp -> "kp" + Latvian -> "latvian" + Lithuanian -> "lithuanian" + Lovins -> "lovins" + Norwegian -> "norwegian" + Persian -> "persian" + Porter -> "porter" + Portuguese -> "portuguese" + Romanian -> "romanian" + Russian -> "russian" + Sorani -> "sorani" + Spanish -> "spanish" + Swedish -> "swedish" + Thai -> "thai" + Turkish -> "turkish" + +languageFromText :: Text -> Maybe Language +languageFromText x = case x of + "arabic" -> Just Arabic + "armenian" -> Just Armenian + "basque" -> Just Basque + "bengali" -> Just Bengali + "brazilian" -> Just Brazilian + "bulgarian" -> Just Bulgarian + "catalan" -> Just Catalan + "cjk" -> Just Cjk + "czech" -> Just Czech + "danish" -> Just Danish + "dutch" -> Just Dutch + "english" -> Just English + "finnish" -> Just Finnish + "french" -> Just French + "galician" -> Just Galician + "german" -> Just German + "german2" -> Just German2 + "greek" -> Just Greek + "hindi" -> Just Hindi + "hungarian" -> Just Hungarian + "indonesian" -> Just Indonesian + "irish" -> Just Irish + "italian" -> Just Italian + "kp" -> Just Kp + "latvian" -> Just Latvian + "lithuanian" -> Just Lithuanian + "lovins" -> Just Lovins + "norwegian" -> Just Norwegian + "persian" -> Just Persian + "porter" -> Just Porter + "portuguese" -> Just Portuguese + "romanian" -> Just Romanian + "russian" -> Just Russian + "sorani" -> Just Sorani + "spanish" -> Just Spanish + "swedish" -> Just Swedish + "thai" -> Just Thai + "turkish" -> Just Turkish + _ -> Nothing + +data Shingle = Shingle + { shingleMaxSize :: Int + , shingleMinSize :: Int + , shingleOutputUnigrams :: Bool + , shingleOutputUnigramsIfNoShingles :: Bool + , shingleTokenSeparator :: Text + , shingleFillerToken :: Text + } deriving (Eq, Show) diff --git a/src/Database/V5/Bloodhound/Internal/Client.hs b/src/Database/V5/Bloodhound/Internal/Client.hs new file mode 100644 index 0000000..27c807b --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Client.hs @@ -0,0 +1,2408 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.V5.Bloodhound.Internal.Client where + +import Bloodhound.Import + +import qualified Data.Text as T +import qualified Data.Traversable as DT +import qualified Data.HashMap.Strict as HM +import qualified Data.Vector as V +import qualified Data.Version as Vers +import GHC.Enum +import Network.HTTP.Client +import qualified Text.ParserCombinators.ReadP as RP +import Text.Read (Read(..)) +import qualified Text.Read as TR + +import Database.V5.Bloodhound.Internal.Analysis +import Database.V5.Bloodhound.Internal.Newtypes +import 
Database.V5.Bloodhound.Internal.Query
+import Database.V5.Bloodhound.Internal.StringlyTyped
+
+{-| Common environment for Elasticsearch calls. Connections will be
+    pipelined according to the provided HTTP connection manager.
+-}
+data BHEnv = BHEnv { bhServer :: Server
+                   , bhManager :: Manager
+                   , bhRequestHook :: Request -> IO Request
+                   -- ^ Low-level hook that is run before every request is sent. Used to implement custom authentication strategies. Defaults to 'return' with 'mkBHEnv'.
+                   }
+
+instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
+  getBHEnv = ask
+
+{-| 'Server' is used with the client functions to point at the ES instance
+-}
+newtype Server = Server Text deriving (Eq, Show, FromJSON)
+
+{-| All API calls to Elasticsearch operate within 'MonadBH'. The idea is
+    that it can be easily embedded in your own monad transformer stack.
+    A default instance for a ReaderT and alias 'BH' is provided for the
+    simple case.
+-}
+class (Functor m, Applicative m, MonadIO m) => MonadBH m where
+  getBHEnv :: m BHEnv
+
+-- | Create a 'BHEnv' with all optional fields defaulted. HTTP hook
+-- will be a noop. You can use the exported fields to customize
+-- it further, e.g.:
+--
+-- >>> (mkBHEnv myServer myManager) { bhRequestHook = customHook }
+mkBHEnv :: Server -> Manager -> BHEnv
+mkBHEnv s m = BHEnv s m return
+
+newtype BH m a = BH {
+  unBH :: ReaderT BHEnv m a
+  } deriving ( Functor
+             , Applicative
+             , Monad
+             , MonadIO
+             , MonadState s
+             , MonadWriter w
+             , MonadError e
+             , Alternative
+             , MonadPlus
+             , MonadFix
+             , MonadThrow
+             , MonadCatch
+             , MonadMask)
+
+instance MonadTrans BH where
+  lift = BH . lift
+
+instance (MonadReader r m) => MonadReader r (BH m) where
+  ask = lift ask
+  local f (BH (ReaderT m)) = BH $ ReaderT $ \r ->
+    local f (m r)
+
+instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where
+  getBHEnv = BH getBHEnv
+
+runBH :: BHEnv -> BH m a -> m a
+runBH e f = runReaderT (unBH f) e
+
+{-| 'Version' is embedded in 'Status' -}
+data Version = Version { number :: VersionNumber
+                       , build_hash :: BuildHash
+                       , build_date :: UTCTime
+                       , build_snapshot :: Bool
+                       , lucene_version :: VersionNumber }
+  deriving (Eq, Show)
+
+-- | Traditional software versioning number
+newtype VersionNumber = VersionNumber
+  { versionNumber :: Vers.Version }
+  deriving (Eq, Ord, Show)
+
+{-| 'Status' is a data type for describing the JSON body returned by
+    Elasticsearch when you query its status. This was deprecated in 1.2.0.
+-}
+data Status = Status
+  { name :: Text
+  , cluster_name :: Text
+  , cluster_uuid :: Text
+  , version :: Version
+  , tagline :: Text }
+  deriving (Eq, Show)
+
+instance FromJSON Status where
+  parseJSON (Object v) = Status <$>
+    v .: "name" <*>
+    v .: "cluster_name" <*>
+    v .: "cluster_uuid" <*>
+    v .: "version" <*>
+    v .: "tagline"
+  parseJSON _ = empty
+
+{-| 'IndexSettings' is used to configure the shards and replicas when
+    you create an Elasticsearch Index.
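+
+    For example, a small one-shard, two-replica index could be described
+    as (a sketch using the constructors referenced below):
+
+    > IndexSettings (ShardCount 1) (ReplicaCount 2)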
+-}
+
+data IndexSettings = IndexSettings
+  { indexShards :: ShardCount
+  , indexReplicas :: ReplicaCount }
+  deriving (Eq, Show)
+
+instance ToJSON IndexSettings where
+  toJSON (IndexSettings s r) = object ["settings" .=
+                                 object ["index" .=
+                                   object ["number_of_shards" .= s, "number_of_replicas" .= r]
+                                 ]
+                               ]
+
+instance FromJSON IndexSettings where
+  parseJSON = withObject "IndexSettings" parse
+    where parse o = do s <- o .: "settings"
+                       i <- s .: "index"
+                       IndexSettings <$> i .: "number_of_shards"
+                                     <*> i .: "number_of_replicas"
+
+{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and
+    2 replicas. -}
+defaultIndexSettings :: IndexSettings
+defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
+-- defaultIndexSettings is exported by Database.Bloodhound as well
+-- no trailing slashes in servers, library handles building the path.
+
+{-| 'ForceMergeIndexSettings' is used to configure index optimization.
+    See the Elasticsearch force-merge documentation for more info.
+-}
+data ForceMergeIndexSettings =
+  ForceMergeIndexSettings { maxNumSegments :: Maybe Int
+                          -- ^ Number of segments to optimize to. 1 will fully optimize the index. If omitted, the default behavior is to only optimize if the server deems it necessary.
+                          , onlyExpungeDeletes :: Bool
+                          -- ^ Should the optimize process only expunge segments with deletes in them? If the purpose of the optimization is to free disk space, this should be set to True.
+                          , flushAfterOptimize :: Bool
+                          -- ^ Should a flush be performed after the optimize.
+                          } deriving (Eq, Show)
+
+{-| 'defaultForceMergeIndexSettings' implements the default settings that
+    ElasticSearch uses for index optimization. 'maxNumSegments' is Nothing,
+    'onlyExpungeDeletes' is False, and 'flushAfterOptimize' is True.
+-}
+defaultForceMergeIndexSettings :: ForceMergeIndexSettings
+defaultForceMergeIndexSettings = ForceMergeIndexSettings Nothing False True
+
+{-| 'UpdatableIndexSetting's are settings which may be updated after an
+    index is created.
+-}
+data UpdatableIndexSetting = NumberOfReplicas ReplicaCount
+                           -- ^ The number of replicas each shard has.
+                           | AutoExpandReplicas ReplicaBounds
+                           | BlocksReadOnly Bool
+                           -- ^ Set to True to have the index read only. False to allow writes and metadata changes.
+                           | BlocksRead Bool
+                           -- ^ Set to True to disable read operations against the index.
+                           | BlocksWrite Bool
+                           -- ^ Set to True to disable write operations against the index.
+                           | BlocksMetaData Bool
+                           -- ^ Set to True to disable metadata operations against the index.
+                           | RefreshInterval NominalDiffTime
+                           -- ^ The async refresh interval of a shard
+                           | IndexConcurrency Int
+                           | FailOnMergeFailure Bool
+                           | TranslogFlushThresholdOps Int
+                           -- ^ When to flush on operations.
+                           | TranslogFlushThresholdSize Bytes
+                           -- ^ When to flush based on translog (bytes) size.
+                           | TranslogFlushThresholdPeriod NominalDiffTime
+                           -- ^ When to flush based on a period of not flushing.
+                           | TranslogDisableFlush Bool
+                           -- ^ Disables flushing. Note, should be set for a short interval and then enabled.
+                           | CacheFilterMaxSize (Maybe Bytes)
+                           -- ^ The maximum size of filter cache (per segment in shard).
+                           | CacheFilterExpire (Maybe NominalDiffTime)
+                           -- ^ The expire after access time for filter cache.
+                           | GatewaySnapshotInterval NominalDiffTime
+                           -- ^ The gateway snapshot interval (only applies to shared gateways).
+                           | RoutingAllocationInclude (NonEmpty NodeAttrFilter)
+                           -- ^ A node matching any rule will be allowed to host shards from the index.
+ | RoutingAllocationExclude (NonEmpty NodeAttrFilter) + -- ^ A node matching any rule will NOT be allowed to host shards from the index. + | RoutingAllocationRequire (NonEmpty NodeAttrFilter) + -- ^ Only nodes matching all rules will be allowed to host shards from the index. + | RoutingAllocationEnable AllocationPolicy + -- ^ Enables shard allocation for a specific index. + | RoutingAllocationShardsPerNode ShardCount + -- ^ Controls the total number of shards (replicas and primaries) allowed to be allocated on a single node. + | RecoveryInitialShards InitialShardCount + -- ^ When using local gateway a particular shard is recovered only if there can be allocated quorum shards in the cluster. + | GCDeletes NominalDiffTime + | TTLDisablePurge Bool + -- ^ Disables temporarily the purge of expired docs. + | TranslogFSType FSType + | CompressionSetting Compression + | IndexCompoundFormat CompoundFormat + | IndexCompoundOnFlush Bool + | WarmerEnabled Bool + | MappingTotalFieldsLimit Int + | AnalysisSetting Analysis + -- ^ Analysis is not a dynamic setting and can only be performed on a closed index. + deriving (Eq, Show) + +attrFilterJSON :: NonEmpty NodeAttrFilter -> Value +attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs) + | NodeAttrFilter (NodeAttrName n) vs <- toList fs] + +parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter) +parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse + where parse o = case HM.toList o of + [] -> fail "Expected non-empty list of NodeAttrFilters" + x:xs -> DT.mapM (uncurry parse') (x :| xs) + parse' n = withText "Text" $ \t -> + case T.splitOn "," t of + fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs)) + [] -> fail "Expected non-empty list of filter values" + +instance ToJSON UpdatableIndexSetting where + toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x + toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x + toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) + toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x + toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x + toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x + toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x + toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) + toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x + toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x + toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) + toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) + toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) + toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) + toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) + toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x + toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x + toJSON (RecoveryInitialShards x) = oPath ("index" :| 
["recovery", "initial_shards"]) x + toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) + toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x + toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x + toJSON (CompressionSetting x) = oPath ("index" :| ["codec"]) x + toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x + toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x + toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x + toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x + toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x + toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x + toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x + toJSON (MappingTotalFieldsLimit x) = oPath ("index" :| ["mapping","total_fields","limit"]) x + toJSON (AnalysisSetting x) = oPath ("index" :| ["analysis"]) x + +instance FromJSON UpdatableIndexSetting where + parseJSON = withObject "UpdatableIndexSetting" parse + where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] + <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] + <|> refreshInterval `taggedAt` ["index", "refresh_interval"] + <|> indexConcurrency `taggedAt` ["index", "concurrency"] + <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] + <|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"] + <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] + <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] + <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] + <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] + <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] + <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] + <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] + <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] + <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] + <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] + <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] + <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] + <|> gcDeletes `taggedAt` ["index", "gc_deletes"] + <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] + <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] + <|> compressionSetting `taggedAt` ["index", "codec"] + <|> compoundFormat `taggedAt` ["index", "compound_format"] + <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] + <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] + <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] + <|> blocksRead `taggedAt` ["blocks", "read"] + <|> blocksWrite `taggedAt` ["blocks", "write"] + <|> blocksMetaData `taggedAt` ["blocks", "metadata"] + <|> mappingTotalFieldsLimit `taggedAt` ["index", "mapping", "total_fields", "limit"] + <|> analysisSetting `taggedAt` ["index", "analysis"] + where taggedAt f ks = taggedAt' f (Object o) ks + taggedAt' f v [] = + f =<< (parseJSON v <|> parseJSON (unStringlyTypeJSON v)) + taggedAt' f v (k:ks) = + withObject "Object" (\o -> do v' <- o .: k + taggedAt' f v' ks) v + numberOfReplicas 
= pure . NumberOfReplicas
+          autoExpandReplicas = pure . AutoExpandReplicas
+          refreshInterval = pure . RefreshInterval . ndtJSON
+          indexConcurrency = pure . IndexConcurrency
+          failOnMergeFailure = pure . FailOnMergeFailure
+          translogFlushThresholdOps = pure . TranslogFlushThresholdOps
+          translogFlushThresholdSize = pure . TranslogFlushThresholdSize
+          translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON
+          translogDisableFlush = pure . TranslogDisableFlush
+          cacheFilterMaxSize = pure . CacheFilterMaxSize
+          cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON
+          gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON
+          routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter
+          routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter
+          routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter
+          routingAllocationEnable = pure . RoutingAllocationEnable
+          routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode
+          recoveryInitialShards = pure . RecoveryInitialShards
+          gcDeletes = pure . GCDeletes . ndtJSON
+          ttlDisablePurge = pure . TTLDisablePurge
+          translogFSType = pure . TranslogFSType
+          compressionSetting = pure . CompressionSetting
+          compoundFormat = pure . IndexCompoundFormat
+          compoundOnFlush = pure . IndexCompoundOnFlush
+          warmerEnabled = pure . WarmerEnabled
+          blocksReadOnly = pure . BlocksReadOnly
+          blocksRead = pure . BlocksRead
+          blocksWrite = pure . BlocksWrite
+          blocksMetaData = pure . BlocksMetaData
+          mappingTotalFieldsLimit = pure . MappingTotalFieldsLimit
+          analysisSetting = pure . AnalysisSetting
+
+data ReplicaBounds = ReplicasBounded Int Int
+                   | ReplicasLowerBounded Int
+                   | ReplicasUnbounded
+                   deriving (Eq, Show)
+
+instance ToJSON ReplicaBounds where
+  toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b)
+  toJSON (ReplicasLowerBounded a) = String (showText a <> "-all")
+  toJSON ReplicasUnbounded = Bool False
+
+instance FromJSON ReplicaBounds where
+  parseJSON v = withText "ReplicaBounds" parseText v
+            <|> withBool "ReplicaBounds" parseBool v
+    where parseText t = case T.splitOn "-" t of
+                          [a, "all"] -> ReplicasLowerBounded <$> parseReadText a
+                          [a, b] -> ReplicasBounded <$> parseReadText a
+                                                    <*> parseReadText b
+                          _ -> fail ("Could not parse ReplicaBounds: " <> show t)
+          parseBool False = pure ReplicasUnbounded
+          parseBool _ = fail "ReplicasUnbounded cannot be represented with True"
+
+data Compression
+  = CompressionDefault
+    -- ^ Compress with LZ4
+  | CompressionBest
+    -- ^ Compress with DEFLATE. Elastic reports
+    -- that this can reduce disk use by 15%-25%.
+  deriving (Eq, Show)
+
+instance ToJSON Compression where
+  toJSON x = case x of
+    CompressionDefault -> toJSON ("default" :: Text)
+    CompressionBest -> toJSON ("best_compression" :: Text)
+
+instance FromJSON Compression where
+  parseJSON = withText "Compression" $ \t -> case t of
+    "default" -> return CompressionDefault
+    "best_compression" -> return CompressionBest
+    _ -> fail "invalid compression codec"
+
+-- | A measure of bytes used for various configurations. You may want
+-- to use smart constructors like 'gigabytes' for larger values.
+-- +-- >>> gigabytes 9 +-- Bytes 9000000000 +-- +-- >>> megabytes 9 +-- Bytes 9000000 +-- +-- >>> kilobytes 9 +-- Bytes 9000 +newtype Bytes = + Bytes Int + deriving (Eq, Show, Ord, ToJSON, FromJSON) + +gigabytes :: Int -> Bytes +gigabytes n = megabytes (1000 * n) + + +megabytes :: Int -> Bytes +megabytes n = kilobytes (1000 * n) + + +kilobytes :: Int -> Bytes +kilobytes n = Bytes (1000 * n) + + +data FSType = FSSimple + | FSBuffered deriving (Eq, Show) + +instance ToJSON FSType where + toJSON FSSimple = "simple" + toJSON FSBuffered = "buffered" + +instance FromJSON FSType where + parseJSON = withText "FSType" parse + where parse "simple" = pure FSSimple + parse "buffered" = pure FSBuffered + parse t = fail ("Invalid FSType: " <> show t) + +data InitialShardCount = QuorumShards + | QuorumMinus1Shards + | FullShards + | FullMinus1Shards + | ExplicitShards Int + deriving (Eq, Show) + +instance FromJSON InitialShardCount where + parseJSON v = withText "InitialShardCount" parseText v + <|> ExplicitShards <$> parseJSON v + where parseText "quorum" = pure QuorumShards + parseText "quorum-1" = pure QuorumMinus1Shards + parseText "full" = pure FullShards + parseText "full-1" = pure FullMinus1Shards + parseText _ = mzero + +instance ToJSON InitialShardCount where + toJSON QuorumShards = String "quorum" + toJSON QuorumMinus1Shards = String "quorum-1" + toJSON FullShards = String "full" + toJSON FullMinus1Shards = String "full-1" + toJSON (ExplicitShards x) = toJSON x + +data NodeAttrFilter = NodeAttrFilter + { nodeAttrFilterName :: NodeAttrName + , nodeAttrFilterValues :: NonEmpty Text } + deriving (Eq, Ord, Show) + +newtype NodeAttrName = NodeAttrName Text deriving (Eq, Ord, Show) + +data CompoundFormat = CompoundFileFormat Bool + | MergeSegmentVsTotalIndex Double + -- ^ percentage between 0 and 1 where 0 is false, 1 is true + deriving (Eq, Show) + +instance ToJSON CompoundFormat where + toJSON (CompoundFileFormat x) = Bool x + toJSON (MergeSegmentVsTotalIndex x) = toJSON x + +instance FromJSON CompoundFormat where + parseJSON v = CompoundFileFormat <$> parseJSON v + <|> MergeSegmentVsTotalIndex <$> parseJSON v + +newtype NominalDiffTimeJSON = + NominalDiffTimeJSON { ndtJSON :: NominalDiffTime } + +instance ToJSON NominalDiffTimeJSON where + toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") + +instance FromJSON NominalDiffTimeJSON where + parseJSON = withText "NominalDiffTime" parse + where parse t = case T.takeEnd 1 t of + "s" -> NominalDiffTimeJSON . 
fromInteger <$> parseReadText (T.dropEnd 1 t)
+                _ -> fail "Invalid or missing NominalDiffTime unit (expected s)"
+
+data IndexSettingsSummary = IndexSettingsSummary
+  { sSummaryIndexName :: IndexName
+  , sSummaryFixedSettings :: IndexSettings
+  , sSummaryUpdateable :: [UpdatableIndexSetting]}
+  deriving (Eq, Show)
+
+parseSettings :: Object -> Parser [UpdatableIndexSetting]
+parseSettings o = do
+  o' <- o .: "index"
+  -- slice the index object into singleton hashmaps and try to parse each
+  parses <- forM (HM.toList o') $ \(k, v) -> do
+    -- blocks are now nested into the "index" key, which is not how they're serialized
+    let atRoot = Object (HM.singleton k v)
+    let atIndex = Object (HM.singleton "index" atRoot)
+    optional (parseJSON atRoot <|> parseJSON atIndex)
+  return (catMaybes parses)
+
+instance FromJSON IndexSettingsSummary where
+  parseJSON = withObject "IndexSettingsSummary" parse
+    where parse o = case HM.toList o of
+                      [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn)
+                        <$> parseJSON v
+                        <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings")
+                      _ -> fail "Expected single-key object with index name"
+          redundant (NumberOfReplicas _) = True
+          redundant _ = False
+
+{-| 'Reply' is a type synonym for a 'Network.HTTP.Client.Response' with a
+    lazy ByteString body. -}
+type Reply = Network.HTTP.Client.Response LByteString
+
+{-| 'OpenCloseIndex' is a sum type for opening and closing indices.
+-}
+data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Show)
+
+data FieldType = GeoPointType
+               | GeoShapeType
+               | FloatType
+               | IntegerType
+               | LongType
+               | ShortType
+               | ByteType deriving (Eq, Show)
+
+newtype FieldDefinition = FieldDefinition
+  { fieldType :: FieldType
+  } deriving (Eq, Show)
+
+{-| An 'IndexTemplate' defines a template that will automatically be
+    applied to newly created indices. The template includes both
+    'IndexSettings' and mappings, and a simple 'TemplatePattern' that
+    controls whether the template will be applied to a newly created index.
+    Specify mappings as follows: @[toJSON TweetMapping, ...]@
+
+    https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html
+-}
+data IndexTemplate =
+  IndexTemplate { templatePattern :: TemplatePattern
+                , templateSettings :: Maybe IndexSettings
+                , templateMappings :: [Value]
+                }
+
+instance ToJSON IndexTemplate where
+  toJSON (IndexTemplate p s m) = merge
+    (object [ "template" .= p
+            , "mappings" .= foldl' merge (object []) m
+            ])
+    (toJSON s)
+    where
+      merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2
+      merge o Null = o
+      merge _ _ = undefined
+
+data MappingField =
+  MappingField { mappingFieldName :: FieldName
+               , fieldDefinition :: FieldDefinition }
+  deriving (Eq, Show)
+
+{-| Support for type reification of 'Mapping's is currently incomplete.
+    For now, the mapping API verbiage expects a 'ToJSON'able blob.
+
+    Indexes have mappings, mappings are schemas for the documents contained
+    in the index. I'd recommend having only one mapping per index, always
+    having a mapping, and keeping different kinds of documents separated
+    if possible.
+-}
+data Mapping =
+  Mapping { typeName :: TypeName
+          , mappingFields :: [MappingField] }
+  deriving (Eq, Show)
+
+data AllocationPolicy = AllocAll
+                      -- ^ Allows shard allocation for all shards.
+                      | AllocPrimaries
+                      -- ^ Allows shard allocation only for primary shards.
+                      | AllocNewPrimaries
+                      -- ^ Allows shard allocation only for primary shards for new indices.
+                      | AllocNone
+                      -- ^ No shard allocation is allowed
+                      deriving (Eq, Show)
+
+instance ToJSON AllocationPolicy where
+  toJSON AllocAll = String "all"
+  toJSON AllocPrimaries = String "primaries"
+  toJSON AllocNewPrimaries = String "new_primaries"
+  toJSON AllocNone = String "none"
+
+instance FromJSON AllocationPolicy where
+  parseJSON = withText "AllocationPolicy" parse
+    where parse "all" = pure AllocAll
+          parse "primaries" = pure AllocPrimaries
+          parse "new_primaries" = pure AllocNewPrimaries
+          parse "none" = pure AllocNone
+          parse t = fail ("Invalid AllocationPolicy: " <> show t)
+
+{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
+    operation: index, create, delete, and update. 'BulkIndex' behaves like an
+    "upsert", 'BulkCreate' will fail if a document already exists at the DocId.
+    Consult the Elasticsearch bulk API documentation for further explanation.
+
+    Warning: Bulk operations suffixed with @Auto@ rely on ElasticSearch to
+    generate the id. Often, people use auto-generated identifiers when
+    ElasticSearch is the only place that their data is stored. Do not let
+    ElasticSearch be the only place your data is stored. It does not guarantee
+    durability, and it may silently discard data.
+    This is discussed further on GitHub.
+-}
+data BulkOperation =
+    BulkIndex IndexName MappingName DocId Value
+    -- ^ Create the document, replacing it if it already exists.
+  | BulkIndexAuto IndexName MappingName Value
+    -- ^ Create a document with an autogenerated id.
+  | BulkIndexEncodingAuto IndexName MappingName Encoding
+    -- ^ Create a document with an autogenerated id. Use fast JSON encoding.
+  | BulkCreate IndexName MappingName DocId Value
+    -- ^ Create a document, failing if it already exists.
+  | BulkCreateEncoding IndexName MappingName DocId Encoding
+    -- ^ Create a document, failing if it already exists. Use fast JSON encoding.
+  | BulkDelete IndexName MappingName DocId
+    -- ^ Delete the document
+  | BulkUpdate IndexName MappingName DocId Value
+    -- ^ Update the document, merging the new value with the existing one.
+  deriving (Eq, Show)
+
+{-| 'EsResult' describes the standard wrapper JSON document that you see in
+    successful Elasticsearch lookups or lookups that couldn't find the document.
+-}
+data EsResult a = EsResult { _index :: Text
+                           , _type :: Text
+                           , _id :: Text
+                           , foundResult :: Maybe (EsResultFound a)} deriving (Eq, Show)
+
+{-| 'EsResultFound' contains the document and its metadata inside of an
+    'EsResult' when the document was successfully found.
+-}
+data EsResultFound a =
+  EsResultFound { _version :: DocVersion
+                , _source :: a }
+  deriving (Eq, Show)
+
+instance (FromJSON a) => FromJSON (EsResult a) where
+  parseJSON jsonVal@(Object v) = do
+    found <- v .:? "found" .!= False
+    fr <- if found
+          then parseJSON jsonVal
+          else return Nothing
+    EsResult <$> v .: "_index" <*>
+                 v .: "_type" <*>
+                 v .: "_id" <*>
+                 pure fr
+  parseJSON _ = empty
+
+instance (FromJSON a) => FromJSON (EsResultFound a) where
+  parseJSON (Object v) = EsResultFound <$>
+    v .: "_version" <*>
+    v .: "_source"
+  parseJSON _ = empty
+
+{-| 'EsError' is the generic type that will be returned when there was a
+    problem. If you can't parse the expected response, it's a good idea to
+    try parsing this.
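+
+    As an illustrative sketch (values invented), a body like
+    @{"status": 404, "error": "IndexMissingException"}@ would parse to
+    @EsError 404 "IndexMissingException"@ via the instance below.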
+-} +data EsError = + EsError { errorStatus :: Int + , errorMessage :: Text } + deriving (Eq, Show) + +instance FromJSON EsError where + parseJSON (Object v) = EsError <$> + v .: "status" <*> + (v .: "error" <|> (v .: "error" >>= (.: "reason"))) + parseJSON _ = empty + +{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response +returned by the ElasticSearch server. If you encounter this error, please +verify that your domain data types and FromJSON instances are working properly +(for example, the 'a' of '[Hit a]' in 'SearchResult.searchHits.hits'). If you're +sure that your mappings are correct, then this error may be an indication of an +incompatibility between Bloodhound and ElasticSearch. Please open a bug report +and be sure to include the exception body. +-} +newtype EsProtocolException = EsProtocolException + { esProtoExBody :: LByteString + } deriving (Eq, Show) + +instance Exception EsProtocolException + +data IndexAlias = IndexAlias { srcIndex :: IndexName + , indexAlias :: IndexAliasName } deriving (Eq, Show) + +data IndexAliasAction = + AddAlias IndexAlias IndexAliasCreate + | RemoveAlias IndexAlias + deriving (Eq, Show) + +data IndexAliasCreate = + IndexAliasCreate { aliasCreateRouting :: Maybe AliasRouting + , aliasCreateFilter :: Maybe Filter} + deriving (Eq, Show) + +data AliasRouting = + AllAliasRouting RoutingValue + | GranularAliasRouting (Maybe SearchAliasRouting) (Maybe IndexAliasRouting) + deriving (Eq, Show) + +newtype SearchAliasRouting = + SearchAliasRouting (NonEmpty RoutingValue) + deriving (Eq, Show) + +instance ToJSON SearchAliasRouting where + toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) + +instance FromJSON SearchAliasRouting where + parseJSON = withText "SearchAliasRouting" parse + where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) + +newtype IndexAliasRouting = + IndexAliasRouting RoutingValue + deriving (Eq, Show, ToJSON, FromJSON) + +newtype RoutingValue = + RoutingValue { routingValue :: Text } + deriving (Eq, Show, ToJSON, FromJSON) + +newtype IndexAliasesSummary = + IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } + deriving (Eq, Show) + +instance FromJSON IndexAliasesSummary where + parseJSON = withObject "IndexAliasesSummary" parse + where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) + go ixn = withObject "index aliases" $ \ia -> do + aliases <- ia .:? "aliases" .!= mempty + forM (HM.toList aliases) $ \(aName, v) -> do + let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) + IndexAliasSummary indexAlias <$> parseJSON v + + +instance ToJSON IndexAliasAction where + toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] + where Object iaObj = toJSON ia + Object optsObj = toJSON opts + toJSON (RemoveAlias ia) = object ["remove" .= iaObj] + where Object iaObj = toJSON ia + +instance ToJSON IndexAlias where + toJSON IndexAlias {..} = object ["index" .= srcIndex + , "alias" .= indexAlias + ] + +instance ToJSON IndexAliasCreate where + toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) + where filterObj = maybe mempty (HM.singleton "filter" . 
toJSON) aliasCreateFilter
+          Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting
+
+instance ToJSON AliasRouting where
+  toJSON (AllAliasRouting v) = object ["routing" .= v]
+  toJSON (GranularAliasRouting srch idx) = object (catMaybes prs)
+    where prs = [("search_routing" .=) <$> srch
+                ,("index_routing" .=) <$> idx]
+
+instance FromJSON AliasRouting where
+  parseJSON = withObject "AliasRouting" parse
+    where parse o = parseAll o <|> parseGranular o
+          parseAll o = AllAliasRouting <$> o .: "routing"
+          parseGranular o = do
+            sr <- o .:? "search_routing"
+            ir <- o .:? "index_routing"
+            if isNothing sr && isNothing ir
+              then fail "Both search_routing and index_routing can't be blank"
+              else return (GranularAliasRouting sr ir)
+
+instance FromJSON IndexAliasCreate where
+  parseJSON v = withObject "IndexAliasCreate" parse v
+    where parse o = IndexAliasCreate <$> optional (parseJSON v)
+                                     <*> o .:? "filter"
+
+{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -}
+data IndexAliasSummary = IndexAliasSummary
+  { indexAliasSummaryAlias :: IndexAlias
+  , indexAliasSummaryCreate :: IndexAliasCreate }
+  deriving (Eq, Show)
+
+{-| 'DocVersion' is an integer version number for a document between 1
+    and 9.2e+18, used for optimistic concurrency control.
+-}
+newtype DocVersion = DocVersion {
+  docVersionNumber :: Int
+  } deriving (Eq, Show, Ord, ToJSON)
+
+-- | Smart constructor for in-range doc version
+mkDocVersion :: Int -> Maybe DocVersion
+mkDocVersion i
+  | i >= docVersionNumber minBound
+    && i <= docVersionNumber maxBound =
+      Just $ DocVersion i
+  | otherwise = Nothing
+
+instance Bounded DocVersion where
+  minBound = DocVersion 1
+  maxBound = DocVersion 9200000000000000000 -- 9.2e+18
+
+instance Enum DocVersion where
+  succ x
+    | x /= maxBound = DocVersion (succ $ docVersionNumber x)
+    | otherwise = succError "DocVersion"
+  pred x
+    | x /= minBound = DocVersion (pred $ docVersionNumber x)
+    | otherwise = predError "DocVersion"
+  toEnum i =
+    fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i
+  fromEnum = docVersionNumber
+  enumFrom = boundedEnumFrom
+  enumFromThen = boundedEnumFromThen
+
+instance FromJSON DocVersion where
+  parseJSON v = do
+    i <- parseJSON v
+    maybe (fail "DocVersion out of range") return $ mkDocVersion i
+
+{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its
+    own version numbers instead of ones from ES.
+-}
+newtype ExternalDocVersion = ExternalDocVersion DocVersion
+  deriving (Eq, Show, Ord, Bounded, Enum, ToJSON)
+
+{-| 'VersionControl' is specified when indexing documents as an
+    optimistic concurrency control.
+-}
+data VersionControl = NoVersionControl
+                    -- ^ Don't send a version. This is a pure overwrite.
+                    | InternalVersion DocVersion
+                    -- ^ Use the default ES versioning scheme. Only
+                    -- index the document if the version is the same
+                    -- as the one specified. Only applicable to
+                    -- updates, as you should be getting Version from
+                    -- a search result.
+                    | ExternalGT ExternalDocVersion
+                    -- ^ Use your own version numbering. Only index
+                    -- the document if the version is strictly higher
+                    -- OR the document doesn't exist. The given
+                    -- version will be used as the new version number
+                    -- for the stored document. N.B. All updates must
+                    -- increment this number, meaning there is some
+                    -- global, external ordering of updates.
+                    | ExternalGTE ExternalDocVersion
+                    -- ^ Use your own version numbering. Only index
+                    -- the document if the version is equal or higher
+                    -- than the stored version.
+                    -- Will succeed if there is no existing document.
+                    -- The given version will be used as the new
+                    -- version number for the stored document. Use
+                    -- with care, as this could result in data loss.
+                    | ForceVersion ExternalDocVersion
+                    -- ^ The document will always be indexed and the
+                    -- given version will be the new version. This is
+                    -- typically used for correcting errors. Use with
+                    -- care, as this could result in data loss.
+                    deriving (Eq, Show, Ord)
+
+{-| 'DocumentParent' is used to specify a parent document.
+-}
+newtype DocumentParent = DocumentParent DocId
+  deriving (Eq, Show)
+
+{-| 'IndexDocumentSettings' are special settings supplied when indexing
+    a document. For the best backwards compatibility when new fields are
+    added, you should probably prefer to start with 'defaultIndexDocumentSettings'.
+-}
+data IndexDocumentSettings =
+  IndexDocumentSettings { idsVersionControl :: VersionControl
+                        , idsParent :: Maybe DocumentParent
+                        } deriving (Eq, Show)
+
+{-| Reasonable default settings. Chooses no version control and no parent.
+-}
+defaultIndexDocumentSettings :: IndexDocumentSettings
+defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing
+
+{-| 'IndexSelection' is used for APIs which take a single index, a list of
+    indexes, or the special @_all@ index.
+-}
+--TODO: this does not fully support the multi-index syntax. It wouldn't be too hard to implement but you'd have to add the optional parameters (ignore_unavailable, allow_no_indices, expand_wildcards) to any APIs using it. Also would be a breaking API.
+data IndexSelection =
+    IndexList (NonEmpty IndexName)
+  | AllIndexes
+  deriving (Eq, Show)
+
+{-| 'NodeSelection' is used for most cluster APIs. See the Elasticsearch
+    cluster documentation for more details.
+-}
+data NodeSelection =
+    LocalNode
+    -- ^ Whatever node receives this request
+  | NodeList (NonEmpty NodeSelector)
+  | AllNodes
+  deriving (Eq, Show)
+
+-- | An exact match or pattern to identify a node. Note that all of
+-- these options support wildcarding, so your node name, server, attr
+-- name can all contain * characters to be a fuzzy match.
+data NodeSelector =
+    NodeByName NodeName
+  | NodeByFullNodeId FullNodeId
+  | NodeByHost Server
+    -- ^ e.g. 10.0.0.1 or even 10.0.0.*
+  | NodeByAttribute NodeAttrName Text
+    -- ^ NodeAttrName can be a pattern, e.g. rack*. The value can too.
+  deriving (Eq, Show)
+
+{-| 'TemplateName' is used to describe which template to query/create/delete
+-}
+newtype TemplateName = TemplateName Text deriving (Eq, Show, ToJSON, FromJSON)
+
+{-| 'TemplatePattern' represents a pattern which is matched against index names
+-}
+newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, ToJSON, FromJSON)
+
+-- | Username type used for HTTP Basic authentication. See 'basicAuthHook'.
+newtype EsUsername = EsUsername { esUsername :: Text } deriving (Read, Show, Eq)
+
+-- | Password type used for HTTP Basic authentication. See 'basicAuthHook'.
+newtype EsPassword = EsPassword { esPassword :: Text } deriving (Read, Show, Eq)
+
+data SnapshotRepoSelection =
+    SnapshotRepoList (NonEmpty SnapshotRepoPattern)
+  | AllSnapshotRepos
+  deriving (Eq, Show)
+
+-- | Either specifies an exact repo name or one with globs in it,
+-- e.g. @RepoPattern "foo*"@ __NOTE__: Patterns are not supported on ES < 1.7
+data SnapshotRepoPattern =
+    ExactRepo SnapshotRepoName
+  | RepoPattern Text
+  deriving (Eq, Show)
+
+-- | The unique name of a snapshot repository.
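+--
+-- For example (an illustrative value, not from the original source):
+--
+-- > SnapshotRepoName "daily-backups"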
+newtype SnapshotRepoName = + SnapshotRepoName { snapshotRepoName :: Text } + deriving (Eq, Ord, Show, ToJSON, FromJSON) + + +-- | A generic representation of a snapshot repo. This is what gets +-- sent to and parsed from the server. For repo types enabled by +-- plugins that aren't exported by this library, consider making a +-- custom type which implements 'SnapshotRepo'. If it is a common repo +-- type, consider submitting a pull request to have it included in the +-- library proper +data GenericSnapshotRepo = GenericSnapshotRepo { + gSnapshotRepoName :: SnapshotRepoName + , gSnapshotRepoType :: SnapshotRepoType + , gSnapshotRepoSettings :: GenericSnapshotRepoSettings + } deriving (Eq, Show) + + +instance SnapshotRepo GenericSnapshotRepo where + toGSnapshotRepo = id + fromGSnapshotRepo = Right + + +newtype SnapshotRepoType = + SnapshotRepoType { snapshotRepoType :: Text } + deriving (Eq, Ord, Show, ToJSON, FromJSON) + + +-- | Opaque representation of snapshot repo settings. Instances of +-- 'SnapshotRepo' will produce this. +newtype GenericSnapshotRepoSettings = + GenericSnapshotRepoSettings { gSnapshotRepoSettingsObject :: Object } + deriving (Eq, Show, ToJSON) + + + -- Regardless of whether you send strongly typed json, my version of + -- ES sends back stringly typed json in the settings, e.g. booleans + -- as strings, so we'll try to convert them. +instance FromJSON GenericSnapshotRepoSettings where + parseJSON = fmap (GenericSnapshotRepoSettings . fmap unStringlyTypeJSON). parseJSON + +-- | The result of running 'verifySnapshotRepo'. +newtype SnapshotVerification = + SnapshotVerification { + snapshotNodeVerifications :: [SnapshotNodeVerification] + } deriving (Eq, Show) + + +instance FromJSON SnapshotVerification where + parseJSON = withObject "SnapshotVerification" parse + where + parse o = do + o2 <- o .: "nodes" + SnapshotVerification <$> mapM (uncurry parse') (HM.toList o2) + parse' rawFullId = withObject "SnapshotNodeVerification" $ \o -> + SnapshotNodeVerification (FullNodeId rawFullId) <$> o .: "name" + + +-- | A node that has verified a snapshot +data SnapshotNodeVerification = SnapshotNodeVerification { + snvFullId :: FullNodeId + , snvNodeName :: NodeName + } deriving (Eq, Show) + + +-- | Unique, automatically-generated name assigned to nodes that are +-- usually returned in node-oriented APIs. +newtype FullNodeId = FullNodeId { fullNodeId :: Text } + deriving (Eq, Ord, Show, FromJSON) + + +-- | A human-readable node name that is supplied by the user in the +-- node config or automatically generated by ElasticSearch. 
+newtype NodeName = NodeName { nodeName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +newtype ClusterName = ClusterName { clusterName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +data NodesInfo = NodesInfo { + nodesInfo :: [NodeInfo] + , nodesClusterName :: ClusterName + } deriving (Eq, Show) + +data NodesStats = NodesStats { + nodesStats :: [NodeStats] + , nodesStatsClusterName :: ClusterName + } deriving (Eq, Show) + +data NodeStats = NodeStats { + nodeStatsName :: NodeName + , nodeStatsFullId :: FullNodeId + , nodeStatsBreakersStats :: Maybe NodeBreakersStats + , nodeStatsHTTP :: NodeHTTPStats + , nodeStatsTransport :: NodeTransportStats + , nodeStatsFS :: NodeFSStats + , nodeStatsNetwork :: Maybe NodeNetworkStats + , nodeStatsThreadPool :: NodeThreadPoolsStats + , nodeStatsJVM :: NodeJVMStats + , nodeStatsProcess :: NodeProcessStats + , nodeStatsOS :: NodeOSStats + , nodeStatsIndices :: NodeIndicesStats + } deriving (Eq, Show) + +data NodeBreakersStats = NodeBreakersStats { + nodeStatsParentBreaker :: NodeBreakerStats + , nodeStatsRequestBreaker :: NodeBreakerStats + , nodeStatsFieldDataBreaker :: NodeBreakerStats + } deriving (Eq, Show) + +data NodeBreakerStats = NodeBreakerStats { + nodeBreakersTripped :: Int + , nodeBreakersOverhead :: Double + , nodeBreakersEstSize :: Bytes + , nodeBreakersLimitSize :: Bytes + } deriving (Eq, Show) + +data NodeHTTPStats = NodeHTTPStats { + nodeHTTPTotalOpened :: Int + , nodeHTTPCurrentOpen :: Int + } deriving (Eq, Show) + +data NodeTransportStats = NodeTransportStats { + nodeTransportTXSize :: Bytes + , nodeTransportCount :: Int + , nodeTransportRXSize :: Bytes + , nodeTransportRXCount :: Int + , nodeTransportServerOpen :: Int + } deriving (Eq, Show) + +data NodeFSStats = NodeFSStats { + nodeFSDataPaths :: [NodeDataPathStats] + , nodeFSTotal :: NodeFSTotalStats + , nodeFSTimestamp :: UTCTime + } deriving (Eq, Show) + +data NodeDataPathStats = NodeDataPathStats { + nodeDataPathDiskServiceTime :: Maybe Double + , nodeDataPathDiskQueue :: Maybe Double + , nodeDataPathIOSize :: Maybe Bytes + , nodeDataPathWriteSize :: Maybe Bytes + , nodeDataPathReadSize :: Maybe Bytes + , nodeDataPathIOOps :: Maybe Int + , nodeDataPathWrites :: Maybe Int + , nodeDataPathReads :: Maybe Int + , nodeDataPathAvailable :: Bytes + , nodeDataPathFree :: Bytes + , nodeDataPathTotal :: Bytes + , nodeDataPathType :: Maybe Text + , nodeDataPathDevice :: Maybe Text + , nodeDataPathMount :: Text + , nodeDataPathPath :: Text + } deriving (Eq, Show) + +data NodeFSTotalStats = NodeFSTotalStats { + nodeFSTotalDiskServiceTime :: Maybe Double + , nodeFSTotalDiskQueue :: Maybe Double + , nodeFSTotalIOSize :: Maybe Bytes + , nodeFSTotalWriteSize :: Maybe Bytes + , nodeFSTotalReadSize :: Maybe Bytes + , nodeFSTotalIOOps :: Maybe Int + , nodeFSTotalWrites :: Maybe Int + , nodeFSTotalReads :: Maybe Int + , nodeFSTotalAvailable :: Bytes + , nodeFSTotalFree :: Bytes + , nodeFSTotalTotal :: Bytes + } deriving (Eq, Show) + +data NodeNetworkStats = NodeNetworkStats { + nodeNetTCPOutRSTs :: Int + , nodeNetTCPInErrs :: Int + , nodeNetTCPAttemptFails :: Int + , nodeNetTCPEstabResets :: Int + , nodeNetTCPRetransSegs :: Int + , nodeNetTCPOutSegs :: Int + , nodeNetTCPInSegs :: Int + , nodeNetTCPCurrEstab :: Int + , nodeNetTCPPassiveOpens :: Int + , nodeNetTCPActiveOpens :: Int + } deriving (Eq, Show) + +data NodeThreadPoolsStats = NodeThreadPoolsStats { + nodeThreadPoolsStatsSnapshot :: NodeThreadPoolStats + , nodeThreadPoolsStatsBulk :: NodeThreadPoolStats + , nodeThreadPoolsStatsMerge :: 
NodeThreadPoolStats + , nodeThreadPoolsStatsGet :: NodeThreadPoolStats + , nodeThreadPoolsStatsManagement :: NodeThreadPoolStats + , nodeThreadPoolsStatsFetchShardStore :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsOptimize :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsFlush :: NodeThreadPoolStats + , nodeThreadPoolsStatsSearch :: NodeThreadPoolStats + , nodeThreadPoolsStatsWarmer :: NodeThreadPoolStats + , nodeThreadPoolsStatsGeneric :: NodeThreadPoolStats + , nodeThreadPoolsStatsSuggest :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsRefresh :: NodeThreadPoolStats + , nodeThreadPoolsStatsIndex :: NodeThreadPoolStats + , nodeThreadPoolsStatsListener :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsFetchShardStarted :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsPercolate :: Maybe NodeThreadPoolStats + } deriving (Eq, Show) + +data NodeThreadPoolStats = NodeThreadPoolStats { + nodeThreadPoolCompleted :: Int + , nodeThreadPoolLargest :: Int + , nodeThreadPoolRejected :: Int + , nodeThreadPoolActive :: Int + , nodeThreadPoolQueue :: Int + , nodeThreadPoolThreads :: Int + } deriving (Eq, Show) + +data NodeJVMStats = NodeJVMStats { + nodeJVMStatsMappedBufferPool :: JVMBufferPoolStats + , nodeJVMStatsDirectBufferPool :: JVMBufferPoolStats + , nodeJVMStatsGCOldCollector :: JVMGCStats + , nodeJVMStatsGCYoungCollector :: JVMGCStats + , nodeJVMStatsPeakThreadsCount :: Int + , nodeJVMStatsThreadsCount :: Int + , nodeJVMStatsOldPool :: JVMPoolStats + , nodeJVMStatsSurvivorPool :: JVMPoolStats + , nodeJVMStatsYoungPool :: JVMPoolStats + , nodeJVMStatsNonHeapCommitted :: Bytes + , nodeJVMStatsNonHeapUsed :: Bytes + , nodeJVMStatsHeapMax :: Bytes + , nodeJVMStatsHeapCommitted :: Bytes + , nodeJVMStatsHeapUsedPercent :: Int + , nodeJVMStatsHeapUsed :: Bytes + , nodeJVMStatsUptime :: NominalDiffTime + , nodeJVMStatsTimestamp :: UTCTime + } deriving (Eq, Show) + +data JVMBufferPoolStats = JVMBufferPoolStats { + jvmBufferPoolStatsTotalCapacity :: Bytes + , jvmBufferPoolStatsUsed :: Bytes + , jvmBufferPoolStatsCount :: Int + } deriving (Eq, Show) + +data JVMGCStats = JVMGCStats { + jvmGCStatsCollectionTime :: NominalDiffTime + , jvmGCStatsCollectionCount :: Int + } deriving (Eq, Show) + +data JVMPoolStats = JVMPoolStats { + jvmPoolStatsPeakMax :: Bytes + , jvmPoolStatsPeakUsed :: Bytes + , jvmPoolStatsMax :: Bytes + , jvmPoolStatsUsed :: Bytes + } deriving (Eq, Show) + +data NodeProcessStats = NodeProcessStats { + nodeProcessTimestamp :: UTCTime + , nodeProcessOpenFDs :: Int + , nodeProcessMaxFDs :: Int + , nodeProcessCPUPercent :: Int + , nodeProcessCPUTotal :: NominalDiffTime + , nodeProcessMemTotalVirtual :: Bytes + } deriving (Eq, Show) + +data NodeOSStats = NodeOSStats { + nodeOSTimestamp :: UTCTime + , nodeOSCPUPercent :: Int + , nodeOSLoad :: Maybe LoadAvgs + , nodeOSMemTotal :: Bytes + , nodeOSMemFree :: Bytes + , nodeOSMemFreePercent :: Int + , nodeOSMemUsed :: Bytes + , nodeOSMemUsedPercent :: Int + , nodeOSSwapTotal :: Bytes + , nodeOSSwapFree :: Bytes + , nodeOSSwapUsed :: Bytes + } deriving (Eq, Show) + +data LoadAvgs = LoadAvgs { + loadAvg1Min :: Double + , loadAvg5Min :: Double + , loadAvg15Min :: Double + } deriving (Eq, Show) + +data NodeIndicesStats = NodeIndicesStats { + nodeIndicesStatsRecoveryThrottleTime :: Maybe NominalDiffTime + , nodeIndicesStatsRecoveryCurrentAsTarget :: Maybe Int + , nodeIndicesStatsRecoveryCurrentAsSource :: Maybe Int + , nodeIndicesStatsQueryCacheMisses :: Maybe Int + , nodeIndicesStatsQueryCacheHits :: Maybe Int + , 
nodeIndicesStatsQueryCacheEvictions :: Maybe Int
+  , nodeIndicesStatsQueryCacheSize :: Maybe Bytes
+  , nodeIndicesStatsSuggestCurrent :: Maybe Int
+  , nodeIndicesStatsSuggestTime :: Maybe NominalDiffTime
+  , nodeIndicesStatsSuggestTotal :: Maybe Int
+  , nodeIndicesStatsTranslogSize :: Bytes
+  , nodeIndicesStatsTranslogOps :: Int
+  , nodeIndicesStatsSegFixedBitSetMemory :: Maybe Bytes
+  , nodeIndicesStatsSegVersionMapMemory :: Bytes
+  , nodeIndicesStatsSegIndexWriterMaxMemory :: Maybe Bytes
+  , nodeIndicesStatsSegIndexWriterMemory :: Bytes
+  , nodeIndicesStatsSegMemory :: Bytes
+  , nodeIndicesStatsSegCount :: Int
+  , nodeIndicesStatsCompletionSize :: Bytes
+  , nodeIndicesStatsPercolateQueries :: Maybe Int
+  , nodeIndicesStatsPercolateMemory :: Maybe Bytes
+  , nodeIndicesStatsPercolateCurrent :: Maybe Int
+  , nodeIndicesStatsPercolateTime :: Maybe NominalDiffTime
+  , nodeIndicesStatsPercolateTotal :: Maybe Int
+  , nodeIndicesStatsFieldDataEvictions :: Int
+  , nodeIndicesStatsFieldDataMemory :: Bytes
+  , nodeIndicesStatsWarmerTotalTime :: NominalDiffTime
+  , nodeIndicesStatsWarmerTotal :: Int
+  , nodeIndicesStatsWarmerCurrent :: Int
+  , nodeIndicesStatsFlushTotalTime :: NominalDiffTime
+  , nodeIndicesStatsFlushTotal :: Int
+  , nodeIndicesStatsRefreshTotalTime :: NominalDiffTime
+  , nodeIndicesStatsRefreshTotal :: Int
+  , nodeIndicesStatsMergesTotalSize :: Bytes
+  , nodeIndicesStatsMergesTotalDocs :: Int
+  , nodeIndicesStatsMergesTotalTime :: NominalDiffTime
+  , nodeIndicesStatsMergesTotal :: Int
+  , nodeIndicesStatsMergesCurrentSize :: Bytes
+  , nodeIndicesStatsMergesCurrentDocs :: Int
+  , nodeIndicesStatsMergesCurrent :: Int
+  , nodeIndicesStatsSearchFetchCurrent :: Int
+  , nodeIndicesStatsSearchFetchTime :: NominalDiffTime
+  , nodeIndicesStatsSearchFetchTotal :: Int
+  , nodeIndicesStatsSearchQueryCurrent :: Int
+  , nodeIndicesStatsSearchQueryTime :: NominalDiffTime
+  , nodeIndicesStatsSearchQueryTotal :: Int
+  , nodeIndicesStatsSearchOpenContexts :: Int
+  , nodeIndicesStatsGetCurrent :: Int
+  , nodeIndicesStatsGetMissingTime :: NominalDiffTime
+  , nodeIndicesStatsGetMissingTotal :: Int
+  , nodeIndicesStatsGetExistsTime :: NominalDiffTime
+  , nodeIndicesStatsGetExistsTotal :: Int
+  , nodeIndicesStatsGetTime :: NominalDiffTime
+  , nodeIndicesStatsGetTotal :: Int
+  , nodeIndicesStatsIndexingThrottleTime :: Maybe NominalDiffTime
+  , nodeIndicesStatsIndexingIsThrottled :: Maybe Bool
+  , nodeIndicesStatsIndexingNoopUpdateTotal :: Maybe Int
+  , nodeIndicesStatsIndexingDeleteCurrent :: Int
+  , nodeIndicesStatsIndexingDeleteTime :: NominalDiffTime
+  , nodeIndicesStatsIndexingDeleteTotal :: Int
+  , nodeIndicesStatsIndexingIndexCurrent :: Int
+  , nodeIndicesStatsIndexingIndexTime :: NominalDiffTime
+  , nodeIndicesStatsIndexingTotal :: Int
+  , nodeIndicesStatsStoreThrottleTime :: NominalDiffTime
+  , nodeIndicesStatsStoreSize :: Bytes
+  , nodeIndicesStatsDocsDeleted :: Int
+  , nodeIndicesStatsDocsCount :: Int
+  } deriving (Eq, Show)
+
+-- | A quirky address format used throughout Elasticsearch. An example
+-- would be inet[/1.1.1.1:9200]. inet may be a placeholder for an
+-- FQDN (fully qualified domain name).
+newtype EsAddress = EsAddress { esAddress :: Text }
+                  deriving (Eq, Ord, Show, FromJSON)
+
+-- | Typically a 7-character hex string.
+newtype BuildHash = BuildHash { buildHash :: Text }
+                  deriving (Eq, Ord, Show, FromJSON, ToJSON)
+
+newtype PluginName = PluginName { pluginName :: Text }
+                   deriving (Eq, Ord, Show, FromJSON)
+
+data NodeInfo = NodeInfo {
+    nodeInfoHTTPAddress :: Maybe EsAddress
+  , nodeInfoBuild :: BuildHash
+  , nodeInfoESVersion :: VersionNumber
+  , nodeInfoIP :: Server
+  , nodeInfoHost :: Server
+  , nodeInfoTransportAddress :: EsAddress
+  , nodeInfoName :: NodeName
+  , nodeInfoFullId :: FullNodeId
+  , nodeInfoPlugins :: [NodePluginInfo]
+  , nodeInfoHTTP :: NodeHTTPInfo
+  , nodeInfoTransport :: NodeTransportInfo
+  , nodeInfoNetwork :: Maybe NodeNetworkInfo
+  , nodeInfoThreadPool :: NodeThreadPoolsInfo
+  , nodeInfoJVM :: NodeJVMInfo
+  , nodeInfoProcess :: NodeProcessInfo
+  , nodeInfoOS :: NodeOSInfo
+  , nodeInfoSettings :: Object
+    -- ^ The members of the settings object are not consistent,
+    -- depending on plugins, etc.
+  } deriving (Eq, Show)
+
+data NodePluginInfo = NodePluginInfo {
+    nodePluginSite :: Maybe Bool
+    -- ^ Is this a site plugin?
+  , nodePluginJVM :: Maybe Bool
+    -- ^ Is this plugin running on the JVM?
+  , nodePluginDescription :: Text
+  , nodePluginVersion :: MaybeNA VersionNumber
+  , nodePluginName :: PluginName
+  } deriving (Eq, Show)
+
+data NodeHTTPInfo = NodeHTTPInfo {
+    nodeHTTPMaxContentLength :: Bytes
+  , nodeHTTPTransportAddress :: BoundTransportAddress
+  } deriving (Eq, Show)
+
+data NodeTransportInfo = NodeTransportInfo {
+    nodeTransportProfiles :: [BoundTransportAddress]
+  , nodeTransportAddress :: BoundTransportAddress
+  } deriving (Eq, Show)
+
+data BoundTransportAddress = BoundTransportAddress {
+    publishAddress :: EsAddress
+  , boundAddress :: [EsAddress]
+  } deriving (Eq, Show)
+
+data NodeNetworkInfo = NodeNetworkInfo {
+    nodeNetworkPrimaryInterface :: NodeNetworkInterface
+  , nodeNetworkRefreshInterval :: NominalDiffTime
+  } deriving (Eq, Show)
+
+newtype MacAddress = MacAddress { macAddress :: Text }
+                   deriving (Eq, Ord, Show, FromJSON)
+
+newtype NetworkInterfaceName = NetworkInterfaceName { networkInterfaceName :: Text }
+                             deriving (Eq, Ord, Show, FromJSON)
+
+data NodeNetworkInterface = NodeNetworkInterface {
+    nodeNetIfaceMacAddress :: MacAddress
+  , nodeNetIfaceName :: NetworkInterfaceName
+  , nodeNetIfaceAddress :: Server
+  } deriving (Eq, Show)
+
+data NodeThreadPoolsInfo = NodeThreadPoolsInfo {
+    nodeThreadPoolsRefresh :: NodeThreadPoolInfo
+  , nodeThreadPoolsManagement :: NodeThreadPoolInfo
+  , nodeThreadPoolsPercolate :: Maybe NodeThreadPoolInfo
+  , nodeThreadPoolsListener :: Maybe NodeThreadPoolInfo
+  , nodeThreadPoolsFetchShardStarted :: Maybe NodeThreadPoolInfo
+  , nodeThreadPoolsSearch :: NodeThreadPoolInfo
+  , nodeThreadPoolsFlush :: NodeThreadPoolInfo
+  , nodeThreadPoolsWarmer :: NodeThreadPoolInfo
+  , nodeThreadPoolsOptimize :: Maybe NodeThreadPoolInfo
+  , nodeThreadPoolsBulk :: NodeThreadPoolInfo
+  , nodeThreadPoolsSuggest :: Maybe NodeThreadPoolInfo
+  , nodeThreadPoolsMerge :: NodeThreadPoolInfo
+  , nodeThreadPoolsSnapshot :: NodeThreadPoolInfo
+  , nodeThreadPoolsGet :: NodeThreadPoolInfo
+  , nodeThreadPoolsFetchShardStore :: Maybe NodeThreadPoolInfo
+  , nodeThreadPoolsIndex :: NodeThreadPoolInfo
+  , nodeThreadPoolsGeneric :: NodeThreadPoolInfo
+  } deriving (Eq, Show)
+
+data NodeThreadPoolInfo = NodeThreadPoolInfo {
+    nodeThreadPoolQueueSize :: ThreadPoolSize
+  , nodeThreadPoolKeepalive :: Maybe NominalDiffTime
+  , nodeThreadPoolMin :: Maybe Int
+  , nodeThreadPoolMax :: Maybe Int
+  , nodeThreadPoolType :: ThreadPoolType
+  } deriving (Eq, Show)
+
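+-- A sketch of how one of these decodes, relying on the 'FromJSON'
+-- instances defined later in this module. The JSON below is an
+-- illustrative ES 5.x-style @thread_pool@ entry, not output captured
+-- from a live server:
+--
+-- >>> decode "{\"type\":\"fixed\",\"min\":4,\"max\":4,\"queue_size\":1000,\"keep_alive\":\"5m\"}" :: Maybe NodeThreadPoolInfo
+--
+-- parses to a 'ThreadPoolFixed' pool with a bounded queue of 1000 and
+-- a 300-second keepalive.
+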
+data ThreadPoolSize = ThreadPoolBounded Int
+                    | ThreadPoolUnbounded
+                    deriving (Eq, Show)
+
+data ThreadPoolType = ThreadPoolScaling
+                    | ThreadPoolFixed
+                    | ThreadPoolCached
+                    deriving (Eq, Show)
+
+data NodeJVMInfo = NodeJVMInfo {
+    nodeJVMInfoMemoryPools :: [JVMMemoryPool]
+  , nodeJVMInfoMemoryPoolsGCCollectors :: [JVMGCCollector]
+  , nodeJVMInfoMemoryInfo :: JVMMemoryInfo
+  , nodeJVMInfoStartTime :: UTCTime
+  , nodeJVMInfoVMVendor :: Text
+  , nodeJVMVMVersion :: VersionNumber
+    -- ^ The JVM doesn't seem to follow normal version conventions
+  , nodeJVMVMName :: Text
+  , nodeJVMVersion :: VersionNumber
+  , nodeJVMPID :: PID
+  } deriving (Eq, Show)
+
+-- | Handles quirks in the way JVM versions are rendered (1.7.0_101 -> 1.7.0.101)
+newtype JVMVersion = JVMVersion { unJVMVersion :: VersionNumber }
+
+data JVMMemoryInfo = JVMMemoryInfo {
+    jvmMemoryInfoDirectMax :: Bytes
+  , jvmMemoryInfoNonHeapMax :: Bytes
+  , jvmMemoryInfoNonHeapInit :: Bytes
+  , jvmMemoryInfoHeapMax :: Bytes
+  , jvmMemoryInfoHeapInit :: Bytes
+  } deriving (Eq, Show)
+
+newtype JVMMemoryPool = JVMMemoryPool {
+    jvmMemoryPool :: Text
+  } deriving (Eq, Show, FromJSON)
+
+newtype JVMGCCollector = JVMGCCollector {
+    jvmGCCollector :: Text
+  } deriving (Eq, Show, FromJSON)
+
+newtype PID = PID {
+    pid :: Int
+  } deriving (Eq, Show, FromJSON)
+
+data NodeOSInfo = NodeOSInfo {
+    nodeOSRefreshInterval :: NominalDiffTime
+  , nodeOSName :: Text
+  , nodeOSArch :: Text
+  , nodeOSVersion :: VersionNumber
+  , nodeOSAvailableProcessors :: Int
+  , nodeOSAllocatedProcessors :: Int
+  } deriving (Eq, Show)
+
+data CPUInfo = CPUInfo {
+    cpuCacheSize :: Bytes
+  , cpuCoresPerSocket :: Int
+  , cpuTotalSockets :: Int
+  , cpuTotalCores :: Int
+  , cpuMHZ :: Int
+  , cpuModel :: Text
+  , cpuVendor :: Text
+  } deriving (Eq, Show)
+
+data NodeProcessInfo = NodeProcessInfo {
+    nodeProcessMLockAll :: Bool
+    -- ^ Whether memory locking (@mlockall@) is enabled; see the ES
+    -- configuration docs.
+  , nodeProcessMaxFileDescriptors :: Maybe Int
+  , nodeProcessId :: PID
+  , nodeProcessRefreshInterval :: NominalDiffTime
+  } deriving (Eq, Show)
+
+data ShardResult =
+  ShardResult { shardTotal       :: Int
+              , shardsSuccessful :: Int
+              , shardsFailed     :: Int } deriving (Eq, Show)
+
+instance FromJSON ShardResult where
+  parseJSON (Object v) = ShardResult <$>
+                         v .: "total" <*>
+                         v .: "successful" <*>
+                         v .: "failed"
+  parseJSON _ = empty
+
+data SnapshotState = SnapshotInit
+                   | SnapshotStarted
+                   | SnapshotSuccess
+                   | SnapshotFailed
+                   | SnapshotAborted
+                   | SnapshotMissing
+                   | SnapshotWaiting
+                   deriving (Eq, Show)
+
+instance FromJSON SnapshotState where
+  parseJSON = withText "SnapshotState" parse
+    where
+      parse "INIT" = return SnapshotInit
+      parse "STARTED" = return SnapshotStarted
+      parse "SUCCESS" = return SnapshotSuccess
+      parse "FAILED" = return SnapshotFailed
+      parse "ABORTED" = return SnapshotAborted
+      parse "MISSING" = return SnapshotMissing
+      parse "WAITING" = return SnapshotWaiting
+      parse t = fail ("Invalid snapshot state " <> T.unpack t)
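+
+-- For example, the @state@ field of a snapshot response decodes as
+-- (a sketch using aeson directly):
+--
+-- >>> decode "\"SUCCESS\"" :: Maybe SnapshotState
+-- Just SnapshotSuccess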
+
+
+data SnapshotRestoreSettings = SnapshotRestoreSettings {
+    snapRestoreWaitForCompletion :: Bool
+    -- ^ Should the API call return immediately after initializing
+    -- the restore or wait until completed? Note that if this is
+    -- enabled, it could wait a long time, so you should adjust your
+    -- 'ManagerSettings' accordingly to set long timeouts or
+    -- explicitly handle timeouts.
+  , snapRestoreIndices :: Maybe IndexSelection
+    -- ^ Nothing will restore all indices in the snapshot. Just [] is
+    -- permissible and will essentially be a no-op restore.
+  , snapRestoreIgnoreUnavailable :: Bool
+    -- ^ If set to True, any indices that do not exist will be ignored
+    -- during the restore rather than failing it.
+  , snapRestoreIncludeGlobalState :: Bool
+    -- ^ If set to False, will ignore any global state in the snapshot
+    -- and will not restore it.
+  , snapRestoreRenamePattern :: Maybe RestoreRenamePattern
+    -- ^ A regex pattern for matching indices. Used with
+    -- 'snapRestoreRenameReplacement', the restore can reference the
+    -- matched index and create a new index name upon restore.
+  , snapRestoreRenameReplacement :: Maybe (NonEmpty RestoreRenameToken)
+    -- ^ Expression of how index renames should be constructed.
+  , snapRestorePartial :: Bool
+    -- ^ If some indices fail to restore, should the process proceed?
+  , snapRestoreIncludeAliases :: Bool
+    -- ^ Should the restore also restore the aliases captured in the
+    -- snapshot?
+  , snapRestoreIndexSettingsOverrides :: Maybe RestoreIndexSettings
+    -- ^ Settings to apply during the restore process. __NOTE:__ This
+    -- option is not supported in ES < 1.5 and should be set to
+    -- Nothing in that case.
+  , snapRestoreIgnoreIndexSettings :: Maybe (NonEmpty Text)
+    -- ^ This type could be richer, but it isn't clear which settings
+    -- are allowed to be ignored during restore, so we're going with
+    -- including this feature in a basic form rather than omitting
+    -- it. One example here would be "index.refresh_interval". Any
+    -- setting specified here will revert back to the server default
+    -- during the restore process.
+  } deriving (Eq, Show)
+
+newtype SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings
+  { repoUpdateVerify :: Bool
+    -- ^ After creation/update, synchronously check that nodes can
+    -- write to this repo. Defaults to True. You may use False if you
+    -- need a faster response and plan on verifying manually later
+    -- with 'verifySnapshotRepo'.
+  } deriving (Eq, Show)
+
+
+-- | Reasonable defaults for repo creation/update
+--
+-- * repoUpdateVerify True
+defaultSnapshotRepoUpdateSettings :: SnapshotRepoUpdateSettings
+defaultSnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings True
+
+
+-- | A filesystem-based snapshot repo that ships with
+-- Elasticsearch. This is an instance of 'SnapshotRepo' so it can be
+-- used with 'updateSnapshotRepo'.
+data FsSnapshotRepo = FsSnapshotRepo {
+    fsrName :: SnapshotRepoName
+  , fsrLocation :: FilePath
+  , fsrCompressMetadata :: Bool
+  , fsrChunkSize :: Maybe Bytes
+    -- ^ Size by which to split large files during snapshotting.
+  , fsrMaxRestoreBytesPerSec :: Maybe Bytes
+    -- ^ Throttle node restore rate. If not supplied, defaults to 40mb/sec
+  , fsrMaxSnapshotBytesPerSec :: Maybe Bytes
+    -- ^ Throttle node snapshot rate. If not supplied, defaults to 40mb/sec
+  } deriving (Eq, Show)
+
+
+instance SnapshotRepo FsSnapshotRepo where
+  toGSnapshotRepo FsSnapshotRepo {..} =
+    GenericSnapshotRepo fsrName fsRepoType (GenericSnapshotRepoSettings settings)
+    where
+      Object settings = object $ [ "location" .= fsrLocation
+                                 , "compress" .= fsrCompressMetadata
+                                 ] ++ optionalPairs
+      optionalPairs = catMaybes [ ("chunk_size" .=) <$> fsrChunkSize
+                                , ("max_restore_bytes_per_sec" .=) <$> fsrMaxRestoreBytesPerSec
+                                , ("max_snapshot_bytes_per_sec" .=) <$> fsrMaxSnapshotBytesPerSec
+                                ]
+  fromGSnapshotRepo GenericSnapshotRepo {..}
+    | gSnapshotRepoType == fsRepoType = do
+        let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings
+        parseRepo $
+          FsSnapshotRepo gSnapshotRepoName <$> o .: "location"
+                                           <*> o .:? "compress" .!= False
+                                           <*> o .:? "chunk_size"
+                                           <*> o .:? "max_restore_bytes_per_sec"
+                                           <*> o .:? "max_snapshot_bytes_per_sec"
+    | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType)
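+
+-- The instance above is the pattern to follow for repo types this
+-- library doesn't ship. An illustrative sketch for a hypothetical
+-- plugin-provided "url" repo type (field set trimmed down); note that
+-- 'fromGSnapshotRepo' must invert 'toGSnapshotRepo' per the law on
+-- 'SnapshotRepo' below:
+--
+-- > newtype UrlSnapshotRepo = UrlSnapshotRepo { urlRepoName :: SnapshotRepoName }
+-- >
+-- > urlRepoType :: SnapshotRepoType
+-- > urlRepoType = SnapshotRepoType "url"
+-- >
+-- > instance SnapshotRepo UrlSnapshotRepo where
+-- >   toGSnapshotRepo (UrlSnapshotRepo n) =
+-- >     GenericSnapshotRepo n urlRepoType (GenericSnapshotRepoSettings mempty)
+-- >   fromGSnapshotRepo GenericSnapshotRepo {..}
+-- >     | gSnapshotRepoType == urlRepoType = Right (UrlSnapshotRepo gSnapshotRepoName)
+-- >     | otherwise = Left (RepoTypeMismatch urlRepoType gSnapshotRepoType)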
"chunk_size" + <*> o .:? "max_restore_bytes_per_sec" + <*> o .:? "max_snapshot_bytes_per_sec" + | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType) + + +parseRepo :: Parser a -> Either SnapshotRepoConversionError a +parseRepo parser = case parseEither (const parser) () of + Left e -> Left (OtherRepoConversionError (T.pack e)) + Right a -> Right a + + +fsRepoType :: SnapshotRepoType +fsRepoType = SnapshotRepoType "fs" + +-- | Law: fromGSnapshotRepo (toGSnapshotRepo r) == Right r +class SnapshotRepo r where + toGSnapshotRepo :: r -> GenericSnapshotRepo + fromGSnapshotRepo :: GenericSnapshotRepo -> Either SnapshotRepoConversionError r + + +data SnapshotRepoConversionError = RepoTypeMismatch SnapshotRepoType SnapshotRepoType + -- ^ Expected type and actual type + | OtherRepoConversionError Text + deriving (Show, Eq) + + +instance Exception SnapshotRepoConversionError + + +data SnapshotCreateSettings = SnapshotCreateSettings { + snapWaitForCompletion :: Bool + -- ^ Should the API call return immediately after initializing + -- the snapshot or wait until completed? Note that if this is + -- enabled it could wait a long time, so you should adjust your + -- 'ManagerSettings' accordingly to set long timeouts or + -- explicitly handle timeouts. + , snapIndices :: Maybe IndexSelection + -- ^ Nothing will snapshot all indices. Just [] is permissable and + -- will essentially be a no-op snapshot. + , snapIgnoreUnavailable :: Bool + -- ^ If set to True, any matched indices that don't exist will be + -- ignored. Otherwise it will be an error and fail. + , snapIncludeGlobalState :: Bool + , snapPartial :: Bool + -- ^ If some indices failed to snapshot (e.g. if not all primary + -- shards are available), should the process proceed? + } deriving (Eq, Show) + + +-- | Reasonable defaults for snapshot creation +-- +-- * snapWaitForCompletion False +-- * snapIndices Nothing +-- * snapIgnoreUnavailable False +-- * snapIncludeGlobalState True +-- * snapPartial False +defaultSnapshotCreateSettings :: SnapshotCreateSettings +defaultSnapshotCreateSettings = SnapshotCreateSettings { + snapWaitForCompletion = False + , snapIndices = Nothing + , snapIgnoreUnavailable = False + , snapIncludeGlobalState = True + , snapPartial = False + } + + +data SnapshotSelection = + SnapshotList (NonEmpty SnapshotPattern) + | AllSnapshots + deriving (Eq, Show) + + +-- | Either specifies an exact snapshot name or one with globs in it, +-- e.g. @SnapPattern "foo*"@ __NOTE__: Patterns are not supported on +-- ES < 1.7 +data SnapshotPattern = + ExactSnap SnapshotName + | SnapPattern Text + deriving (Eq, Show) + + +-- | General information about the state of a snapshot. 
+
+
+-- | General information about the state of a snapshot. Has some
+-- redundancies with 'SnapshotStatus'.
+data SnapshotInfo = SnapshotInfo {
+    snapInfoShards :: ShardResult
+  , snapInfoFailures :: [SnapshotShardFailure]
+  , snapInfoDuration :: NominalDiffTime
+  , snapInfoEndTime :: UTCTime
+  , snapInfoStartTime :: UTCTime
+  , snapInfoState :: SnapshotState
+  , snapInfoIndices :: [IndexName]
+  , snapInfoName :: SnapshotName
+  } deriving (Eq, Show)
+
+
+instance FromJSON SnapshotInfo where
+  parseJSON = withObject "SnapshotInfo" parse
+    where
+      parse o = SnapshotInfo <$> o .: "shards"
+                             <*> o .: "failures"
+                             <*> (unMS <$> o .: "duration_in_millis")
+                             <*> (posixMS <$> o .: "end_time_in_millis")
+                             <*> (posixMS <$> o .: "start_time_in_millis")
+                             <*> o .: "state"
+                             <*> o .: "indices"
+                             <*> o .: "snapshot"
+
+data SnapshotShardFailure = SnapshotShardFailure {
+    snapShardFailureIndex :: IndexName
+  , snapShardFailureNodeId :: Maybe NodeName -- I'm not 100% sure this isn't actually 'FullNodeId'
+  , snapShardFailureReason :: Text
+  , snapShardFailureShardId :: ShardId
+  } deriving (Eq, Show)
+
+
+instance FromJSON SnapshotShardFailure where
+  parseJSON = withObject "SnapshotShardFailure" parse
+    where
+      parse o = SnapshotShardFailure <$> o .: "index"
+                                     <*> o .:? "node_id"
+                                     <*> o .: "reason"
+                                     <*> o .: "shard_id"
+
+-- | Regex-style pattern, e.g. "index_(.+)" to match index names
+newtype RestoreRenamePattern =
+  RestoreRenamePattern { rrPattern :: Text }
+  deriving (Eq, Show, Ord, ToJSON)
+
+
+-- | A single token in an index renaming scheme for a restore. These
+-- are concatenated into a string before being sent to
+-- Elasticsearch. Check out the Java regex docs on @Matcher@ to find
+-- out more if you're into that sort of thing.
+data RestoreRenameToken = RRTLit Text
+                          -- ^ Just a literal string of characters
+                        | RRSubWholeMatch
+                          -- ^ Equivalent to $0. The entire matched pattern, not any subgroup
+                        | RRSubGroup RRGroupRefNum
+                          -- ^ A specific reference to a group number
+                        deriving (Eq, Show)
+
+
+-- | A group number for regex matching. Only values from 1-9 are
+-- supported. Construct with 'mkRRGroupRefNum'.
+newtype RRGroupRefNum =
+  RRGroupRefNum { rrGroupRefNum :: Int }
+  deriving (Eq, Ord, Show)
+
+instance Bounded RRGroupRefNum where
+  minBound = RRGroupRefNum 1
+  maxBound = RRGroupRefNum 9
+
+
+-- | Only allows valid group number references (1-9).
+mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum
+mkRRGroupRefNum i
+  | i >= rrGroupRefNum minBound
+    && i <= rrGroupRefNum maxBound =
+      Just $ RRGroupRefNum i
+  | otherwise = Nothing
+
+-- | Reasonable defaults for snapshot restores
+--
+-- * snapRestoreWaitForCompletion False
+-- * snapRestoreIndices Nothing
+-- * snapRestoreIgnoreUnavailable False
+-- * snapRestoreIncludeGlobalState True
+-- * snapRestoreRenamePattern Nothing
+-- * snapRestoreRenameReplacement Nothing
+-- * snapRestorePartial False
+-- * snapRestoreIncludeAliases True
+-- * snapRestoreIndexSettingsOverrides Nothing
+-- * snapRestoreIgnoreIndexSettings Nothing
+defaultSnapshotRestoreSettings :: SnapshotRestoreSettings
+defaultSnapshotRestoreSettings = SnapshotRestoreSettings {
+    snapRestoreWaitForCompletion = False
+  , snapRestoreIndices = Nothing
+  , snapRestoreIgnoreUnavailable = False
+  , snapRestoreIncludeGlobalState = True
+  , snapRestoreRenamePattern = Nothing
+  , snapRestoreRenameReplacement = Nothing
+  , snapRestorePartial = False
+  , snapRestoreIncludeAliases = True
+  , snapRestoreIndexSettingsOverrides = Nothing
+  , snapRestoreIgnoreIndexSettings = Nothing
+  }
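+
+-- A sketch of overriding the defaults to restore every index in the
+-- snapshot under a @restored_@ prefix (the pattern and tokens are
+-- illustrative):
+--
+-- > renamedRestore :: SnapshotRestoreSettings
+-- > renamedRestore = defaultSnapshotRestoreSettings
+-- >   { snapRestoreRenamePattern     = Just (RestoreRenamePattern "(.+)")
+-- >   , snapRestoreRenameReplacement = Just (RRTLit "restored_" :| [RRSubWholeMatch])
+-- >   }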
+
+
+-- | Index settings that can be overridden. The docs only mention you
+-- can update number of replicas, but there may be more. You
+-- definitely cannot override shard count.
+newtype RestoreIndexSettings = RestoreIndexSettings
+  { restoreOverrideReplicas :: Maybe ReplicaCount
+  } deriving (Eq, Show)
+
+
+instance ToJSON RestoreIndexSettings where
+  toJSON RestoreIndexSettings {..} = object prs
+    where
+      prs = catMaybes [("index.number_of_replicas" .=) <$> restoreOverrideReplicas]
+
+
+instance FromJSON NodesInfo where
+  parseJSON = withObject "NodesInfo" parse
+    where
+      parse o = do
+        nodes <- o .: "nodes"
+        infos <- forM (HM.toList nodes) $ \(fullNID, v) -> do
+          node <- parseJSON v
+          parseNodeInfo (FullNodeId fullNID) node
+        cn <- o .: "cluster_name"
+        return (NodesInfo infos cn)
+
+instance FromJSON NodesStats where
+  parseJSON = withObject "NodesStats" parse
+    where
+      parse o = do
+        nodes <- o .: "nodes"
+        stats <- forM (HM.toList nodes) $ \(fullNID, v) -> do
+          node <- parseJSON v
+          parseNodeStats (FullNodeId fullNID) node
+        cn <- o .: "cluster_name"
+        return (NodesStats stats cn)
+
+instance FromJSON NodeBreakerStats where
+  parseJSON = withObject "NodeBreakerStats" parse
+    where
+      parse o = NodeBreakerStats <$> o .: "tripped"
+                                 <*> o .: "overhead"
+                                 <*> o .: "estimated_size_in_bytes"
+                                 <*> o .: "limit_size_in_bytes"
+
+instance FromJSON NodeHTTPStats where
+  parseJSON = withObject "NodeHTTPStats" parse
+    where
+      parse o = NodeHTTPStats <$> o .: "total_opened"
+                              <*> o .: "current_open"
+
+instance FromJSON NodeTransportStats where
+  parseJSON = withObject "NodeTransportStats" parse
+    where
+      parse o = NodeTransportStats <$> o .: "tx_size_in_bytes"
+                                   <*> o .: "tx_count"
+                                   <*> o .: "rx_size_in_bytes"
+                                   <*> o .: "rx_count"
+                                   <*> o .: "server_open"
+
+instance FromJSON NodeFSStats where
+  parseJSON = withObject "NodeFSStats" parse
+    where
+      parse o = NodeFSStats <$> o .: "data"
+                            <*> o .: "total"
+                            <*> (posixMS <$> o .: "timestamp")
+
+instance FromJSON NodeDataPathStats where
+  parseJSON = withObject "NodeDataPathStats" parse
+    where
+      parse o =
+        NodeDataPathStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time")
+                          <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue")
+                          <*> o .:? "disk_io_size_in_bytes"
+                          <*> o .:? "disk_write_size_in_bytes"
+                          <*> o .:? "disk_read_size_in_bytes"
+                          <*> o .:? "disk_io_op"
+                          <*> o .:? "disk_writes"
+                          <*> o .:? "disk_reads"
+                          <*> o .: "available_in_bytes"
+                          <*> o .: "free_in_bytes"
+                          <*> o .: "total_in_bytes"
+                          <*> o .:? "type"
+                          <*> o .:? "dev"
+                          <*> o .: "mount"
+                          <*> o .: "path"
+
+instance FromJSON NodeFSTotalStats where
+  parseJSON = withObject "NodeFSTotalStats" parse
+    where
+      parse o = NodeFSTotalStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time")
+                                 <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue")
+                                 <*> o .:? "disk_io_size_in_bytes"
+                                 <*> o .:? "disk_write_size_in_bytes"
+                                 <*> o .:? "disk_read_size_in_bytes"
+                                 <*> o .:? "disk_io_op"
+                                 <*> o .:? "disk_writes"
+                                 <*> o .:?
"disk_reads" + <*> o .: "available_in_bytes" + <*> o .: "free_in_bytes" + <*> o .: "total_in_bytes" + +instance FromJSON NodeNetworkStats where + parseJSON = withObject "NodeNetworkStats" parse + where + parse o = do + tcp <- o .: "tcp" + NodeNetworkStats <$> tcp .: "out_rsts" + <*> tcp .: "in_errs" + <*> tcp .: "attempt_fails" + <*> tcp .: "estab_resets" + <*> tcp .: "retrans_segs" + <*> tcp .: "out_segs" + <*> tcp .: "in_segs" + <*> tcp .: "curr_estab" + <*> tcp .: "passive_opens" + <*> tcp .: "active_opens" + +instance FromJSON NodeThreadPoolsStats where + parseJSON = withObject "NodeThreadPoolsStats" parse + where + parse o = NodeThreadPoolsStats <$> o .: "snapshot" + <*> o .: "bulk" + <*> o .: "force_merge" + <*> o .: "get" + <*> o .: "management" + <*> o .:? "fetch_shard_store" + <*> o .:? "optimize" + <*> o .: "flush" + <*> o .: "search" + <*> o .: "warmer" + <*> o .: "generic" + <*> o .:? "suggest" + <*> o .: "refresh" + <*> o .: "index" + <*> o .:? "listener" + <*> o .:? "fetch_shard_started" + <*> o .:? "percolate" +instance FromJSON NodeThreadPoolStats where + parseJSON = withObject "NodeThreadPoolStats" parse + where + parse o = NodeThreadPoolStats <$> o .: "completed" + <*> o .: "largest" + <*> o .: "rejected" + <*> o .: "active" + <*> o .: "queue" + <*> o .: "threads" + +instance FromJSON NodeJVMStats where + parseJSON = withObject "NodeJVMStats" parse + where + parse o = do + bufferPools <- o .: "buffer_pools" + mapped <- bufferPools .: "mapped" + direct <- bufferPools .: "direct" + gc <- o .: "gc" + collectors <- gc .: "collectors" + oldC <- collectors .: "old" + youngC <- collectors .: "young" + threads <- o .: "threads" + mem <- o .: "mem" + pools <- mem .: "pools" + oldM <- pools .: "old" + survivorM <- pools .: "survivor" + youngM <- pools .: "young" + NodeJVMStats <$> pure mapped + <*> pure direct + <*> pure oldC + <*> pure youngC + <*> threads .: "peak_count" + <*> threads .: "count" + <*> pure oldM + <*> pure survivorM + <*> pure youngM + <*> mem .: "non_heap_committed_in_bytes" + <*> mem .: "non_heap_used_in_bytes" + <*> mem .: "heap_max_in_bytes" + <*> mem .: "heap_committed_in_bytes" + <*> mem .: "heap_used_percent" + <*> mem .: "heap_used_in_bytes" + <*> (unMS <$> o .: "uptime_in_millis") + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON JVMBufferPoolStats where + parseJSON = withObject "JVMBufferPoolStats" parse + where + parse o = JVMBufferPoolStats <$> o .: "total_capacity_in_bytes" + <*> o .: "used_in_bytes" + <*> o .: "count" + +instance FromJSON JVMGCStats where + parseJSON = withObject "JVMGCStats" parse + where + parse o = JVMGCStats <$> (unMS <$> o .: "collection_time_in_millis") + <*> o .: "collection_count" + +instance FromJSON JVMPoolStats where + parseJSON = withObject "JVMPoolStats" parse + where + parse o = JVMPoolStats <$> o .: "peak_max_in_bytes" + <*> o .: "peak_used_in_bytes" + <*> o .: "max_in_bytes" + <*> o .: "used_in_bytes" + +instance FromJSON NodeProcessStats where + parseJSON = withObject "NodeProcessStats" parse + where + parse o = do + mem <- o .: "mem" + cpu <- o .: "cpu" + NodeProcessStats <$> (posixMS <$> o .: "timestamp") + <*> o .: "open_file_descriptors" + <*> o .: "max_file_descriptors" + <*> cpu .: "percent" + <*> (unMS <$> cpu .: "total_in_millis") + <*> mem .: "total_virtual_in_bytes" + +instance FromJSON NodeOSStats where + parseJSON = withObject "NodeOSStats" parse + where + parse o = do + swap <- o .: "swap" + mem <- o .: "mem" + cpu <- o .: "cpu" + load <- o .:? 
"load_average" + NodeOSStats <$> (posixMS <$> o .: "timestamp") + <*> cpu .: "percent" + <*> pure load + <*> mem .: "total_in_bytes" + <*> mem .: "free_in_bytes" + <*> mem .: "free_percent" + <*> mem .: "used_in_bytes" + <*> mem .: "used_percent" + <*> swap .: "total_in_bytes" + <*> swap .: "free_in_bytes" + <*> swap .: "used_in_bytes" + +instance FromJSON LoadAvgs where + parseJSON = withArray "LoadAvgs" parse + where + parse v = case V.toList v of + [one, five, fifteen] -> LoadAvgs <$> parseJSON one + <*> parseJSON five + <*> parseJSON fifteen + _ -> fail "Expecting a triple of Doubles" + +instance FromJSON NodeIndicesStats where + parseJSON = withObject "NodeIndicesStats" parse + where + parse o = do + let (.::) mv k = case mv of + Just v -> Just <$> v .: k + Nothing -> pure Nothing + mRecovery <- o .:? "recovery" + mQueryCache <- o .:? "query_cache" + mSuggest <- o .:? "suggest" + translog <- o .: "translog" + segments <- o .: "segments" + completion <- o .: "completion" + mPercolate <- o .:? "percolate" + fielddata <- o .: "fielddata" + warmer <- o .: "warmer" + flush <- o .: "flush" + refresh <- o .: "refresh" + merges <- o .: "merges" + search <- o .: "search" + getStats <- o .: "get" + indexing <- o .: "indexing" + store <- o .: "store" + docs <- o .: "docs" + NodeIndicesStats <$> (fmap unMS <$> mRecovery .:: "throttle_time_in_millis") + <*> mRecovery .:: "current_as_target" + <*> mRecovery .:: "current_as_source" + <*> mQueryCache .:: "miss_count" + <*> mQueryCache .:: "hit_count" + <*> mQueryCache .:: "evictions" + <*> mQueryCache .:: "memory_size_in_bytes" + <*> mSuggest .:: "current" + <*> (fmap unMS <$> mSuggest .:: "time_in_millis") + <*> mSuggest .:: "total" + <*> translog .: "size_in_bytes" + <*> translog .: "operations" + <*> segments .:? "fixed_bit_set_memory_in_bytes" + <*> segments .: "version_map_memory_in_bytes" + <*> segments .:? "index_writer_max_memory_in_bytes" + <*> segments .: "index_writer_memory_in_bytes" + <*> segments .: "memory_in_bytes" + <*> segments .: "count" + <*> completion .: "size_in_bytes" + <*> mPercolate .:: "queries" + <*> mPercolate .:: "memory_size_in_bytes" + <*> mPercolate .:: "current" + <*> (fmap unMS <$> mPercolate .:: "time_in_millis") + <*> mPercolate .:: "total" + <*> fielddata .: "evictions" + <*> fielddata .: "memory_size_in_bytes" + <*> (unMS <$> warmer .: "total_time_in_millis") + <*> warmer .: "total" + <*> warmer .: "current" + <*> (unMS <$> flush .: "total_time_in_millis") + <*> flush .: "total" + <*> (unMS <$> refresh .: "total_time_in_millis") + <*> refresh .: "total" + <*> merges .: "total_size_in_bytes" + <*> merges .: "total_docs" + <*> (unMS <$> merges .: "total_time_in_millis") + <*> merges .: "total" + <*> merges .: "current_size_in_bytes" + <*> merges .: "current_docs" + <*> merges .: "current" + <*> search .: "fetch_current" + <*> (unMS <$> search .: "fetch_time_in_millis") + <*> search .: "fetch_total" + <*> search .: "query_current" + <*> (unMS <$> search .: "query_time_in_millis") + <*> search .: "query_total" + <*> search .: "open_contexts" + <*> getStats .: "current" + <*> (unMS <$> getStats .: "missing_time_in_millis") + <*> getStats .: "missing_total" + <*> (unMS <$> getStats .: "exists_time_in_millis") + <*> getStats .: "exists_total" + <*> (unMS <$> getStats .: "time_in_millis") + <*> getStats .: "total" + <*> (fmap unMS <$> indexing .:? "throttle_time_in_millis") + <*> indexing .:? "is_throttled" + <*> indexing .:? 
"noop_update_total" + <*> indexing .: "delete_current" + <*> (unMS <$> indexing .: "delete_time_in_millis") + <*> indexing .: "delete_total" + <*> indexing .: "index_current" + <*> (unMS <$> indexing .: "index_time_in_millis") + <*> indexing .: "index_total" + <*> (unMS <$> store .: "throttle_time_in_millis") + <*> store .: "size_in_bytes" + <*> docs .: "deleted" + <*> docs .: "count" + +instance FromJSON NodeBreakersStats where + parseJSON = withObject "NodeBreakersStats" parse + where + parse o = NodeBreakersStats <$> o .: "parent" + <*> o .: "request" + <*> o .: "fielddata" + +parseNodeStats :: FullNodeId -> Object -> Parser NodeStats +parseNodeStats fnid o = + NodeStats <$> o .: "name" + <*> pure fnid + <*> o .:? "breakers" + <*> o .: "http" + <*> o .: "transport" + <*> o .: "fs" + <*> o .:? "network" + <*> o .: "thread_pool" + <*> o .: "jvm" + <*> o .: "process" + <*> o .: "os" + <*> o .: "indices" + +parseNodeInfo :: FullNodeId -> Object -> Parser NodeInfo +parseNodeInfo nid o = + NodeInfo <$> o .:? "http_address" + <*> o .: "build_hash" + <*> o .: "version" + <*> o .: "ip" + <*> o .: "host" + <*> o .: "transport_address" + <*> o .: "name" + <*> pure nid + <*> o .: "plugins" + <*> o .: "http" + <*> o .: "transport" + <*> o .:? "network" + <*> o .: "thread_pool" + <*> o .: "jvm" + <*> o .: "process" + <*> o .: "os" + <*> o .: "settings" + +instance FromJSON NodePluginInfo where + parseJSON = withObject "NodePluginInfo" parse + where + parse o = NodePluginInfo <$> o .:? "site" + <*> o .:? "jvm" + <*> o .: "description" + <*> o .: "version" + <*> o .: "name" + +instance FromJSON NodeHTTPInfo where + parseJSON = withObject "NodeHTTPInfo" parse + where + parse o = NodeHTTPInfo <$> o .: "max_content_length_in_bytes" + <*> parseJSON (Object o) + +instance FromJSON BoundTransportAddress where + parseJSON = withObject "BoundTransportAddress" parse + where + parse o = BoundTransportAddress <$> o .: "publish_address" + <*> o .: "bound_address" + +instance FromJSON NodeOSInfo where + parseJSON = withObject "NodeOSInfo" parse + where + parse o = + NodeOSInfo <$> (unMS <$> o .: "refresh_interval_in_millis") + <*> o .: "name" + <*> o .: "arch" + <*> o .: "version" + <*> o .: "available_processors" + <*> o .: "allocated_processors" + + +instance FromJSON CPUInfo where + parseJSON = withObject "CPUInfo" parse + where + parse o = CPUInfo <$> o .: "cache_size_in_bytes" + <*> o .: "cores_per_socket" + <*> o .: "total_sockets" + <*> o .: "total_cores" + <*> o .: "mhz" + <*> o .: "model" + <*> o .: "vendor" + +instance FromJSON NodeProcessInfo where + parseJSON = withObject "NodeProcessInfo" parse + where + parse o = NodeProcessInfo <$> o .: "mlockall" + <*> o .:? "max_file_descriptors" + <*> o .: "id" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON NodeJVMInfo where + parseJSON = withObject "NodeJVMInfo" parse + where + parse o = NodeJVMInfo <$> o .: "memory_pools" + <*> o .: "gc_collectors" + <*> o .: "mem" + <*> (posixMS <$> o .: "start_time_in_millis") + <*> o .: "vm_vendor" + <*> o .: "vm_version" + <*> o .: "vm_name" + <*> (unJVMVersion <$> o .: "version") + <*> o .: "pid" + +instance FromJSON JVMVersion where + parseJSON (String t) = + JVMVersion <$> parseJSON (String (T.replace "_" "." 
t))
+  parseJSON v = JVMVersion <$> parseJSON v
+
+instance FromJSON JVMMemoryInfo where
+  parseJSON = withObject "JVMMemoryInfo" parse
+    where
+      parse o = JVMMemoryInfo <$> o .: "direct_max_in_bytes"
+                              <*> o .: "non_heap_max_in_bytes"
+                              <*> o .: "non_heap_init_in_bytes"
+                              <*> o .: "heap_max_in_bytes"
+                              <*> o .: "heap_init_in_bytes"
+
+instance FromJSON NodeThreadPoolsInfo where
+  parseJSON = withObject "NodeThreadPoolsInfo" parse
+    where
+      parse o = NodeThreadPoolsInfo <$> o .: "refresh"
+                                    <*> o .: "management"
+                                    <*> o .:? "percolate"
+                                    <*> o .:? "listener"
+                                    <*> o .:? "fetch_shard_started"
+                                    <*> o .: "search"
+                                    <*> o .: "flush"
+                                    <*> o .: "warmer"
+                                    <*> o .:? "optimize"
+                                    <*> o .: "bulk"
+                                    <*> o .:? "suggest"
+                                    <*> o .: "force_merge"
+                                    <*> o .: "snapshot"
+                                    <*> o .: "get"
+                                    <*> o .:? "fetch_shard_store"
+                                    <*> o .: "index"
+                                    <*> o .: "generic"
+
+instance FromJSON NodeThreadPoolInfo where
+  parseJSON = withObject "NodeThreadPoolInfo" parse
+    where
+      parse o = do
+        ka <- maybe (return Nothing) (fmap Just . parseStringInterval) =<< o .:? "keep_alive"
+        NodeThreadPoolInfo <$> (parseJSON . unStringlyTypeJSON =<< o .: "queue_size")
+                           <*> pure ka
+                           <*> o .:? "min"
+                           <*> o .:? "max"
+                           <*> o .: "type"
+
+data TimeInterval = Weeks
+                  | Days
+                  | Hours
+                  | Minutes
+                  | Seconds deriving Eq
+
+instance Show TimeInterval where
+  show Weeks   = "w"
+  show Days    = "d"
+  show Hours   = "h"
+  show Minutes = "m"
+  show Seconds = "s"
+
+instance Read TimeInterval where
+  readPrec = f =<< TR.get
+    where
+      f 'w' = return Weeks
+      f 'd' = return Days
+      f 'h' = return Hours
+      f 'm' = return Minutes
+      f 's' = return Seconds
+      f _   = fail "TimeInterval expected one of w, d, h, m, s"
+
+data Interval = Year
+              | Quarter
+              | Month
+              | Week
+              | Day
+              | Hour
+              | Minute
+              | Second deriving (Eq, Show)
+
+instance ToJSON Interval where
+  toJSON Year    = "year"
+  toJSON Quarter = "quarter"
+  toJSON Month   = "month"
+  toJSON Week    = "week"
+  toJSON Day     = "day"
+  toJSON Hour    = "hour"
+  toJSON Minute  = "minute"
+  toJSON Second  = "second"
+
+parseStringInterval :: (Monad m) => String -> m NominalDiffTime
+parseStringInterval s = case span isNumber s of
+  ("", _) -> fail "Invalid interval"
+  (nS, unitS) -> case (readMay nS, readMay unitS) of
+    (Just n, Just unit) -> return (fromInteger (n * unitNDT unit))
+    (Nothing, _)        -> fail "Invalid interval number"
+    (_, Nothing)        -> fail "Invalid interval unit"
+  where
+    unitNDT Seconds = 1
+    unitNDT Minutes = 60
+    unitNDT Hours   = 60 * 60
+    unitNDT Days    = 24 * 60 * 60
+    unitNDT Weeks   = 7 * 24 * 60 * 60
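+
+-- For example (a sketch of the unit arithmetic above):
+--
+-- >>> parseStringInterval "5m" :: Maybe NominalDiffTime
+-- Just 300s
+--
+-- >>> parseStringInterval "2h" :: Maybe NominalDiffTime
+-- Just 7200s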
+
+instance FromJSON ThreadPoolSize where
+  parseJSON v = parseAsNumber v <|> parseAsString v
+    where
+      parseAsNumber = parseAsInt <=< parseJSON
+      parseAsInt (-1) = return ThreadPoolUnbounded
+      parseAsInt n
+        | n >= 0 = return (ThreadPoolBounded n)
+        | otherwise = fail "Thread pool size must be >= -1."
+      parseAsString = withText "ThreadPoolSize" $ \t ->
+        case first (readMay . T.unpack) (T.span isNumber t) of
+          (Just n, "k") -> return (ThreadPoolBounded (n * 1000))
+          (Just n, "")  -> return (ThreadPoolBounded n)
+          _ -> fail ("Invalid thread pool size " <> T.unpack t)
+
+instance FromJSON ThreadPoolType where
+  parseJSON = withText "ThreadPoolType" parse
+    where
+      parse "scaling" = return ThreadPoolScaling
+      parse "fixed"   = return ThreadPoolFixed
+      parse "cached"  = return ThreadPoolCached
+      parse e = fail ("Unexpected thread pool type " <> T.unpack e)
+
+instance FromJSON NodeTransportInfo where
+  parseJSON = withObject "NodeTransportInfo" parse
+    where
+      parse o = NodeTransportInfo <$> (maybe (return mempty) parseProfiles =<< o .:? "profiles")
+                                  <*> parseJSON (Object o)
+      parseProfiles (Object o) | HM.null o = return []
+      parseProfiles v@(Array _) = parseJSON v
+      parseProfiles Null = return []
+      parseProfiles _ = fail "Could not parse profiles"
+
+instance FromJSON NodeNetworkInfo where
+  parseJSON = withObject "NodeNetworkInfo" parse
+    where
+      parse o = NodeNetworkInfo <$> o .: "primary_interface"
+                                <*> (unMS <$> o .: "refresh_interval_in_millis")
+
+
+instance FromJSON NodeNetworkInterface where
+  parseJSON = withObject "NodeNetworkInterface" parse
+    where
+      parse o = NodeNetworkInterface <$> o .: "mac_address"
+                                     <*> o .: "name"
+                                     <*> o .: "address"
+
+
+instance ToJSON Version where
+  toJSON Version {..} = object ["number" .= number
+                               ,"build_hash" .= build_hash
+                               ,"build_date" .= build_date
+                               ,"build_snapshot" .= build_snapshot
+                               ,"lucene_version" .= lucene_version]
+
+instance FromJSON Version where
+  parseJSON = withObject "Version" parse
+    where parse o = Version
+                    <$> o .: "number"
+                    <*> o .: "build_hash"
+                    <*> o .: "build_date"
+                    <*> o .: "build_snapshot"
+                    <*> o .: "lucene_version"
+
+instance ToJSON VersionNumber where
+  toJSON = toJSON . Vers.showVersion . versionNumber
+
+instance FromJSON VersionNumber where
+  parseJSON = withText "VersionNumber" (parse . T.unpack)
+    where
+      parse s = case filter (null . snd) (RP.readP_to_S Vers.parseVersion s) of
+        [(v, _)] -> pure (VersionNumber v)
+        [] -> fail ("Invalid version string " ++ s)
+        xs -> fail ("Ambiguous version string " ++ s ++ " (" ++ intercalate ", " (Vers.showVersion .
fst <$> xs) ++ ")") diff --git a/src/Database/V5/Bloodhound/Internal/Highlight.hs b/src/Database/V5/Bloodhound/Internal/Highlight.hs new file mode 100644 index 0000000..1531278 --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Highlight.hs @@ -0,0 +1,164 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} + +module Database.V5.Bloodhound.Internal.Highlight where + +import Bloodhound.Import + +import qualified Data.Map.Strict as M + +import Database.V5.Bloodhound.Internal.Newtypes +import Database.V5.Bloodhound.Internal.Query + +type HitHighlight = M.Map Text [Text] + +data Highlights = Highlights + { globalsettings :: Maybe HighlightSettings + , highlightFields :: [FieldHighlight] + } deriving (Eq, Show) + +instance ToJSON Highlights where + toJSON (Highlights global fields) = + omitNulls (("fields" .= fields) + : highlightSettingsPairs global) + +data FieldHighlight = + FieldHighlight FieldName (Maybe HighlightSettings) + deriving (Eq, Show) + + +instance ToJSON FieldHighlight where + toJSON (FieldHighlight (FieldName fName) (Just fSettings)) = + object [ fName .= fSettings ] + toJSON (FieldHighlight (FieldName fName) Nothing) = + object [ fName .= emptyObject ] + +data HighlightSettings = + Plain PlainHighlight + | Postings PostingsHighlight + | FastVector FastVectorHighlight + deriving (Eq, Show) + + +instance ToJSON HighlightSettings where + toJSON hs = omitNulls (highlightSettingsPairs (Just hs)) + +data PlainHighlight = + PlainHighlight { plainCommon :: Maybe CommonHighlight + , plainNonPost :: Maybe NonPostings } + deriving (Eq, Show) + + -- This requires that index_options are set to 'offset' in the mapping. +data PostingsHighlight = + PostingsHighlight (Maybe CommonHighlight) + deriving (Eq, Show) + +-- This requires that term_vector is set to 'with_positions_offsets' in the mapping. +data FastVectorHighlight = FastVectorHighlight + { fvCommon :: Maybe CommonHighlight + , fvNonPostSettings :: Maybe NonPostings + , boundaryChars :: Maybe Text + , boundaryMaxScan :: Maybe Int + , fragmentOffset :: Maybe Int + , matchedFields :: [Text] + , phraseLimit :: Maybe Int + } deriving (Eq, Show) + +data CommonHighlight = CommonHighlight + { order :: Maybe Text + , forceSource :: Maybe Bool + , tag :: Maybe HighlightTag + , encoder :: Maybe HighlightEncoder + , noMatchSize :: Maybe Int + , highlightQuery :: Maybe Query + , requireFieldMatch :: Maybe Bool + } deriving (Eq, Show) + +-- Settings that are only applicable to FastVector and Plain highlighters. +data NonPostings = + NonPostings { fragmentSize :: Maybe Int + , numberOfFragments :: Maybe Int + } deriving (Eq, Show) + +data HighlightEncoder = DefaultEncoder + | HTMLEncoder + deriving (Eq, Show) + +instance ToJSON HighlightEncoder where + toJSON DefaultEncoder = String "default" + toJSON HTMLEncoder = String "html" + +-- NOTE: Should the tags use some kind of HTML type, rather than Text? 
+data HighlightTag = + TagSchema Text + -- Only uses more than the first value in the lists if fvh + | CustomTags ([Text], [Text]) + deriving (Eq, Show) + +highlightSettingsPairs :: Maybe HighlightSettings -> [Pair] +highlightSettingsPairs Nothing = [] +highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh) +highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph) +highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh) + + +plainHighPairs :: Maybe PlainHighlight -> [Pair] +plainHighPairs Nothing = [] +plainHighPairs (Just (PlainHighlight plCom plNonPost)) = + [ "type" .= String "plain"] + ++ commonHighlightPairs plCom + ++ nonPostingsToPairs plNonPost + +postHighPairs :: Maybe PostingsHighlight -> [Pair] +postHighPairs Nothing = [] +postHighPairs (Just (PostingsHighlight pCom)) = + ("type" .= String "postings") + : commonHighlightPairs pCom + +fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair] +fastVectorHighPairs Nothing = [] +fastVectorHighPairs + (Just + (FastVectorHighlight fvCom fvNonPostSettings' fvBoundChars + fvBoundMaxScan fvFragOff fvMatchedFields + fvPhraseLim)) = + [ "type" .= String "fvh" + , "boundary_chars" .= fvBoundChars + , "boundary_max_scan" .= fvBoundMaxScan + , "fragment_offset" .= fvFragOff + , "matched_fields" .= fvMatchedFields + , "phraseLimit" .= fvPhraseLim] + ++ commonHighlightPairs fvCom + ++ nonPostingsToPairs fvNonPostSettings' + +commonHighlightPairs :: Maybe CommonHighlight -> [Pair] +commonHighlightPairs Nothing = [] +commonHighlightPairs (Just (CommonHighlight chScore chForceSource + chTag chEncoder chNoMatchSize + chHighlightQuery chRequireFieldMatch)) = + [ "order" .= chScore + , "force_source" .= chForceSource + , "encoder" .= chEncoder + , "no_match_size" .= chNoMatchSize + , "highlight_query" .= chHighlightQuery + , "require_fieldMatch" .= chRequireFieldMatch + ] + ++ highlightTagToPairs chTag + + +nonPostingsToPairs :: Maybe NonPostings -> [Pair] +nonPostingsToPairs Nothing = [] +nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = + [ "fragment_size" .= npFragSize + , "number_of_fragments" .= npNumOfFrags + ] + +highlightTagToPairs :: Maybe HighlightTag -> [Pair] +highlightTagToPairs (Just (TagSchema _)) = + [ "scheme" .= String "default" + ] +highlightTagToPairs (Just (CustomTags (pre, post))) = + [ "pre_tags" .= pre + , "post_tags" .= post + ] +highlightTagToPairs Nothing = [] diff --git a/src/Database/V5/Bloodhound/Internal/Newtypes.hs b/src/Database/V5/Bloodhound/Internal/Newtypes.hs new file mode 100644 index 0000000..d58690e --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Newtypes.hs @@ -0,0 +1,234 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} + +module Database.V5.Bloodhound.Internal.Newtypes where + +import Bloodhound.Import + +import qualified Data.Map.Strict as M + +newtype From = From Int deriving (Eq, Show, ToJSON) +newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON) + +-- Used with scripts +newtype HitFields = + HitFields (M.Map Text [Value]) + deriving (Eq, Show) + +instance FromJSON HitFields where + parseJSON x + = HitFields <$> parseJSON x + +-- Slight misnomer. +type Score = Maybe Double + +{-| 'MappingName' is part of mappings which are how ES describes and schematizes + the data in the indices. 
+-} +newtype MappingName = + MappingName Text + deriving (Eq, Show, ToJSON, FromJSON) + +newtype ShardId = ShardId { shardId :: Int } + deriving (Eq, Show, FromJSON) + +{-| 'DocId' is a generic wrapper value for expressing unique Document IDs. + Can be set by the user or created by ES itself. Often used in client + functions for poking at specific documents. +-} +newtype DocId = + DocId Text + deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'FieldName' is used all over the place wherever a specific field within + a document needs to be specified, usually in 'Query's or 'Filter's. +-} +newtype FieldName = + FieldName Text + deriving (Eq, Read, Show, ToJSON, FromJSON) + +newtype TypeName = + TypeName Text + deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'QueryString' is used to wrap query text bodies, be they human written or not. +-} +newtype QueryString = + QueryString Text + deriving (Eq, Show, ToJSON, FromJSON) + + +-- {-| 'Script' is often used in place of 'FieldName' to specify more +-- complex ways of extracting a value from a document. +-- -} +-- newtype Script = +-- Script { scriptText :: Text } +-- deriving (Eq, Show) + +{-| 'CacheName' is used in 'RegexpFilter' for describing the + 'CacheKey' keyed caching behavior. +-} +newtype CacheName = + CacheName Text + deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching. +-} +newtype CacheKey = + CacheKey Text deriving (Eq, Show, FromJSON, ToJSON) +newtype Existence = + Existence Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype NullValue = + NullValue Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype CutoffFrequency = + CutoffFrequency Double deriving (Eq, Show, FromJSON, ToJSON) +newtype Analyzer = + Analyzer Text deriving (Eq, Show, FromJSON, ToJSON) +newtype MaxExpansions = + MaxExpansions Int deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'Lenient', if set to true, will cause format based failures to be + ignored. I don't know what the bloody default is, Elasticsearch + documentation didn't say what it was. Let me know if you figure it out. +-} +newtype Lenient = + Lenient Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype Tiebreaker = + Tiebreaker Double deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'MinimumMatch' controls how many should clauses in the bool query should + match. Can be an absolute value (2) or a percentage (30%) or a + combination of both. +-} +newtype MinimumMatch = + MinimumMatch Int deriving (Eq, Show, FromJSON, ToJSON) +newtype DisableCoord = + DisableCoord Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype IgnoreTermFrequency = + IgnoreTermFrequency Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype MinimumTermFrequency = + MinimumTermFrequency Int deriving (Eq, Show, FromJSON, ToJSON) +newtype MaxQueryTerms = + MaxQueryTerms Int deriving (Eq, Show, FromJSON, ToJSON) +newtype Fuzziness = + Fuzziness Double deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -} +newtype PrefixLength = + PrefixLength Int deriving (Eq, Show, FromJSON, ToJSON) +newtype PercentMatch = + PercentMatch Double deriving (Eq, Show, FromJSON, ToJSON) +newtype StopWord = + StopWord Text deriving (Eq, Show, FromJSON, ToJSON) +newtype QueryPath = + QueryPath Text deriving (Eq, Show, FromJSON, ToJSON) + +{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly + heavy, because all terms in the index need to be examined, just in case + they match. 
Leading wildcards can be disabled by setting + 'AllowLeadingWildcard' to false. -} +newtype AllowLeadingWildcard = + AllowLeadingWildcard Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype LowercaseExpanded = + LowercaseExpanded Bool deriving (Eq, Show, FromJSON, ToJSON) +newtype EnablePositionIncrements = + EnablePositionIncrements Bool deriving (Eq, Show, FromJSON, ToJSON) + +{-| By default, wildcard terms in a query are not analyzed. + Setting 'AnalyzeWildcard' to true enables best-effort analysis. +-} +newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'GeneratePhraseQueries' defaults to false. +-} +newtype GeneratePhraseQueries = + GeneratePhraseQueries Bool deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'Locale' is used for string conversions - defaults to ROOT. +-} +newtype Locale = Locale Text deriving (Eq, Show, FromJSON, ToJSON) +newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, FromJSON, ToJSON) +newtype MinWordLength = MinWordLength Int deriving (Eq, Show, FromJSON, ToJSON) + +{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact + phrase matches. Default is 0. +-} +newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, FromJSON, ToJSON) +newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, FromJSON, ToJSON) +newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, FromJSON, ToJSON) + +-- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ +newtype POSIXMS = POSIXMS { posixMS :: UTCTime } + +instance FromJSON POSIXMS where + parseJSON = withScientific "POSIXMS" (return . parse) + where parse n = + let n' = truncate n :: Integer + in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) + +newtype Boost = + Boost Double + deriving (Eq, Show, ToJSON, FromJSON) + +newtype BoostTerms = + BoostTerms Double + deriving (Eq, Show, ToJSON, FromJSON) + +{-| 'ReplicaCount' is part of 'IndexSettings' -} +newtype ReplicaCount = + ReplicaCount Int + deriving (Eq, Show, ToJSON) + +{-| 'ShardCount' is part of 'IndexSettings' -} +newtype ShardCount = + ShardCount Int + deriving (Eq, Show, ToJSON) + +-- This insanity is because ES *sometimes* returns Replica/Shard counts as strings +instance FromJSON ReplicaCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ReplicaCount . parseJSON + parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) + +instance FromJSON ShardCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ShardCount . parseJSON + parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) + +{-| 'IndexName' is used to describe which index to query/create/delete -} +newtype IndexName = + IndexName Text + deriving (Eq, Show, ToJSON, FromJSON) + +newtype IndexAliasName = + IndexAliasName { indexAliasName :: IndexName } + deriving (Eq, Show, ToJSON) + +newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a } + deriving (Show, Eq) + +instance FromJSON a => FromJSON (MaybeNA a) where + parseJSON (String "NA") = pure $ MaybeNA Nothing + parseJSON o = MaybeNA . Just <$> parseJSON o + +newtype SnapshotName = + SnapshotName { snapshotName :: Text } + deriving (Eq, Show, ToJSON, FromJSON) + +-- | Milliseconds +newtype MS = MS NominalDiffTime + +-- keeps the unexported constructor warnings at bay +unMS :: MS -> NominalDiffTime +unMS (MS t) = t + + +instance FromJSON MS where + parseJSON = withScientific "MS" (return . MS . 
parse)
+    where
+      -- The payload is milliseconds; NominalDiffTime's numeric
+      -- instances are denominated in seconds, so divide by 1000.
+      parse n = fromInteger (truncate n) / 1000
+
+newtype TokenFilter =
+  TokenFilter Text deriving (Eq, Show, FromJSON, ToJSON)
diff --git a/src/Database/V5/Bloodhound/Internal/Query.hs b/src/Database/V5/Bloodhound/Internal/Query.hs
new file mode 100644
index 0000000..69b7814
--- /dev/null
+++ b/src/Database/V5/Bloodhound/Internal/Query.hs
@@ -0,0 +1,1597 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RecordWildCards #-}
+
+module Database.V5.Bloodhound.Internal.Query
+  ( module X
+  , module Database.V5.Bloodhound.Internal.Query
+  ) where
+
+import Bloodhound.Import
+
+import Data.Char (isNumber)
+import qualified Data.HashMap.Strict as HM
+import Data.List (nub)
+import qualified Data.Text as T
+
+import Database.Bloodhound.Common.Script as X
+import Database.V5.Bloodhound.Internal.Newtypes
+
+data Query =
+    TermQuery Term (Maybe Boost)
+  | TermsQuery Text (NonEmpty Text)
+  | QueryMatchQuery MatchQuery
+  | QueryMultiMatchQuery MultiMatchQuery
+  | QueryBoolQuery BoolQuery
+  | QueryBoostingQuery BoostingQuery
+  | QueryCommonTermsQuery CommonTermsQuery
+  | ConstantScoreQuery Query Boost
+  | QueryFunctionScoreQuery FunctionScoreQuery
+  | QueryDisMaxQuery DisMaxQuery
+  | QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
+  | QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
+  | QueryFuzzyQuery FuzzyQuery
+  | QueryHasChildQuery HasChildQuery
+  | QueryHasParentQuery HasParentQuery
+  | IdsQuery MappingName [DocId]
+  | QueryIndicesQuery IndicesQuery
+  | MatchAllQuery (Maybe Boost)
+  | QueryMoreLikeThisQuery MoreLikeThisQuery
+  | QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery
+  | QueryNestedQuery NestedQuery
+  | QueryPrefixQuery PrefixQuery
+  | QueryQueryStringQuery QueryStringQuery
+  | QuerySimpleQueryStringQuery SimpleQueryStringQuery
+  | QueryRangeQuery RangeQuery
+  | QueryRegexpQuery RegexpQuery
+  | QueryExistsQuery FieldName
+  | QueryTemplateQueryInline TemplateQueryInline
+  | QueryMatchNoneQuery
+  deriving (Eq, Show)
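+
+-- A sketch of the wire format the instances below produce (the field
+-- and value are illustrative):
+--
+-- >>> encode (TermQuery (Term "user" "bitemyapp") Nothing)
+-- "{\"term\":{\"user\":{\"value\":\"bitemyapp\"}}}"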
("boost" .=)) boost + merged = mappend base boosted + + toJSON (TermsQuery fieldName terms) = + object [ "terms" .= object conjoined ] + where conjoined = [fieldName .= terms] + + toJSON (IdsQuery idsQueryMappingName docIds) = + object [ "ids" .= object conjoined ] + where conjoined = [ "type" .= idsQueryMappingName + , "values" .= fmap toJSON docIds ] + + toJSON (QueryQueryStringQuery qQueryStringQuery) = + object [ "query_string" .= qQueryStringQuery ] + + toJSON (QueryMatchQuery matchQuery) = + object [ "match" .= matchQuery ] + + toJSON (QueryMultiMatchQuery multiMatchQuery) = + toJSON multiMatchQuery + + toJSON (QueryBoolQuery boolQuery) = + object [ "bool" .= boolQuery ] + + toJSON (QueryBoostingQuery boostingQuery) = + object [ "boosting" .= boostingQuery ] + + toJSON (QueryCommonTermsQuery commonTermsQuery) = + object [ "common" .= commonTermsQuery ] + + toJSON (ConstantScoreQuery query boost) = + object ["constant_score" .= object ["query" .= query + , "boost" .= boost]] + + toJSON (QueryFunctionScoreQuery functionScoreQuery) = + object [ "function_score" .= functionScoreQuery ] + + toJSON (QueryDisMaxQuery disMaxQuery) = + object [ "dis_max" .= disMaxQuery ] + + toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) = + object [ "fuzzy_like_this" .= fuzzyQuery ] + + toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) = + object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ] + + toJSON (QueryFuzzyQuery fuzzyQuery) = + object [ "fuzzy" .= fuzzyQuery ] + + toJSON (QueryHasChildQuery childQuery) = + object [ "has_child" .= childQuery ] + + toJSON (QueryHasParentQuery parentQuery) = + object [ "has_parent" .= parentQuery ] + + toJSON (QueryIndicesQuery qIndicesQuery) = + object [ "indices" .= qIndicesQuery ] + + toJSON (MatchAllQuery boost) = + object [ "match_all" .= omitNulls [ "boost" .= boost ] ] + + toJSON (QueryMoreLikeThisQuery query) = + object [ "more_like_this" .= query ] + + toJSON (QueryMoreLikeThisFieldQuery query) = + object [ "more_like_this_field" .= query ] + + toJSON (QueryNestedQuery query) = + object [ "nested" .= query ] + + toJSON (QueryPrefixQuery query) = + object [ "prefix" .= query ] + + toJSON (QueryRangeQuery query) = + object [ "range" .= query ] + + toJSON (QueryRegexpQuery query) = + object [ "regexp" .= query ] + + toJSON (QuerySimpleQueryStringQuery query) = + object [ "simple_query_string" .= query ] + + toJSON (QueryExistsQuery (FieldName fieldName)) = + object ["exists" .= object + ["field" .= fieldName] + ] + toJSON QueryMatchNoneQuery = + object ["match_none" .= object []] + + toJSON (QueryTemplateQueryInline templateQuery) = + object [ "template" .= templateQuery ] + +instance FromJSON Query where + parseJSON v = withObject "Query" parse v + where parse o = termQuery `taggedWith` "term" + <|> termsQuery `taggedWith` "terms" + <|> idsQuery `taggedWith` "ids" + <|> queryQueryStringQuery `taggedWith` "query_string" + <|> queryMatchQuery `taggedWith` "match" + <|> queryMultiMatchQuery + <|> queryBoolQuery `taggedWith` "bool" + <|> queryBoostingQuery `taggedWith` "boosting" + <|> queryCommonTermsQuery `taggedWith` "common" + <|> constantScoreQuery `taggedWith` "constant_score" + <|> queryFunctionScoreQuery `taggedWith` "function_score" + <|> queryDisMaxQuery `taggedWith` "dis_max" + <|> queryFuzzyLikeThisQuery `taggedWith` "fuzzy_like_this" + <|> queryFuzzyLikeFieldQuery `taggedWith` "fuzzy_like_this_field" + <|> queryFuzzyQuery `taggedWith` "fuzzy" + <|> queryHasChildQuery `taggedWith` "has_child" + <|> queryHasParentQuery `taggedWith` "has_parent" + <|> 
queryIndicesQuery `taggedWith` "indices" + <|> matchAllQuery `taggedWith` "match_all" + <|> queryMoreLikeThisQuery `taggedWith` "more_like_this" + <|> queryMoreLikeThisFieldQuery `taggedWith` "more_like_this_field" + <|> queryNestedQuery `taggedWith` "nested" + <|> queryPrefixQuery `taggedWith` "prefix" + <|> queryRangeQuery `taggedWith` "range" + <|> queryRegexpQuery `taggedWith` "regexp" + <|> querySimpleQueryStringQuery `taggedWith` "simple_query_string" + <|> queryTemplateQueryInline `taggedWith` "template" + where taggedWith parser k = parser =<< o .: k + termQuery = fieldTagged $ \(FieldName fn) o -> + TermQuery <$> (Term fn <$> o .: "value") <*> o .:? "boost" + termsQuery o = case HM.toList o of + [(fn, vs)] -> do vals <- parseJSON vs + case vals of + x:xs -> return (TermsQuery fn (x :| xs)) + _ -> fail "Expected non empty list of values" + _ -> fail "Expected object with 1 field-named key" + idsQuery o = IdsQuery <$> o .: "type" + <*> o .: "values" + queryQueryStringQuery = pure . QueryQueryStringQuery + queryMatchQuery = pure . QueryMatchQuery + queryMultiMatchQuery = QueryMultiMatchQuery <$> parseJSON v + queryBoolQuery = pure . QueryBoolQuery + queryBoostingQuery = pure . QueryBoostingQuery + queryCommonTermsQuery = pure . QueryCommonTermsQuery + constantScoreQuery o = case HM.lookup "query" o of + Just x -> ConstantScoreQuery <$> parseJSON x + <*> o .: "boost" + _ -> fail "Does not appear to be a ConstantScoreQuery" + queryFunctionScoreQuery = pure . QueryFunctionScoreQuery + queryDisMaxQuery = pure . QueryDisMaxQuery + queryFuzzyLikeThisQuery = pure . QueryFuzzyLikeThisQuery + queryFuzzyLikeFieldQuery = pure . QueryFuzzyLikeFieldQuery + queryFuzzyQuery = pure . QueryFuzzyQuery + queryHasChildQuery = pure . QueryHasChildQuery + queryHasParentQuery = pure . QueryHasParentQuery + queryIndicesQuery = pure . QueryIndicesQuery + matchAllQuery o = MatchAllQuery <$> o .:? "boost" + queryMoreLikeThisQuery = pure . QueryMoreLikeThisQuery + queryMoreLikeThisFieldQuery = pure . QueryMoreLikeThisFieldQuery + queryNestedQuery = pure . QueryNestedQuery + queryPrefixQuery = pure . QueryPrefixQuery + queryRangeQuery = pure . QueryRangeQuery + queryRegexpQuery = pure . QueryRegexpQuery + querySimpleQueryStringQuery = pure . QuerySimpleQueryStringQuery + -- queryExistsQuery o = QueryExistsQuery <$> o .: "field" + queryTemplateQueryInline = pure . QueryTemplateQueryInline + +-- | As of Elastic 2.0, 'Filters' are just 'Queries' housed in a +-- Bool Query, and flagged in a different context. +newtype Filter = Filter { unFilter :: Query } + deriving (Eq, Show) + +instance ToJSON Filter where + toJSON = toJSON . unFilter + +instance FromJSON Filter where + parseJSON v = Filter <$> parseJSON v + +data RegexpQuery = + RegexpQuery { regexpQueryField :: FieldName + , regexpQuery :: Regexp + , regexpQueryFlags :: RegexpFlags + , regexpQueryBoost :: Maybe Boost + } deriving (Eq, Show) + +instance ToJSON RegexpQuery where + toJSON (RegexpQuery (FieldName rqQueryField) + (Regexp regexpQueryQuery) rqQueryFlags + rqQueryBoost) = + object [ rqQueryField .= omitNulls base ] + where base = [ "value" .= regexpQueryQuery + , "flags" .= rqQueryFlags + , "boost" .= rqQueryBoost ] + +instance FromJSON RegexpQuery where + parseJSON = withObject "RegexpQuery" parse + where parse = fieldTagged $ \fn o -> + RegexpQuery fn + <$> o .: "value" + <*> o .: "flags" + <*> o .:? 
"boost" + +data RangeQuery = + RangeQuery { rangeQueryField :: FieldName + , rangeQueryRange :: RangeValue + , rangeQueryBoost :: Boost } deriving (Eq, Show) + +instance ToJSON RangeQuery where + toJSON (RangeQuery (FieldName fieldName) range boost) = + object [ fieldName .= object conjoined ] + where + conjoined = ("boost" .= boost) : rangeValueToPair range + +instance FromJSON RangeQuery where + parseJSON = withObject "RangeQuery" parse + where parse = fieldTagged $ \fn o -> + RangeQuery fn + <$> parseJSON (Object o) + <*> o .: "boost" + +mkRangeQuery :: FieldName -> RangeValue -> RangeQuery +mkRangeQuery f r = RangeQuery f r (Boost 1.0) + +data SimpleQueryStringQuery = + SimpleQueryStringQuery + { simpleQueryStringQuery :: QueryString + , simpleQueryStringField :: Maybe FieldOrFields + , simpleQueryStringOperator :: Maybe BooleanOperator + , simpleQueryStringAnalyzer :: Maybe Analyzer + , simpleQueryStringFlags :: Maybe (NonEmpty SimpleQueryFlag) + , simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded + , simpleQueryStringLocale :: Maybe Locale + } deriving (Eq, Show) + + +instance ToJSON SimpleQueryStringQuery where + toJSON SimpleQueryStringQuery {..} = + omitNulls (base ++ maybeAdd) + where base = [ "query" .= simpleQueryStringQuery ] + maybeAdd = [ "fields" .= simpleQueryStringField + , "default_operator" .= simpleQueryStringOperator + , "analyzer" .= simpleQueryStringAnalyzer + , "flags" .= simpleQueryStringFlags + , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded + , "locale" .= simpleQueryStringLocale ] + +instance FromJSON SimpleQueryStringQuery where + parseJSON = withObject "SimpleQueryStringQuery" parse + where parse o = SimpleQueryStringQuery <$> o .: "query" + <*> o .:? "fields" + <*> o .:? "default_operator" + <*> o .:? "analyzer" + <*> (parseFlags <$> o .:? "flags") + <*> o .:? "lowercase_expanded_terms" + <*> o .:? "locale" + parseFlags (Just (x:xs)) = Just (x :| xs) + parseFlags _ = Nothing + +data SimpleQueryFlag = + SimpleQueryAll + | SimpleQueryNone + | SimpleQueryAnd + | SimpleQueryOr + | SimpleQueryPrefix + | SimpleQueryPhrase + | SimpleQueryPrecedence + | SimpleQueryEscape + | SimpleQueryWhitespace + | SimpleQueryFuzzy + | SimpleQueryNear + | SimpleQuerySlop deriving (Eq, Show) + +instance ToJSON SimpleQueryFlag where + toJSON SimpleQueryAll = "ALL" + toJSON SimpleQueryNone = "NONE" + toJSON SimpleQueryAnd = "AND" + toJSON SimpleQueryOr = "OR" + toJSON SimpleQueryPrefix = "PREFIX" + toJSON SimpleQueryPhrase = "PHRASE" + toJSON SimpleQueryPrecedence = "PRECEDENCE" + toJSON SimpleQueryEscape = "ESCAPE" + toJSON SimpleQueryWhitespace = "WHITESPACE" + toJSON SimpleQueryFuzzy = "FUZZY" + toJSON SimpleQueryNear = "NEAR" + toJSON SimpleQuerySlop = "SLOP" + +instance FromJSON SimpleQueryFlag where + parseJSON = withText "SimpleQueryFlag" parse + where parse "ALL" = pure SimpleQueryAll + parse "NONE" = pure SimpleQueryNone + parse "AND" = pure SimpleQueryAnd + parse "OR" = pure SimpleQueryOr + parse "PREFIX" = pure SimpleQueryPrefix + parse "PHRASE" = pure SimpleQueryPhrase + parse "PRECEDENCE" = pure SimpleQueryPrecedence + parse "ESCAPE" = pure SimpleQueryEscape + parse "WHITESPACE" = pure SimpleQueryWhitespace + parse "FUZZY" = pure SimpleQueryFuzzy + parse "NEAR" = pure SimpleQueryNear + parse "SLOP" = pure SimpleQuerySlop + parse f = fail ("Unexpected SimpleQueryFlag: " <> show f) + +-- use_dis_max and tie_breaker when fields are plural? 
+data QueryStringQuery = + QueryStringQuery + { queryStringQuery :: QueryString + , queryStringDefaultField :: Maybe FieldName + , queryStringOperator :: Maybe BooleanOperator + , queryStringAnalyzer :: Maybe Analyzer + , queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard + , queryStringLowercaseExpanded :: Maybe LowercaseExpanded + , queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements + , queryStringFuzzyMaxExpansions :: Maybe MaxExpansions + , queryStringFuzziness :: Maybe Fuzziness + , queryStringFuzzyPrefixLength :: Maybe PrefixLength + , queryStringPhraseSlop :: Maybe PhraseSlop + , queryStringBoost :: Maybe Boost + , queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard + , queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries + , queryStringMinimumShouldMatch :: Maybe MinimumMatch + , queryStringLenient :: Maybe Lenient + , queryStringLocale :: Maybe Locale + } deriving (Eq, Show) + + +instance ToJSON QueryStringQuery where + toJSON (QueryStringQuery qsQueryString + qsDefaultField qsOperator + qsAnalyzer qsAllowWildcard + qsLowercaseExpanded qsEnablePositionIncrements + qsFuzzyMaxExpansions qsFuzziness + qsFuzzyPrefixLength qsPhraseSlop + qsBoost qsAnalyzeWildcard + qsGeneratePhraseQueries qsMinimumShouldMatch + qsLenient qsLocale) = + omitNulls base + where + base = [ "query" .= qsQueryString + , "default_field" .= qsDefaultField + , "default_operator" .= qsOperator + , "analyzer" .= qsAnalyzer + , "allow_leading_wildcard" .= qsAllowWildcard + , "lowercase_expanded_terms" .= qsLowercaseExpanded + , "enable_position_increments" .= qsEnablePositionIncrements + , "fuzzy_max_expansions" .= qsFuzzyMaxExpansions + , "fuzziness" .= qsFuzziness + , "fuzzy_prefix_length" .= qsFuzzyPrefixLength + , "phrase_slop" .= qsPhraseSlop + , "boost" .= qsBoost + , "analyze_wildcard" .= qsAnalyzeWildcard + , "auto_generate_phrase_queries" .= qsGeneratePhraseQueries + , "minimum_should_match" .= qsMinimumShouldMatch + , "lenient" .= qsLenient + , "locale" .= qsLocale ] + +instance FromJSON QueryStringQuery where + parseJSON = withObject "QueryStringQuery" parse + where parse o = QueryStringQuery + <$> o .: "query" + <*> o .:? "default_field" + <*> o .:? "default_operator" + <*> o .:? "analyzer" + <*> o .:? "allow_leading_wildcard" + <*> o .:? "lowercase_expanded_terms" + <*> o .:? "enable_position_increments" + <*> o .:? "fuzzy_max_expansions" + <*> o .:? "fuzziness" + <*> o .:? "fuzzy_prefix_length" + <*> o .:? "phrase_slop" + <*> o .:? "boost" + <*> o .:? "analyze_wildcard" + <*> o .:? "auto_generate_phrase_queries" + <*> o .:? "minimum_should_match" + <*> o .:? "lenient" + <*> o .:? 
"locale" + +mkQueryStringQuery :: QueryString -> QueryStringQuery +mkQueryStringQuery qs = + QueryStringQuery qs Nothing Nothing + Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing + Nothing Nothing + +data FieldOrFields = FofField FieldName + | FofFields (NonEmpty FieldName) deriving (Eq, Show) + +instance ToJSON FieldOrFields where + toJSON (FofField fieldName) = + toJSON fieldName + toJSON (FofFields fieldNames) = + toJSON fieldNames + +instance FromJSON FieldOrFields where + parseJSON v = FofField <$> parseJSON v + <|> FofFields <$> (parseNEJSON =<< parseJSON v) + +data PrefixQuery = + PrefixQuery + { prefixQueryField :: FieldName + , prefixQueryPrefixValue :: Text + , prefixQueryBoost :: Maybe Boost } deriving (Eq, Show) + +instance ToJSON PrefixQuery where + toJSON (PrefixQuery (FieldName fieldName) queryValue boost) = + object [ fieldName .= omitNulls base ] + where base = [ "value" .= queryValue + , "boost" .= boost ] + +instance FromJSON PrefixQuery where + parseJSON = withObject "PrefixQuery" parse + where parse = fieldTagged $ \fn o -> + PrefixQuery fn + <$> o .: "value" + <*> o .:? "boost" + +data NestedQuery = + NestedQuery + { nestedQueryPath :: QueryPath + , nestedQueryScoreType :: ScoreType + , nestedQuery :: Query } deriving (Eq, Show) + +instance ToJSON NestedQuery where + toJSON (NestedQuery nqPath nqScoreType nqQuery) = + object [ "path" .= nqPath + , "score_mode" .= nqScoreType + , "query" .= nqQuery ] + +instance FromJSON NestedQuery where + parseJSON = withObject "NestedQuery" parse + where parse o = NestedQuery + <$> o .: "path" + <*> o .: "score_mode" + <*> o .: "query" + +data MoreLikeThisFieldQuery = + MoreLikeThisFieldQuery + { moreLikeThisFieldText :: Text + , moreLikeThisFieldFields :: FieldName + -- default 0.3 (30%) + , moreLikeThisFieldPercentMatch :: Maybe PercentMatch + , moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency + , moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms + , moreLikeThisFieldStopWords :: Maybe (NonEmpty StopWord) + , moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency + , moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency + , moreLikeThisFieldMinWordLength :: Maybe MinWordLength + , moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength + , moreLikeThisFieldBoostTerms :: Maybe BoostTerms + , moreLikeThisFieldBoost :: Maybe Boost + , moreLikeThisFieldAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + + +instance ToJSON MoreLikeThisFieldQuery where + toJSON (MoreLikeThisFieldQuery text (FieldName fieldName) + percent mtf mqt stopwords mindf maxdf + minwl maxwl boostTerms boost analyzer) = + object [ fieldName .= omitNulls base ] + where base = [ "like_text" .= text + , "percent_terms_to_match" .= percent + , "min_term_freq" .= mtf + , "max_query_terms" .= mqt + , "stop_words" .= stopwords + , "min_doc_freq" .= mindf + , "max_doc_freq" .= maxdf + , "min_word_length" .= minwl + , "max_word_length" .= maxwl + , "boost_terms" .= boostTerms + , "boost" .= boost + , "analyzer" .= analyzer ] + +instance FromJSON MoreLikeThisFieldQuery where + parseJSON = withObject "MoreLikeThisFieldQuery" parse + where parse = fieldTagged $ \fn o -> + MoreLikeThisFieldQuery + <$> o .: "like_text" + <*> pure fn + <*> o .:? "percent_terms_to_match" + <*> o .:? "min_term_freq" + <*> o .:? "max_query_terms" + -- <*> (optionalNE =<< o .:? "stop_words") + <*> o .:? "stop_words" + <*> o .:? "min_doc_freq" + <*> o .:? "max_doc_freq" + <*> o .:? "min_word_length" + <*> o .:? 
"max_word_length" + <*> o .:? "boost_terms" + <*> o .:? "boost" + <*> o .:? "analyzer" + -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) + +data MoreLikeThisQuery = + MoreLikeThisQuery + { moreLikeThisText :: Text + , moreLikeThisFields :: Maybe (NonEmpty FieldName) + -- default 0.3 (30%) + , moreLikeThisPercentMatch :: Maybe PercentMatch + , moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency + , moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms + , moreLikeThisStopWords :: Maybe (NonEmpty StopWord) + , moreLikeThisMinDocFrequency :: Maybe MinDocFrequency + , moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency + , moreLikeThisMinWordLength :: Maybe MinWordLength + , moreLikeThisMaxWordLength :: Maybe MaxWordLength + , moreLikeThisBoostTerms :: Maybe BoostTerms + , moreLikeThisBoost :: Maybe Boost + , moreLikeThisAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + + +instance ToJSON MoreLikeThisQuery where + toJSON (MoreLikeThisQuery text fields percent + mtf mqt stopwords mindf maxdf + minwl maxwl boostTerms boost analyzer) = + omitNulls base + where base = [ "like_text" .= text + , "fields" .= fields + , "percent_terms_to_match" .= percent + , "min_term_freq" .= mtf + , "max_query_terms" .= mqt + , "stop_words" .= stopwords + , "min_doc_freq" .= mindf + , "max_doc_freq" .= maxdf + , "min_word_length" .= minwl + , "max_word_length" .= maxwl + , "boost_terms" .= boostTerms + , "boost" .= boost + , "analyzer" .= analyzer ] + +instance FromJSON MoreLikeThisQuery where + parseJSON = withObject "MoreLikeThisQuery" parse + where parse o = MoreLikeThisQuery + <$> o .: "like_text" + -- <*> (optionalNE =<< o .:? "fields") + <*> o .:? "fields" + <*> o .:? "percent_terms_to_match" + <*> o .:? "min_term_freq" + <*> o .:? "max_query_terms" + -- <*> (optionalNE =<< o .:? "stop_words") + <*> o .:? "stop_words" + <*> o .:? "min_doc_freq" + <*> o .:? "max_doc_freq" + <*> o .:? "min_word_length" + <*> o .:? "max_word_length" + <*> o .:? "boost_terms" + <*> o .:? "boost" + <*> o .:? "analyzer" + -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) + +data IndicesQuery = + IndicesQuery + { indicesQueryIndices :: [IndexName] + , indicesQuery :: Query + -- default "all" + , indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show) + + +instance ToJSON IndicesQuery where + toJSON (IndicesQuery indices query noMatch) = + omitNulls [ "indices" .= indices + , "no_match_query" .= noMatch + , "query" .= query ] + +instance FromJSON IndicesQuery where + parseJSON = withObject "IndicesQuery" parse + where parse o = IndicesQuery + <$> o .:? "indices" .!= [] + <*> o .: "query" + <*> o .:? "no_match_query" + +data HasParentQuery = + HasParentQuery + { hasParentQueryType :: TypeName + , hasParentQuery :: Query + , hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Show) + +instance ToJSON HasParentQuery where + toJSON (HasParentQuery queryType query scoreType) = + omitNulls [ "parent_type" .= queryType + , "score_type" .= scoreType + , "query" .= query ] + +instance FromJSON HasParentQuery where + parseJSON = withObject "HasParentQuery" parse + where parse o = HasParentQuery + <$> o .: "parent_type" + <*> o .: "query" + <*> o .:? 
"score_type" + +data HasChildQuery = + HasChildQuery + { hasChildQueryType :: TypeName + , hasChildQuery :: Query + , hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Show) + +instance ToJSON HasChildQuery where + toJSON (HasChildQuery queryType query scoreType) = + omitNulls [ "query" .= query + , "score_type" .= scoreType + , "type" .= queryType ] + +instance FromJSON HasChildQuery where + parseJSON = withObject "HasChildQuery" parse + where parse o = HasChildQuery + <$> o .: "type" + <*> o .: "query" + <*> o .:? "score_type" + +data ScoreType = + ScoreTypeMax + | ScoreTypeSum + | ScoreTypeAvg + | ScoreTypeNone deriving (Eq, Show) + +instance ToJSON ScoreType where + toJSON ScoreTypeMax = "max" + toJSON ScoreTypeAvg = "avg" + toJSON ScoreTypeSum = "sum" + toJSON ScoreTypeNone = "none" + +instance FromJSON ScoreType where + parseJSON = withText "ScoreType" parse + where parse "max" = pure ScoreTypeMax + parse "avg" = pure ScoreTypeAvg + parse "sum" = pure ScoreTypeSum + parse "none" = pure ScoreTypeNone + parse t = fail ("Unexpected ScoreType: " <> show t) + +data FuzzyQuery = + FuzzyQuery { fuzzyQueryField :: FieldName + , fuzzyQueryValue :: Text + , fuzzyQueryPrefixLength :: PrefixLength + , fuzzyQueryMaxExpansions :: MaxExpansions + , fuzzyQueryFuzziness :: Fuzziness + , fuzzyQueryBoost :: Maybe Boost + } deriving (Eq, Show) + + +instance ToJSON FuzzyQuery where + toJSON (FuzzyQuery (FieldName fieldName) queryText + prefixLength maxEx fuzziness boost) = + object [ fieldName .= omitNulls base ] + where base = [ "value" .= queryText + , "fuzziness" .= fuzziness + , "prefix_length" .= prefixLength + , "boost" .= boost + , "max_expansions" .= maxEx ] + +instance FromJSON FuzzyQuery where + parseJSON = withObject "FuzzyQuery" parse + where parse = fieldTagged $ \fn o -> + FuzzyQuery fn + <$> o .: "value" + <*> o .: "prefix_length" + <*> o .: "max_expansions" + <*> o .: "fuzziness" + <*> o .:? "boost" + +data FuzzyLikeFieldQuery = + FuzzyLikeFieldQuery + { fuzzyLikeField :: FieldName + -- anaphora is good for the soul. + , fuzzyLikeFieldText :: Text + , fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms + , fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency + , fuzzyLikeFieldFuzziness :: Fuzziness + , fuzzyLikeFieldPrefixLength :: PrefixLength + , fuzzyLikeFieldBoost :: Boost + , fuzzyLikeFieldAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + + +instance ToJSON FuzzyLikeFieldQuery where + toJSON (FuzzyLikeFieldQuery (FieldName fieldName) + fieldText maxTerms ignoreFreq fuzziness prefixLength + boost analyzer) = + object [ fieldName .= + omitNulls [ "like_text" .= fieldText + , "max_query_terms" .= maxTerms + , "ignore_tf" .= ignoreFreq + , "fuzziness" .= fuzziness + , "prefix_length" .= prefixLength + , "analyzer" .= analyzer + , "boost" .= boost ]] + +instance FromJSON FuzzyLikeFieldQuery where + parseJSON = withObject "FuzzyLikeFieldQuery" parse + where parse = fieldTagged $ \fn o -> + FuzzyLikeFieldQuery fn + <$> o .: "like_text" + <*> o .: "max_query_terms" + <*> o .: "ignore_tf" + <*> o .: "fuzziness" + <*> o .: "prefix_length" + <*> o .: "boost" + <*> o .:? 
"analyzer" + +data FuzzyLikeThisQuery = + FuzzyLikeThisQuery + { fuzzyLikeFields :: [FieldName] + , fuzzyLikeText :: Text + , fuzzyLikeMaxQueryTerms :: MaxQueryTerms + , fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency + , fuzzyLikeFuzziness :: Fuzziness + , fuzzyLikePrefixLength :: PrefixLength + , fuzzyLikeBoost :: Boost + , fuzzyLikeAnalyzer :: Maybe Analyzer + } deriving (Eq, Show) + + +instance ToJSON FuzzyLikeThisQuery where + toJSON (FuzzyLikeThisQuery fields text maxTerms + ignoreFreq fuzziness prefixLength boost analyzer) = + omitNulls base + where base = [ "fields" .= fields + , "like_text" .= text + , "max_query_terms" .= maxTerms + , "ignore_tf" .= ignoreFreq + , "fuzziness" .= fuzziness + , "prefix_length" .= prefixLength + , "analyzer" .= analyzer + , "boost" .= boost ] + +instance FromJSON FuzzyLikeThisQuery where + parseJSON = withObject "FuzzyLikeThisQuery" parse + where parse o = FuzzyLikeThisQuery + <$> o .:? "fields" .!= [] + <*> o .: "like_text" + <*> o .: "max_query_terms" + <*> o .: "ignore_tf" + <*> o .: "fuzziness" + <*> o .: "prefix_length" + <*> o .: "boost" + <*> o .:? "analyzer" + +data DisMaxQuery = + DisMaxQuery { disMaxQueries :: [Query] + -- default 0.0 + , disMaxTiebreaker :: Tiebreaker + , disMaxBoost :: Maybe Boost + } deriving (Eq, Show) + + +instance ToJSON DisMaxQuery where + toJSON (DisMaxQuery queries tiebreaker boost) = + omitNulls base + where base = [ "queries" .= queries + , "boost" .= boost + , "tie_breaker" .= tiebreaker ] + +instance FromJSON DisMaxQuery where + parseJSON = withObject "DisMaxQuery" parse + where parse o = DisMaxQuery + <$> o .:? "queries" .!= [] + <*> o .: "tie_breaker" + <*> o .:? "boost" + +data MatchQuery = MatchQuery + { matchQueryField :: FieldName + , matchQueryQueryString :: QueryString + , matchQueryOperator :: BooleanOperator + , matchQueryZeroTerms :: ZeroTermsQuery + , matchQueryCutoffFrequency :: Maybe CutoffFrequency + , matchQueryMatchType :: Maybe MatchQueryType + , matchQueryAnalyzer :: Maybe Analyzer + , matchQueryMaxExpansions :: Maybe MaxExpansions + , matchQueryLenient :: Maybe Lenient + , matchQueryBoost :: Maybe Boost + , matchQueryMinimumShouldMatch :: Maybe Text + } deriving (Eq, Show) + + +instance ToJSON MatchQuery where + toJSON (MatchQuery (FieldName fieldName) + (QueryString mqQueryString) booleanOperator + zeroTermsQuery cutoffFrequency matchQueryType + analyzer maxExpansions lenient boost + minShouldMatch + ) = + object [ fieldName .= omitNulls base ] + where base = [ "query" .= mqQueryString + , "operator" .= booleanOperator + , "zero_terms_query" .= zeroTermsQuery + , "cutoff_frequency" .= cutoffFrequency + , "type" .= matchQueryType + , "analyzer" .= analyzer + , "max_expansions" .= maxExpansions + , "lenient" .= lenient + , "boost" .= boost + , "minimum_should_match" .= minShouldMatch + ] + +instance FromJSON MatchQuery where + parseJSON = withObject "MatchQuery" parse + where parse = fieldTagged $ \fn o -> + MatchQuery fn + <$> o .: "query" + <*> o .: "operator" + <*> o .: "zero_terms_query" + <*> o .:? "cutoff_frequency" + <*> o .:? "type" + <*> o .:? "analyzer" + <*> o .:? "max_expansions" + <*> o .:? "lenient" + <*> o .:? "boost" + <*> o .:? 
"minimum_should_match" + +{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters, + enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery' +-} +mkMatchQuery :: FieldName -> QueryString -> MatchQuery +mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing Nothing + +data MatchQueryType = + MatchPhrase + | MatchPhrasePrefix deriving (Eq, Show) + +instance ToJSON MatchQueryType where + toJSON MatchPhrase = "phrase" + toJSON MatchPhrasePrefix = "phrase_prefix" + +instance FromJSON MatchQueryType where + parseJSON = withText "MatchQueryType" parse + where parse "phrase" = pure MatchPhrase + parse "phrase_prefix" = pure MatchPhrasePrefix + parse t = fail ("Unexpected MatchQueryType: " <> show t) + +data MultiMatchQuery = MultiMatchQuery + { multiMatchQueryFields :: [FieldName] + , multiMatchQueryString :: QueryString + , multiMatchQueryOperator :: BooleanOperator + , multiMatchQueryZeroTerms :: ZeroTermsQuery + , multiMatchQueryTiebreaker :: Maybe Tiebreaker + , multiMatchQueryType :: Maybe MultiMatchQueryType + , multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency + , multiMatchQueryAnalyzer :: Maybe Analyzer + , multiMatchQueryMaxExpansions :: Maybe MaxExpansions + , multiMatchQueryLenient :: Maybe Lenient + } deriving (Eq, Show) + +instance ToJSON MultiMatchQuery where + toJSON (MultiMatchQuery fields (QueryString query) boolOp + ztQ tb mmqt cf analyzer maxEx lenient) = + object ["multi_match" .= omitNulls base] + where base = [ "fields" .= fmap toJSON fields + , "query" .= query + , "operator" .= boolOp + , "zero_terms_query" .= ztQ + , "tie_breaker" .= tb + , "type" .= mmqt + , "cutoff_frequency" .= cf + , "analyzer" .= analyzer + , "max_expansions" .= maxEx + , "lenient" .= lenient ] + +instance FromJSON MultiMatchQuery where + parseJSON = withObject "MultiMatchQuery" parse + where parse raw = do o <- raw .: "multi_match" + MultiMatchQuery + <$> o .:? "fields" .!= [] + <*> o .: "query" + <*> o .: "operator" + <*> o .: "zero_terms_query" + <*> o .:? "tie_breaker" + <*> o .:? "type" + <*> o .:? "cutoff_frequency" + <*> o .:? "analyzer" + <*> o .:? "max_expansions" + <*> o .:? "lenient" + +{-| 'mkMultiMatchQuery' is a convenience function that defaults the less common parameters, + enabling you to provide only the list of 'FieldName's and 'QueryString' to + make a 'MultiMatchQuery'. 
+-} + +mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery +mkMultiMatchQuery matchFields query = + MultiMatchQuery matchFields query + Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing + +data MultiMatchQueryType = + MultiMatchBestFields + | MultiMatchMostFields + | MultiMatchCrossFields + | MultiMatchPhrase + | MultiMatchPhrasePrefix deriving (Eq, Show) + +instance ToJSON MultiMatchQueryType where + toJSON MultiMatchBestFields = "best_fields" + toJSON MultiMatchMostFields = "most_fields" + toJSON MultiMatchCrossFields = "cross_fields" + toJSON MultiMatchPhrase = "phrase" + toJSON MultiMatchPhrasePrefix = "phrase_prefix" + +instance FromJSON MultiMatchQueryType where + parseJSON = withText "MultiMatchPhrasePrefix" parse + where parse "best_fields" = pure MultiMatchBestFields + parse "most_fields" = pure MultiMatchMostFields + parse "cross_fields" = pure MultiMatchCrossFields + parse "phrase" = pure MultiMatchPhrase + parse "phrase_prefix" = pure MultiMatchPhrasePrefix + parse t = fail ("Unexpected MultiMatchPhrasePrefix: " <> show t) + +data BoolQuery = + BoolQuery { boolQueryMustMatch :: [Query] + , boolQueryFilter :: [Filter] + , boolQueryMustNotMatch :: [Query] + , boolQueryShouldMatch :: [Query] + , boolQueryMinimumShouldMatch :: Maybe MinimumMatch + , boolQueryBoost :: Maybe Boost + , boolQueryDisableCoord :: Maybe DisableCoord + } deriving (Eq, Show) + + +instance ToJSON BoolQuery where + toJSON (BoolQuery mustM filterM' notM shouldM bqMin boost disableCoord) = + omitNulls base + where base = [ "must" .= mustM + , "filter" .= filterM' + , "must_not" .= notM + , "should" .= shouldM + , "minimum_should_match" .= bqMin + , "boost" .= boost + , "disable_coord" .= disableCoord ] + +instance FromJSON BoolQuery where + parseJSON = withObject "BoolQuery" parse + where parse o = BoolQuery + <$> o .:? "must" .!= [] + <*> o .:? "filter" .!= [] + <*> o .:? "must_not" .!= [] + <*> o .:? "should" .!= [] + <*> o .:? "minimum_should_match" + <*> o .:? "boost" + <*> o .:? 
"disable_coord" + +mkBoolQuery :: [Query] -> [Filter] -> [Query] -> [Query] -> BoolQuery +mkBoolQuery must filt mustNot should = + BoolQuery must filt mustNot should Nothing Nothing Nothing + +data BoostingQuery = + BoostingQuery { positiveQuery :: Query + , negativeQuery :: Query + , negativeBoost :: Boost } deriving (Eq, Show) + +instance ToJSON BoostingQuery where + toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) = + object [ "positive" .= bqPositiveQuery + , "negative" .= bqNegativeQuery + , "negative_boost" .= bqNegativeBoost ] + +instance FromJSON BoostingQuery where + parseJSON = withObject "BoostingQuery" parse + where parse o = BoostingQuery + <$> o .: "positive" + <*> o .: "negative" + <*> o .: "negative_boost" + +data CommonTermsQuery = + CommonTermsQuery { commonField :: FieldName + , commonQuery :: QueryString + , commonCutoffFrequency :: CutoffFrequency + , commonLowFreqOperator :: BooleanOperator + , commonHighFreqOperator :: BooleanOperator + , commonMinimumShouldMatch :: Maybe CommonMinimumMatch + , commonBoost :: Maybe Boost + , commonAnalyzer :: Maybe Analyzer + , commonDisableCoord :: Maybe DisableCoord + } deriving (Eq, Show) + + +instance ToJSON CommonTermsQuery where + toJSON (CommonTermsQuery (FieldName fieldName) + (QueryString query) cf lfo hfo msm + boost analyzer disableCoord) = + object [fieldName .= omitNulls base ] + where base = [ "query" .= query + , "cutoff_frequency" .= cf + , "low_freq_operator" .= lfo + , "minimum_should_match" .= msm + , "boost" .= boost + , "analyzer" .= analyzer + , "disable_coord" .= disableCoord + , "high_freq_operator" .= hfo ] + +instance FromJSON CommonTermsQuery where + parseJSON = withObject "CommonTermsQuery" parse + where parse = fieldTagged $ \fn o -> + CommonTermsQuery fn + <$> o .: "query" + <*> o .: "cutoff_frequency" + <*> o .: "low_freq_operator" + <*> o .: "high_freq_operator" + <*> o .:? "minimum_should_match" + <*> o .:? "boost" + <*> o .:? "analyzer" + <*> o .:? "disable_coord" + +data CommonMinimumMatch = + CommonMinimumMatchHighLow MinimumMatchHighLow + | CommonMinimumMatch MinimumMatch + deriving (Eq, Show) + + +instance ToJSON CommonMinimumMatch where + toJSON (CommonMinimumMatch mm) = toJSON mm + toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) = + object [ "low_freq" .= lowF + , "high_freq" .= highF ] + +instance FromJSON CommonMinimumMatch where + parseJSON v = parseMinimum v + <|> parseMinimumHighLow v + where parseMinimum = fmap CommonMinimumMatch . parseJSON + parseMinimumHighLow = fmap CommonMinimumMatchHighLow . 
withObject "CommonMinimumMatchHighLow" (\o -> + MinimumMatchHighLow + <$> o .: "low_freq" + <*> o .: "high_freq") + +data MinimumMatchHighLow = + MinimumMatchHighLow { lowFreq :: MinimumMatch + , highFreq :: MinimumMatch } deriving (Eq, Show) + +data ZeroTermsQuery = + ZeroTermsNone + | ZeroTermsAll deriving (Eq, Show) + +instance ToJSON ZeroTermsQuery where + toJSON ZeroTermsNone = String "none" + toJSON ZeroTermsAll = String "all" + +instance FromJSON ZeroTermsQuery where + parseJSON = withText "ZeroTermsQuery" parse + where parse "none" = pure ZeroTermsNone + parse "all" = pure ZeroTermsAll + parse q = fail ("Unexpected ZeroTermsQuery: " <> show q) + +data RangeExecution = RangeExecutionIndex + | RangeExecutionFielddata deriving (Eq, Show) + +-- index for smaller ranges, fielddata for longer ranges +instance ToJSON RangeExecution where + toJSON RangeExecutionIndex = "index" + toJSON RangeExecutionFielddata = "fielddata" + + +instance FromJSON RangeExecution where + parseJSON = withText "RangeExecution" parse + where parse "index" = pure RangeExecutionIndex + parse "fielddata" = pure RangeExecutionFielddata + parse t = fail ("Unrecognized RangeExecution: " <> show t) + +newtype Regexp = Regexp Text deriving (Eq, Show, FromJSON) + +data RegexpFlags = AllRegexpFlags + | NoRegexpFlags + | SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Show) + +instance ToJSON RegexpFlags where + toJSON AllRegexpFlags = String "ALL" + toJSON NoRegexpFlags = String "NONE" + toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs + where flagStrs = map flagStr . nub $ h:fs + flagStr AnyString = "ANYSTRING" + flagStr Automaton = "AUTOMATON" + flagStr Complement = "COMPLEMENT" + flagStr Empty = "EMPTY" + flagStr Intersection = "INTERSECTION" + flagStr Interval = "INTERVAL" + +instance FromJSON RegexpFlags where + parseJSON = withText "RegexpFlags" parse + where parse "ALL" = pure AllRegexpFlags + parse "NONE" = pure NoRegexpFlags + parse t = SomeRegexpFlags <$> parseNEJSON (String <$> T.splitOn "|" t) + +data RegexpFlag = AnyString + | Automaton + | Complement + | Empty + | Intersection + | Interval deriving (Eq, Show) + +instance FromJSON RegexpFlag where + parseJSON = withText "RegexpFlag" parse + where parse "ANYSTRING" = pure AnyString + parse "AUTOMATON" = pure Automaton + parse "COMPLEMENT" = pure Complement + parse "EMPTY" = pure Empty + parse "INTERSECTION" = pure Intersection + parse "INTERVAL" = pure Interval + parse f = fail ("Unknown RegexpFlag: " <> show f) + +newtype LessThan = LessThan Double deriving (Eq, Show) +newtype LessThanEq = LessThanEq Double deriving (Eq, Show) +newtype GreaterThan = GreaterThan Double deriving (Eq, Show) +newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Show) + +newtype LessThanD = LessThanD UTCTime deriving (Eq, Show) +newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Show) +newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Show) +newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Show) + +data RangeValue = RangeDateLte LessThanEqD + | RangeDateLt LessThanD + | RangeDateGte GreaterThanEqD + | RangeDateGt GreaterThanD + | RangeDateGtLt GreaterThanD LessThanD + | RangeDateGteLte GreaterThanEqD LessThanEqD + | RangeDateGteLt GreaterThanEqD LessThanD + | RangeDateGtLte GreaterThanD LessThanEqD + | RangeDoubleLte LessThanEq + | RangeDoubleLt LessThan + | RangeDoubleGte GreaterThanEq + | RangeDoubleGt GreaterThan + | RangeDoubleGtLt GreaterThan LessThan + | RangeDoubleGteLte GreaterThanEq LessThanEq + | 
RangeDoubleGteLt GreaterThanEq LessThan + | RangeDoubleGtLte GreaterThan LessThanEq + deriving (Eq, Show) + + +parseRangeValue :: ( FromJSON t4 + , FromJSON t3 + , FromJSON t2 + , FromJSON t1 + ) + => (t3 -> t5) + -> (t1 -> t6) + -> (t4 -> t7) + -> (t2 -> t8) + -> (t5 -> t6 -> b) + -> (t7 -> t6 -> b) + -> (t5 -> t8 -> b) + -> (t7 -> t8 -> b) + -> (t5 -> b) + -> (t6 -> b) + -> (t7 -> b) + -> (t8 -> b) + -> Parser b + -> Object + -> Parser b +parseRangeValue mkGt mkLt mkGte mkLte + fGtLt fGteLt fGtLte fGteLte + fGt fLt fGte fLte nada o = do + lt <- o .:? "lt" + lte <- o .:? "lte" + gt <- o .:? "gt" + gte <- o .:? "gte" + case (lt, lte, gt, gte) of + (Just a, _, Just b, _) -> + return (fGtLt (mkGt b) (mkLt a)) + (Just a, _, _, Just b) -> + return (fGteLt (mkGte b) (mkLt a)) + (_, Just a, Just b, _) -> + return (fGtLte (mkGt b) (mkLte a)) + (_, Just a, _, Just b) -> + return (fGteLte (mkGte b) (mkLte a)) + (_, _, Just a, _) -> + return (fGt (mkGt a)) + (Just a, _, _, _) -> + return (fLt (mkLt a)) + (_, _, _, Just a) -> + return (fGte (mkGte a)) + (_, Just a, _, _) -> + return (fLte (mkLte a)) + (Nothing, Nothing, Nothing, Nothing) -> + nada + + +instance FromJSON RangeValue where + parseJSON = withObject "RangeValue" parse + where parse o = parseDate o + <|> parseDouble o + parseDate o = + parseRangeValue + GreaterThanD LessThanD + GreaterThanEqD LessThanEqD + RangeDateGtLt RangeDateGteLt + RangeDateGtLte RangeDateGteLte + RangeDateGt RangeDateLt + RangeDateGte RangeDateLte + mzero o + parseDouble o = + parseRangeValue + GreaterThan LessThan + GreaterThanEq LessThanEq + RangeDoubleGtLt RangeDoubleGteLt + RangeDoubleGtLte RangeDoubleGteLte + RangeDoubleGt RangeDoubleLt + RangeDoubleGte RangeDoubleLte + mzero o + +rangeValueToPair :: RangeValue -> [Pair] +rangeValueToPair rv = case rv of + RangeDateLte (LessThanEqD t) -> ["lte" .= t] + RangeDateGte (GreaterThanEqD t) -> ["gte" .= t] + RangeDateLt (LessThanD t) -> ["lt" .= t] + RangeDateGt (GreaterThanD t) -> ["gt" .= t] + RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g] + RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g] + RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g] + RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g] + RangeDoubleLte (LessThanEq t) -> ["lte" .= t] + RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t] + RangeDoubleLt (LessThan t) -> ["lt" .= t] + RangeDoubleGt (GreaterThan t) -> ["gt" .= t] + RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g] + RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g] + RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g] + RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g] + +data Term = Term { termField :: Text + , termValue :: Text } deriving (Eq, Show) + +instance ToJSON Term where + toJSON (Term field value) = object ["term" .= object + [field .= value]] + +instance FromJSON Term where + parseJSON = withObject "Term" parse + where parse o = do termObj <- o .: "term" + case HM.toList termObj of + [(fn, v)] -> Term fn <$> parseJSON v + _ -> fail "Expected object with 1 field-named key" + +data BoolMatch = MustMatch Term Cache + | MustNotMatch Term Cache + | ShouldMatch [Term] Cache deriving (Eq, Show) + + +instance ToJSON BoolMatch where + toJSON (MustMatch term cache) = object ["must" .= term, + "_cache" .= cache] + toJSON (MustNotMatch term cache) = object ["must_not" .= term, + "_cache" .= cache] + 
toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms, + "_cache" .= cache] + +instance FromJSON BoolMatch where + parseJSON = withObject "BoolMatch" parse + where parse o = mustMatch `taggedWith` "must" + <|> mustNotMatch `taggedWith` "must_not" + <|> shouldMatch `taggedWith` "should" + where taggedWith parser k = parser =<< o .: k + mustMatch t = MustMatch t <$> o .:? "_cache" .!= defaultCache + mustNotMatch t = MustNotMatch t <$> o .:? "_cache" .!= defaultCache + shouldMatch t = ShouldMatch t <$> o .:? "_cache" .!= defaultCache + +-- "memory" or "indexed" +data GeoFilterType = GeoFilterMemory + | GeoFilterIndexed deriving (Eq, Show) + +instance ToJSON GeoFilterType where + toJSON GeoFilterMemory = String "memory" + toJSON GeoFilterIndexed = String "indexed" + +instance FromJSON GeoFilterType where + parseJSON = withText "GeoFilterType" parse + where parse "memory" = pure GeoFilterMemory + parse "indexed" = pure GeoFilterIndexed + parse t = fail ("Unrecognized GeoFilterType: " <> show t) + +data LatLon = LatLon { lat :: Double + , lon :: Double } deriving (Eq, Show) + +instance ToJSON LatLon where + toJSON (LatLon lLat lLon) = + object ["lat" .= lLat + , "lon" .= lLon] + +instance FromJSON LatLon where + parseJSON = withObject "LatLon" parse + where parse o = LatLon <$> o .: "lat" + <*> o .: "lon" + +data GeoBoundingBox = + GeoBoundingBox { topLeft :: LatLon + , bottomRight :: LatLon } deriving (Eq, Show) + +instance ToJSON GeoBoundingBox where + toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) = + object ["top_left" .= gbbTopLeft + , "bottom_right" .= gbbBottomRight] + +instance FromJSON GeoBoundingBox where + parseJSON = withObject "GeoBoundingBox" parse + where parse o = GeoBoundingBox + <$> o .: "top_left" + <*> o .: "bottom_right" + +data GeoBoundingBoxConstraint = + GeoBoundingBoxConstraint { geoBBField :: FieldName + , constraintBox :: GeoBoundingBox + , bbConstraintcache :: Cache + , geoType :: GeoFilterType + } deriving (Eq, Show) + +instance ToJSON GeoBoundingBoxConstraint where + toJSON (GeoBoundingBoxConstraint + (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') = + object [gbbcGeoBBField .= gbbcConstraintBox + , "_cache" .= cache + , "type" .= type'] + +instance FromJSON GeoBoundingBoxConstraint where + parseJSON = withObject "GeoBoundingBoxConstraint" parse + where parse o = case HM.toList (deleteSeveral ["type", "_cache"] o) of + [(fn, v)] -> GeoBoundingBoxConstraint (FieldName fn) + <$> parseJSON v + <*> o .:? 
"_cache" .!= defaultCache + <*> o .: "type" + _ -> fail "Could not find field name for GeoBoundingBoxConstraint" + +data GeoPoint = + GeoPoint { geoField :: FieldName + , latLon :: LatLon} deriving (Eq, Show) + +instance ToJSON GeoPoint where + toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = + object [ geoPointField .= geoPointLatLon ] + +data DistanceUnit = Miles + | Yards + | Feet + | Inches + | Kilometers + | Meters + | Centimeters + | Millimeters + | NauticalMiles deriving (Eq, Show) + +instance ToJSON DistanceUnit where + toJSON Miles = String "mi" + toJSON Yards = String "yd" + toJSON Feet = String "ft" + toJSON Inches = String "in" + toJSON Kilometers = String "km" + toJSON Meters = String "m" + toJSON Centimeters = String "cm" + toJSON Millimeters = String "mm" + toJSON NauticalMiles = String "nmi" + +instance FromJSON DistanceUnit where + parseJSON = withText "DistanceUnit" parse + where parse "mi" = pure Miles + parse "yd" = pure Yards + parse "ft" = pure Feet + parse "in" = pure Inches + parse "km" = pure Kilometers + parse "m" = pure Meters + parse "cm" = pure Centimeters + parse "mm" = pure Millimeters + parse "nmi" = pure NauticalMiles + parse u = fail ("Unrecognized DistanceUnit: " <> show u) + +data DistanceType = Arc + | SloppyArc -- doesn't exist <1.0 + | Plane deriving (Eq, Show) + +instance ToJSON DistanceType where + toJSON Arc = String "arc" + toJSON SloppyArc = String "sloppy_arc" + toJSON Plane = String "plane" + +instance FromJSON DistanceType where + parseJSON = withText "DistanceType" parse + where parse "arc" = pure Arc + parse "sloppy_arc" = pure SloppyArc + parse "plane" = pure Plane + parse t = fail ("Unrecognized DistanceType: " <> show t) + +data OptimizeBbox = OptimizeGeoFilterType GeoFilterType + | NoOptimizeBbox deriving (Eq, Show) + + +instance ToJSON OptimizeBbox where + toJSON NoOptimizeBbox = String "none" + toJSON (OptimizeGeoFilterType gft) = toJSON gft + +instance FromJSON OptimizeBbox where + parseJSON v = withText "NoOptimizeBbox" parseNoOptimize v + <|> parseOptimize v + where parseNoOptimize "none" = pure NoOptimizeBbox + parseNoOptimize _ = mzero + parseOptimize = fmap OptimizeGeoFilterType . parseJSON + +data Distance = + Distance { coefficient :: Double + , unit :: DistanceUnit } deriving (Eq, Show) + + +instance ToJSON Distance where + toJSON (Distance dCoefficient dUnit) = + String boltedTogether where + coefText = showText dCoefficient + (String unitText) = toJSON dUnit + boltedTogether = mappend coefText unitText + +instance FromJSON Distance where + parseJSON = withText "Distance" parse + where parse t = Distance <$> parseCoeff nT + <*> parseJSON (String unitT) + where (nT, unitT) = T.span validForNumber t + -- may be a better way to do this + validForNumber '-' = True + validForNumber '.' = True + validForNumber 'e' = True + validForNumber c = isNumber c + parseCoeff "" = fail "Empty string cannot be parsed as number" + parseCoeff s = return (read (T.unpack s)) + +data DistanceRange = + DistanceRange { distanceFrom :: Distance + , distanceTo :: Distance } deriving (Eq, Show) + +type TemplateQueryKey = Text +type TemplateQueryValue = Text + +newtype TemplateQueryKeyValuePairs = + TemplateQueryKeyValuePairs (HM.HashMap TemplateQueryKey TemplateQueryValue) + deriving (Eq, Show) + +instance ToJSON TemplateQueryKeyValuePairs where + toJSON (TemplateQueryKeyValuePairs x) = + Object $ HM.map toJSON x + +instance FromJSON TemplateQueryKeyValuePairs where + parseJSON (Object o) = + pure . 
TemplateQueryKeyValuePairs $ HM.mapMaybe getValue o + where getValue (String x) = Just x + getValue _ = Nothing + parseJSON _ = + fail "error parsing TemplateQueryKeyValuePairs" + +data TemplateQueryInline = + TemplateQueryInline { inline :: Query + , params :: TemplateQueryKeyValuePairs + } + deriving (Eq, Show) + +instance ToJSON TemplateQueryInline where + toJSON TemplateQueryInline{..} = object [ "inline" .= inline + , "params" .= params + ] + +instance FromJSON TemplateQueryInline where + parseJSON = withObject "TemplateQueryInline" parse + where parse o = TemplateQueryInline + <$> o .: "inline" + <*> o .: "params" + +{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible + JSON encoding baked in. Used all over the place. +-} +data BooleanOperator = And | Or deriving (Eq, Show) + +instance ToJSON BooleanOperator where + toJSON And = String "and" + toJSON Or = String "or" + +instance FromJSON BooleanOperator where + parseJSON = withText "BooleanOperator" parse + where parse "and" = pure And + parse "or" = pure Or + parse o = fail ("Unexpected BooleanOperator: " <> show o) + +{-| 'Cache' is for telling ES whether or not it should cache a 'Filter'. + 'Query's cannot be cached. +-} +type Cache = Bool -- caching on/off +defaultCache :: Cache +defaultCache = False + + +data FunctionScoreQuery = + FunctionScoreQuery { functionScoreQuery :: Maybe Query + , functionScoreBoost :: Maybe Boost + , functionScoreFunctions :: FunctionScoreFunctions + , functionScoreMaxBoost :: Maybe Boost + , functionScoreBoostMode :: Maybe BoostMode + , functionScoreMinScore :: Score + , functionScoreScoreMode :: Maybe ScoreMode + } deriving (Eq, Show) + +instance ToJSON FunctionScoreQuery where + toJSON (FunctionScoreQuery query boost fns maxBoost boostMode minScore scoreMode) = + omitNulls base + where base = functionScoreFunctionsPair fns : + [ "query" .= query + , "boost" .= boost + , "max_boost" .= maxBoost + , "boost_mode" .= boostMode + , "min_score" .= minScore + , "score_mode" .= scoreMode ] + +instance FromJSON FunctionScoreQuery where + parseJSON = withObject "FunctionScoreQuery" parse + where parse o = FunctionScoreQuery + <$> o .:? "query" + <*> o .:? "boost" + <*> (singleFunction o + <|> multipleFunctions `taggedWith` "functions") + <*> o .:? "max_boost" + <*> o .:? "boost_mode" + <*> o .:? "min_score" + <*> o .:? "score_mode" + where taggedWith parser k = parser =<< o .: k + singleFunction = fmap FunctionScoreSingle . parseFunctionScoreFunction + multipleFunctions = pure . FunctionScoreMultiple + +data FunctionScoreFunctions = + FunctionScoreSingle FunctionScoreFunction + | FunctionScoreMultiple (NonEmpty ComponentFunctionScoreFunction) deriving (Eq, Show) + +data ComponentFunctionScoreFunction = + ComponentFunctionScoreFunction { componentScoreFunctionFilter :: Maybe Filter + , componentScoreFunction :: FunctionScoreFunction + , componentScoreFunctionWeight :: Maybe Weight + } deriving (Eq, Show) + +instance ToJSON ComponentFunctionScoreFunction where + toJSON (ComponentFunctionScoreFunction filter' fn weight) = + omitNulls base + where base = functionScoreFunctionPair fn : + [ "filter" .= filter' + , "weight" .= weight ] + +instance FromJSON ComponentFunctionScoreFunction where + parseJSON = withObject "ComponentFunctionScoreFunction" parse + where parse o = ComponentFunctionScoreFunction + <$> o .:? "filter" + <*> parseFunctionScoreFunction o + <*> o .:? "weight" + +functionScoreFunctionsPair :: FunctionScoreFunctions -> (Text, Value) +functionScoreFunctionsPair (FunctionScoreSingle fn) + = functionScoreFunctionPair fn +functionScoreFunctionsPair (FunctionScoreMultiple componentFns) = + ("functions", toJSON componentFns) + +fieldTagged :: Monad m => (FieldName -> Object -> m a) -> Object -> m a +fieldTagged f o = case HM.toList o of + [(k, Object o')] -> f (FieldName k) o' + _ -> fail "Expected object with 1 field-named key" diff --git a/src/Database/V5/Bloodhound/Internal/Sort.hs b/src/Database/V5/Bloodhound/Internal/Sort.hs new file mode 100644 index 0000000..ff4835b --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Sort.hs @@ -0,0 +1,107 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +module Database.V5.Bloodhound.Internal.Sort where + +import Bloodhound.Import + +import Database.V5.Bloodhound.Internal.Newtypes +import Database.V5.Bloodhound.Internal.Query + +{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields. + +http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option +-} +data SortMode = SortMin + | SortMax + | SortSum + | SortAvg deriving (Eq, Show) + +instance ToJSON SortMode where + toJSON SortMin = String "min" + toJSON SortMax = String "max" + toJSON SortSum = String "sum" + toJSON SortAvg = String "avg" + +{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so + that you can concisely describe the usual kind of 'SortSpec's you want. +-} +mkSort :: FieldName -> SortOrder -> DefaultSort +mkSort fieldName sOrder = DefaultSort fieldName sOrder Nothing Nothing Nothing Nothing + +{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order + dependent with later sorts acting as tie-breakers for earlier sorts. +-} +type Sort = [SortSpec] + +{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and + 'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and + 'DistanceUnit' to express "nearness" to a single geographical point as a + sort specification. + + +-} +data SortSpec = + DefaultSortSpec DefaultSort + | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit + deriving (Eq, Show) + +instance ToJSON SortSpec where + toJSON (DefaultSortSpec + (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped + dsSortMode dsMissingSort dsNestedFilter)) = + object [dsSortFieldName .= omitNulls base] where + base = [ "order" .= dsSortOrder + , "unmapped_type" .= dsIgnoreUnmapped + , "mode" .= dsSortMode + , "missing" .= dsMissingSort + , "nested_filter" .= dsNestedFilter ] + + toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = + object [ "unit" .= units + , field .= gdsLatLon + , "order" .= gdsSortOrder ] + +{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a + 'mkSort' convenience function for when you want to specify only the most + common parameters. + + When `Just`, the `ignoreUnmapped` field is used to set the Elasticsearch 'unmapped_type' + + +-} +data DefaultSort = + DefaultSort { sortFieldName :: FieldName + , sortOrder :: SortOrder + -- when Just, sets unmapped_type + , ignoreUnmapped :: Maybe Text + , sortMode :: Maybe SortMode + , missingSort :: Maybe Missing + , nestedFilter :: Maybe Filter } deriving (Eq, Show) + +{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get + encoded into "asc" or "desc" when turned into JSON. 
+ + +-} +data SortOrder = Ascending + | Descending deriving (Eq, Show) + +instance ToJSON SortOrder where + toJSON Ascending = String "asc" + toJSON Descending = String "desc" + +{-| 'Missing' prescribes how to handle missing fields. A missing field can be + sorted last, first, or using a custom value as a substitute. + + +-} +data Missing = LastMissing + | FirstMissing + | CustomMissing Text deriving (Eq, Show) + +instance ToJSON Missing where + toJSON LastMissing = String "_last" + toJSON FirstMissing = String "_first" + toJSON (CustomMissing txt) = String txt diff --git a/src/Database/V5/Bloodhound/Internal/StringlyTyped.hs b/src/Database/V5/Bloodhound/Internal/StringlyTyped.hs new file mode 100644 index 0000000..ea3d41a --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/StringlyTyped.hs @@ -0,0 +1,51 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Database.V5.Bloodhound.Internal.StringlyTyped where + +import Bloodhound.Import + +import qualified Data.Text as T + + +-- This whole module is a sin bucket to deal with Elasticsearch badness. +newtype StringlyTypedDouble = StringlyTypedDouble + { unStringlyTypedDouble :: Double } + +instance FromJSON StringlyTypedDouble where + parseJSON = + fmap StringlyTypedDouble + . parseJSON + . unStringlyTypeJSON + +newtype StringlyTypedInt = StringlyTypedInt + { unStringlyTypedInt :: Int } + +instance FromJSON StringlyTypedInt where + parseJSON = + fmap StringlyTypedInt + . parseJSON + . unStringlyTypeJSON + +newtype StringlyTypedBool = StringlyTypedBool { unStringlyTypedBool :: Bool } + +instance FromJSON StringlyTypedBool where + parseJSON = + fmap StringlyTypedBool + . parseJSON + . unStringlyTypeJSON + +-- | For some reason in several settings APIs, all leaf values get returned +-- as strings. This function attempts to recover from this for all +-- non-recursive JSON types. If nothing can be done, the value is left alone. 
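-- A quick sketch of the intended behavior (inputs invented for illustration):
--
--   unStringlyTypeJSON (String "true")   == Bool True
--   unStringlyTypeJSON (String "2")      == Number 2.0
--   unStringlyTypeJSON (String "banana") == String "banana"  -- not parseable, left alone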
+unStringlyTypeJSON :: Value -> Value +unStringlyTypeJSON (String "true") = + Bool True +unStringlyTypeJSON (String "false") = + Bool False +unStringlyTypeJSON (String "null") = + Null +unStringlyTypeJSON v@(String t) = + case readMay (T.unpack t) of + Just n -> Number n + Nothing -> v +unStringlyTypeJSON v = v diff --git a/src/Database/V5/Bloodhound/Internal/Suggest.hs b/src/Database/V5/Bloodhound/Internal/Suggest.hs new file mode 100644 index 0000000..b002091 --- /dev/null +++ b/src/Database/V5/Bloodhound/Internal/Suggest.hs @@ -0,0 +1,265 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE OverloadedStrings #-} + +module Database.V5.Bloodhound.Internal.Suggest where + +import Bloodhound.Import + +import qualified Data.HashMap.Strict as HM + +import Database.V5.Bloodhound.Internal.Newtypes +import Database.V5.Bloodhound.Internal.Query (TemplateQueryInline(..), params) + +data Suggest = Suggest + { suggestText :: Text + , suggestName :: Text + , suggestType :: SuggestType + } deriving (Eq, Show) + +instance ToJSON Suggest where + toJSON Suggest{..} = + object [ "text" .= suggestText + , suggestName .= suggestType + ] + +instance FromJSON Suggest where + parseJSON (Object o) = do + suggestText' <- o .: "text" + let dropTextList = + HM.toList + $ HM.filterWithKey (\x _ -> x /= "text") o + suggestName' <- + case dropTextList of + [(x, _)] -> return x + _ -> fail "error parsing Suggest field name" + suggestType' <- o .: suggestName' + return $ Suggest suggestText' suggestName' suggestType' + parseJSON x = typeMismatch "Suggest" x + +data SuggestType = + SuggestTypePhraseSuggester PhraseSuggester + deriving (Eq, Show) + +instance ToJSON SuggestType where + toJSON (SuggestTypePhraseSuggester x) = + object [ "phrase" .= x ] + +instance FromJSON SuggestType where + parseJSON = withObject "SuggestType" parse + where parse o = phraseSuggester `taggedWith` "phrase" + where taggedWith parser k = parser =<< o .: k + phraseSuggester = pure . SuggestTypePhraseSuggester + +data PhraseSuggester = PhraseSuggester + { phraseSuggesterField :: FieldName + , phraseSuggesterGramSize :: Maybe Int + , phraseSuggesterRealWordErrorLikelihood :: Maybe Int + , phraseSuggesterConfidence :: Maybe Int + , phraseSuggesterMaxErrors :: Maybe Int + , phraseSuggesterSeparator :: Maybe Text + , phraseSuggesterSize :: Maybe Size + , phraseSuggesterAnalyzer :: Maybe Analyzer + , phraseSuggesterShardSize :: Maybe Int + , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter + , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate + , phraseSuggesterCandidateGenerators :: [DirectGenerators] + } deriving (Eq, Show) + +instance ToJSON PhraseSuggester where + toJSON PhraseSuggester{..} = + omitNulls [ "field" .= phraseSuggesterField + , "gram_size" .= phraseSuggesterGramSize + , "real_word_error_likelihood" .= + phraseSuggesterRealWordErrorLikelihood + , "confidence" .= phraseSuggesterConfidence + , "max_errors" .= phraseSuggesterMaxErrors + , "separator" .= phraseSuggesterSeparator + , "size" .= phraseSuggesterSize + , "analyzer" .= phraseSuggesterAnalyzer + , "shard_size" .= phraseSuggesterShardSize + , "highlight" .= phraseSuggesterHighlight + , "collate" .= phraseSuggesterCollate + , "direct_generator" .= + phraseSuggesterCandidateGenerators + ] + +instance FromJSON PhraseSuggester where + parseJSON = withObject "PhraseSuggester" parse + where parse o = PhraseSuggester + <$> o .: "field" + <*> o .:? "gram_size" + <*> o .:? "real_word_error_likelihood" + <*> o .:? 
"confidence" + <*> o .:? "max_errors" + <*> o .:? "separator" + <*> o .:? "size" + <*> o .:? "analyzer" + <*> o .:? "shard_size" + <*> o .:? "highlight" + <*> o .:? "collate" + <*> o .:? "direct_generator" .!= [] + +mkPhraseSuggester :: FieldName -> PhraseSuggester +mkPhraseSuggester fName = + PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing [] + +data PhraseSuggesterHighlighter = + PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text + , phraseSuggesterHighlighterPostTag :: Text + } + deriving (Eq, Show) + +instance ToJSON PhraseSuggesterHighlighter where + toJSON PhraseSuggesterHighlighter{..} = + object [ "pre_tag" .= phraseSuggesterHighlighterPreTag + , "post_tag" .= phraseSuggesterHighlighterPostTag + ] + +instance FromJSON PhraseSuggesterHighlighter where + parseJSON = withObject "PhraseSuggesterHighlighter" parse + where parse o = PhraseSuggesterHighlighter + <$> o .: "pre_tag" + <*> o .: "post_tag" + +data PhraseSuggesterCollate = PhraseSuggesterCollate + { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline + , phraseSuggesterCollatePrune :: Bool + } deriving (Eq, Show) + +instance ToJSON PhraseSuggesterCollate where + toJSON PhraseSuggesterCollate{..} = + object [ "query" .= object + [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) + ] + , "params" .= (params phraseSuggesterCollateTemplateQuery) + , "prune" .= phraseSuggesterCollatePrune + ] + +instance FromJSON PhraseSuggesterCollate where + parseJSON (Object o) = do + query' <- o .: "query" + inline' <- query' .: "inline" + params' <- o .: "params" + prune' <- o .:? "prune" .!= False + return $ PhraseSuggesterCollate + (TemplateQueryInline inline' params') prune' + parseJSON x = typeMismatch "PhraseSuggesterCollate" x + +data SuggestOptions = + SuggestOptions { suggestOptionsText :: Text + , suggestOptionsScore :: Double + , suggestOptionsFreq :: Maybe Int + , suggestOptionsHighlighted :: Maybe Text + } + deriving (Eq, Read, Show) + +instance FromJSON SuggestOptions where + parseJSON = withObject "SuggestOptions" parse + where + parse o = SuggestOptions + <$> o .: "text" + <*> o .: "score" + <*> o .:? "freq" + <*> o .:? 
"highlighted" + +data SuggestResponse = + SuggestResponse { suggestResponseText :: Text + , suggestResponseOffset :: Int + , suggestResponseLength :: Int + , suggestResponseOptions :: [SuggestOptions] + } + deriving (Eq, Read, Show) + +instance FromJSON SuggestResponse where + parseJSON = withObject "SuggestResponse" parse + where parse o = SuggestResponse + <$> o .: "text" + <*> o .: "offset" + <*> o .: "length" + <*> o .: "options" + +data NamedSuggestionResponse = NamedSuggestionResponse + { nsrName :: Text + , nsrResponses :: [SuggestResponse] + } deriving (Eq, Read, Show) + +instance FromJSON NamedSuggestionResponse where + parseJSON (Object o) = do + suggestionName' <- case HM.toList o of + [(x, _)] -> return x + _ -> fail "error parsing NamedSuggestionResponse name" + suggestionResponses' <- o .: suggestionName' + return $ NamedSuggestionResponse suggestionName' suggestionResponses' + + parseJSON x = typeMismatch "NamedSuggestionResponse" x + +data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing + | DirectGeneratorSuggestModePopular + | DirectGeneratorSuggestModeAlways + deriving (Eq, Show) + +instance ToJSON DirectGeneratorSuggestModeTypes where + toJSON DirectGeneratorSuggestModeMissing = "missing" + toJSON DirectGeneratorSuggestModePopular = "popular" + toJSON DirectGeneratorSuggestModeAlways = "always" + +instance FromJSON DirectGeneratorSuggestModeTypes where + parseJSON = withText "DirectGeneratorSuggestModeTypes" parse + where + parse "missing" = + pure DirectGeneratorSuggestModeMissing + parse "popular" = + pure DirectGeneratorSuggestModePopular + parse "always" = + pure DirectGeneratorSuggestModeAlways + parse f = + fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f) + +data DirectGenerators = DirectGenerators + { directGeneratorsField :: FieldName + , directGeneratorsSize :: Maybe Int + , directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes + , directGeneratorMaxEdits :: Maybe Double + , directGeneratorPrefixLength :: Maybe Int + , directGeneratorMinWordLength :: Maybe Int + , directGeneratorMaxInspections :: Maybe Int + , directGeneratorMinDocFreq :: Maybe Double + , directGeneratorMaxTermFreq :: Maybe Double + , directGeneratorPreFilter :: Maybe Text + , directGeneratorPostFilter :: Maybe Text + } deriving (Eq, Show) + +instance ToJSON DirectGenerators where + toJSON DirectGenerators{..} = + omitNulls [ "field" .= directGeneratorsField + , "size" .= directGeneratorsSize + , "suggest_mode" .= directGeneratorSuggestMode + , "max_edits" .= directGeneratorMaxEdits + , "prefix_length" .= directGeneratorPrefixLength + , "min_word_length" .= directGeneratorMinWordLength + , "max_inspections" .= directGeneratorMaxInspections + , "min_doc_freq" .= directGeneratorMinDocFreq + , "max_term_freq" .= directGeneratorMaxTermFreq + , "pre_filter" .= directGeneratorPreFilter + , "post_filter" .= directGeneratorPostFilter + ] + +instance FromJSON DirectGenerators where + parseJSON = withObject "DirectGenerators" parse + where parse o = DirectGenerators + <$> o .: "field" + <*> o .:? "size" + <*> o .: "suggest_mode" + <*> o .:? "max_edits" + <*> o .:? "prefix_length" + <*> o .:? "min_word_length" + <*> o .:? "max_inspections" + <*> o .:? "min_doc_freq" + <*> o .:? "max_term_freq" + <*> o .:? "pre_filter" + <*> o .:? 
"post_filter" + +mkDirectGenerators :: FieldName -> DirectGenerators +mkDirectGenerators fn = DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing diff --git a/src/Database/V5/Bloodhound/Types.hs b/src/Database/V5/Bloodhound/Types.hs index e91ac7d..cd48760 100644 --- a/src/Database/V5/Bloodhound/Types.hs +++ b/src/Database/V5/Bloodhound/Types.hs @@ -1,10 +1,7 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} --- {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} @@ -13,11 +10,11 @@ ------------------------------------------------------------------------------- -- | -- Module : Database.Bloodhound.Types --- Copyright : (C) 2014, 2015, 2016 Chris Allen +-- Copyright : (C) 2014, 2018 Chris Allen -- License : BSD-style (see the file LICENSE) -- Maintainer : Chris Allen >> :set -XOverloadedStrings --- >>> import Data.Aeson --- >>> import Database.V5.Bloodhound --- >>> let testServer = (Server "http://localhost:9200") --- >>> let testIndex = IndexName "twitter" --- >>> let testMapping = MappingName "tweet" --- >>> let defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2) - --- defaultIndexSettings is exported by Database.Bloodhound as well --- no trailing slashes in servers, library handles building the path. - --- | Create a 'BHEnv' with all optional fields defaulted. HTTP hook --- will be a noop. You can use the exported fields to customize it further, e.g.: --- --- >> (mkBHEnv myServer myManager) { bhRequestHook = customHook } -mkBHEnv :: Server -> Manager -> BHEnv -mkBHEnv s m = BHEnv s m return - -newtype BH m a = BH { - unBH :: ReaderT BHEnv m a - } deriving ( Functor - , A.Applicative - , Monad - , MonadIO - , MonadState s - , MonadWriter w - , MonadError e - , Alternative - , MonadPlus - , MonadFix - , MonadThrow - , MonadCatch - , MonadMask) - -instance MonadTrans BH where - lift = BH . lift - -instance (MonadReader r m) => MonadReader r (BH m) where - ask = lift ask - local f (BH (ReaderT m)) = BH $ ReaderT $ \r -> - local f (m r) - -instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where - getBHEnv = BH getBHEnv - -runBH :: BHEnv -> BH m a -> m a -runBH e f = runReaderT (unBH f) e - -{-| 'Version' is embedded in 'Status' -} -data Version = Version { number :: VersionNumber - , build_hash :: BuildHash - , build_date :: UTCTime - , build_snapshot :: Bool - , lucene_version :: VersionNumber } deriving (Eq, Read, Show, Generic, Typeable) - --- | Traditional software versioning number -newtype VersionNumber = VersionNumber { versionNumber :: Vers.Version - } deriving (Eq, Read, Show, Generic, Typeable, Ord) - -{-| 'Status' is a data type for describing the JSON body returned by - Elasticsearch when you query its status. This was deprecated in 1.2.0. - - --} - -data Status = Status { name :: Text - , cluster_name :: Text - , cluster_uuid :: Text - , version :: Version - , tagline :: Text } deriving (Eq, Read, Show, Generic) - -{-| 'IndexSettings' is used to configure the shards and replicas when you create - an Elasticsearch Index. 
- - --} - -data IndexSettings = - IndexSettings { indexShards :: ShardCount - , indexReplicas :: ReplicaCount } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -} -defaultIndexSettings :: IndexSettings -defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2) - - -{-| 'ForceMergeIndexSettings' is used to configure index optimization. See - - for more info. --} -data ForceMergeIndexSettings = - ForceMergeIndexSettings { maxNumSegments :: Maybe Int - -- ^ Number of segments to optimize to. 1 will fully optimize the index. If omitted, the default behavior is to only optimize if the server deems it necessary. - , onlyExpungeDeletes :: Bool - -- ^ Should the optimize process only expunge segments with deletes in them? If the purpose of the optimization is to free disk space, this should be set to True. - , flushAfterOptimize :: Bool - -- ^ Should a flush be performed after the optimize. - } deriving (Eq, Show, Generic, Typeable) - - -{-| 'defaultForceMergeIndexSettings' implements the default settings that - Elasticsearch uses for index optimization. 'maxNumSegments' is Nothing, - 'onlyExpungeDeletes' is False, and flushAfterOptimize is True. --} -defaultForceMergeIndexSettings :: ForceMergeIndexSettings -defaultForceMergeIndexSettings = ForceMergeIndexSettings Nothing False True - -{-| 'UpdatableIndexSetting' are settings which may be updated after an index is created. - - --} -data UpdatableIndexSetting = NumberOfReplicas ReplicaCount - -- ^ The number of replicas each shard has. - | AutoExpandReplicas ReplicaBounds - | BlocksReadOnly Bool - -- ^ Set to True to have the index read only. False to allow writes and metadata changes. - | BlocksRead Bool - -- ^ Set to True to disable read operations against the index. - | BlocksWrite Bool - -- ^ Set to True to disable write operations against the index. - | BlocksMetaData Bool - -- ^ Set to True to disable metadata operations against the index. - | RefreshInterval NominalDiffTime - -- ^ The async refresh interval of a shard - | IndexConcurrency Int - | FailOnMergeFailure Bool - | TranslogFlushThresholdOps Int - -- ^ When to flush on operations. - | TranslogFlushThresholdSize Bytes - -- ^ When to flush based on translog (bytes) size. - | TranslogFlushThresholdPeriod NominalDiffTime - -- ^ When to flush based on a period of not flushing. - | TranslogDisableFlush Bool - -- ^ Disables flushing. Note, should be set for a short interval and then enabled. - | CacheFilterMaxSize (Maybe Bytes) - -- ^ The maximum size of filter cache (per segment in shard). - | CacheFilterExpire (Maybe NominalDiffTime) - -- ^ The expire after access time for filter cache. - | GatewaySnapshotInterval NominalDiffTime - -- ^ The gateway snapshot interval (only applies to shared gateways). - | RoutingAllocationInclude (NonEmpty NodeAttrFilter) - -- ^ A node matching any rule will be allowed to host shards from the index. - | RoutingAllocationExclude (NonEmpty NodeAttrFilter) - -- ^ A node matching any rule will NOT be allowed to host shards from the index. - | RoutingAllocationRequire (NonEmpty NodeAttrFilter) - -- ^ Only nodes matching all rules will be allowed to host shards from the index. - | RoutingAllocationEnable AllocationPolicy - -- ^ Enables shard allocation for a specific index. - | RoutingAllocationShardsPerNode ShardCount - -- ^ Controls the total number of shards (replicas and primaries) allowed to be allocated on a single node. 
- | RecoveryInitialShards InitialShardCount - -- ^ When using local gateway a particular shard is recovered only if there can be allocated quorum shards in the cluster. - | GCDeletes NominalDiffTime - | TTLDisablePurge Bool - -- ^ Disables temporarily the purge of expired docs. - | TranslogFSType FSType - | CompressionSetting Compression - | IndexCompoundFormat CompoundFormat - | IndexCompoundOnFlush Bool - | WarmerEnabled Bool - | MappingTotalFieldsLimit Int - | AnalysisSetting Analysis - -- ^ Analysis is not a dynamic setting and can only be performed on a closed index. - deriving (Eq, Show, Generic, Typeable) - -data Analysis = Analysis - { analysisAnalyzer :: M.Map Text AnalyzerDefinition - , analysisTokenizer :: M.Map Text TokenizerDefinition - , analysisTokenFilter :: M.Map Text TokenFilterDefinition - } deriving (Eq,Show,Generic,Typeable) - -instance ToJSON Analysis where - toJSON (Analysis analyzer tokenizer tokenFilter) = object - [ "analyzer" .= analyzer - , "tokenizer" .= tokenizer - , "filter" .= tokenFilter - ] - -instance FromJSON Analysis where - parseJSON = withObject "Analysis" $ \m -> Analysis - <$> m .: "analyzer" - <*> m .:? "tokenizer" .!= M.empty - <*> m .:? "filter" .!= M.empty - -data AnalyzerDefinition = AnalyzerDefinition - { analyzerDefinitionTokenizer :: Maybe Tokenizer - , analyzerDefinitionFilter :: [TokenFilter] - } deriving (Eq,Show,Generic,Typeable) - -instance ToJSON AnalyzerDefinition where - toJSON (AnalyzerDefinition tokenizer tokenFilter) = object $ catMaybes - [ fmap ("tokenizer" .=) tokenizer - , Just $ "filter" .= tokenFilter - ] - -instance FromJSON AnalyzerDefinition where - parseJSON = withObject "AnalyzerDefinition" $ \m -> AnalyzerDefinition - <$> m .:? "tokenizer" - <*> m .:? "filter" .!= [] - --- | Token filters are used to create custom analyzers. -data TokenFilterDefinition - = TokenFilterDefinitionLowercase (Maybe Language) - | TokenFilterDefinitionUppercase (Maybe Language) - | TokenFilterDefinitionApostrophe - | TokenFilterDefinitionReverse - | TokenFilterDefinitionSnowball Language - | TokenFilterDefinitionShingle Shingle - deriving (Eq,Show,Generic) - -instance ToJSON TokenFilterDefinition where - toJSON x = case x of - TokenFilterDefinitionLowercase mlang -> object $ catMaybes - [ Just $ "type" .= ("lowercase" :: Text) - , fmap (\lang -> "language" .= languageToText lang) mlang - ] - TokenFilterDefinitionUppercase mlang -> object $ catMaybes - [ Just $ "type" .= ("uppercase" :: Text) - , fmap (\lang -> "language" .= languageToText lang) mlang - ] - TokenFilterDefinitionApostrophe -> object - [ "type" .= ("apostrophe" :: Text) - ] - TokenFilterDefinitionReverse -> object - [ "type" .= ("reverse" :: Text) - ] - TokenFilterDefinitionSnowball lang -> object - [ "type" .= ("snowball" :: Text) - , "language" .= languageToText lang - ] - TokenFilterDefinitionShingle s -> object - [ "type" .= ("shingle" :: Text) - , "max_shingle_size" .= shingleMaxSize s - , "min_shingle_size" .= shingleMinSize s - , "output_unigrams" .= shingleOutputUnigrams s - , "output_unigrams_if_no_shingles" .= shingleOutputUnigramsIfNoShingles s - , "token_separator" .= shingleTokenSeparator s - , "filler_token" .= shingleFillerToken s - ] - -instance FromJSON TokenFilterDefinition where - parseJSON = withObject "TokenFilterDefinition" $ \m -> do - t <- m .: "type" - case (t :: Text) of - "reverse" -> return TokenFilterDefinitionReverse - "apostrophe" -> return TokenFilterDefinitionApostrophe - "lowercase" -> TokenFilterDefinitionLowercase - <$> m .:? 
"language" - "uppercase" -> TokenFilterDefinitionUppercase - <$> m .:? "language" - "snowball" -> TokenFilterDefinitionSnowball - <$> m .: "language" - "shingle" -> fmap TokenFilterDefinitionShingle $ Shingle - <$> (fmap.fmap) unStringlyTypedInt (m .:? "max_shingle_size") .!= 2 - <*> (fmap.fmap) unStringlyTypedInt (m .:? "min_shingle_size") .!= 2 - <*> (fmap.fmap) unStringlyTypedBool (m .:? "output_unigrams") .!= True - <*> (fmap.fmap) unStringlyTypedBool (m .:? "output_unigrams_if_no_shingles") .!= False - <*> m .:? "token_separator" .!= " " - <*> m .:? "filler_token" .!= "_" - _ -> fail ("unrecognized token filter type: " ++ T.unpack t) - --- | The set of languages that can be passed to various analyzers, --- filters, etc. in Elasticsearch. Most data types in this module --- that have a 'Language' field are actually only actually to --- handle a subset of these languages. Consult the official --- Elasticsearch documentation to see what is actually supported. -data Language - = Arabic - | Armenian - | Basque - | Bengali - | Brazilian - | Bulgarian - | Catalan - | Cjk - | Czech - | Danish - | Dutch - | English - | Finnish - | French - | Galician - | German - | German2 - | Greek - | Hindi - | Hungarian - | Indonesian - | Irish - | Italian - | Kp - | Latvian - | Lithuanian - | Lovins - | Norwegian - | Persian - | Porter - | Portuguese - | Romanian - | Russian - | Sorani - | Spanish - | Swedish - | Thai - | Turkish - deriving (Show,Eq,Generic) - -instance ToJSON Language where - toJSON = Data.Aeson.String . languageToText - -instance FromJSON Language where - parseJSON = withText "Language" $ \t -> case languageFromText t of - Nothing -> fail "not a supported Elasticsearch language" - Just lang -> return lang - -languageToText :: Language -> Text -languageToText x = case x of - Arabic -> "arabic" - Armenian -> "armenian" - Basque -> "basque" - Bengali -> "bengali" - Brazilian -> "brazilian" - Bulgarian -> "bulgarian" - Catalan -> "catalan" - Cjk -> "cjk" - Czech -> "czech" - Danish -> "danish" - Dutch -> "dutch" - English -> "english" - Finnish -> "finnish" - French -> "french" - Galician -> "galician" - German -> "german" - German2 -> "german2" - Greek -> "greek" - Hindi -> "hindi" - Hungarian -> "hungarian" - Indonesian -> "indonesian" - Irish -> "irish" - Italian -> "italian" - Kp -> "kp" - Latvian -> "latvian" - Lithuanian -> "lithuanian" - Lovins -> "lovins" - Norwegian -> "norwegian" - Persian -> "persian" - Porter -> "porter" - Portuguese -> "portuguese" - Romanian -> "romanian" - Russian -> "russian" - Sorani -> "sorani" - Spanish -> "spanish" - Swedish -> "swedish" - Thai -> "thai" - Turkish -> "turkish" - -languageFromText :: Text -> Maybe Language -languageFromText x = case x of - "arabic" -> Just Arabic - "armenian" -> Just Armenian - "basque" -> Just Basque - "bengali" -> Just Bengali - "brazilian" -> Just Brazilian - "bulgarian" -> Just Bulgarian - "catalan" -> Just Catalan - "cjk" -> Just Cjk - "czech" -> Just Czech - "danish" -> Just Danish - "dutch" -> Just Dutch - "english" -> Just English - "finnish" -> Just Finnish - "french" -> Just French - "galician" -> Just Galician - "german" -> Just German - "german2" -> Just German2 - "greek" -> Just Greek - "hindi" -> Just Hindi - "hungarian" -> Just Hungarian - "indonesian" -> Just Indonesian - "irish" -> Just Irish - "italian" -> Just Italian - "kp" -> Just Kp - "latvian" -> Just Latvian - "lithuanian" -> Just Lithuanian - "lovins" -> Just Lovins - "norwegian" -> Just Norwegian - "persian" -> Just Persian - "porter" -> Just 
Porter - "portuguese" -> Just Portuguese - "romanian" -> Just Romanian - "russian" -> Just Russian - "sorani" -> Just Sorani - "spanish" -> Just Spanish - "swedish" -> Just Swedish - "thai" -> Just Thai - "turkish" -> Just Turkish - _ -> Nothing - -data Shingle = Shingle - { shingleMaxSize :: Int - , shingleMinSize :: Int - , shingleOutputUnigrams :: Bool - , shingleOutputUnigramsIfNoShingles :: Bool - , shingleTokenSeparator :: Text - , shingleFillerToken :: Text - } deriving (Eq,Show,Generic,Typeable) - -data TokenizerDefinition - = TokenizerDefinitionNgram Ngram - deriving (Eq,Show,Generic,Typeable) - -instance ToJSON TokenizerDefinition where - toJSON x = case x of - TokenizerDefinitionNgram (Ngram minGram maxGram tokenChars) -> object - [ "type" .= ("ngram" :: Text) - , "min_gram" .= minGram - , "max_gram" .= maxGram - , "token_chars" .= tokenChars - ] - -instance FromJSON TokenizerDefinition where - parseJSON = withObject "TokenizerDefinition" $ \m -> do - typ <- m .: "type" :: Parser Text - case typ of - "ngram" -> fmap TokenizerDefinitionNgram $ Ngram - <$> (fmap unStringlyTypedInt (m .: "min_gram")) - <*> (fmap unStringlyTypedInt (m .: "max_gram")) - <*> m .: "token_chars" - _ -> fail "invalid TokenizerDefinition" - -data Ngram = Ngram - { ngramMinGram :: Int - , ngramMaxGram :: Int - , ngramTokenChars :: [TokenChar] - } deriving (Eq,Show,Generic,Typeable) - -data TokenChar = TokenLetter | TokenDigit | TokenWhitespace | TokenPunctuation | TokenSymbol - deriving (Eq,Read,Show,Generic,Typeable) - -instance ToJSON TokenChar where - toJSON t = String $ case t of - TokenLetter -> "letter" - TokenDigit -> "digit" - TokenWhitespace -> "whitespace" - TokenPunctuation -> "punctuation" - TokenSymbol -> "symbol" - -instance FromJSON TokenChar where - parseJSON = withText "TokenChar" $ \t -> case t of - "letter" -> return TokenLetter - "digit" -> return TokenDigit - "whitespace" -> return TokenWhitespace - "punctuation" -> return TokenPunctuation - "symbol" -> return TokenSymbol - _ -> fail "invalid TokenChar" - -data AllocationPolicy = AllocAll - -- ^ Allows shard allocation for all shards. - | AllocPrimaries - -- ^ Allows shard allocation only for primary shards. - | AllocNewPrimaries - -- ^ Allows shard allocation only for primary shards for new indices. - | AllocNone - -- ^ No shard allocation is allowed - deriving (Eq, Read, Show, Generic, Typeable) - -data ReplicaBounds = ReplicasBounded Int Int - | ReplicasLowerBounded Int - | ReplicasUnbounded - deriving (Eq, Read, Show, Generic, Typeable) - -data Compression - = CompressionDefault - -- ^ Compress with LZ4 - | CompressionBest - -- ^ Compress with DEFLATE. Elastic - -- - -- that this can reduce disk use by 15%-25%. - deriving (Eq,Show,Generic,Typeable) - -instance ToJSON Compression where - toJSON x = case x of - CompressionDefault -> toJSON ("default" :: Text) - CompressionBest -> toJSON ("best_compression" :: Text) - -instance FromJSON Compression where - parseJSON = withText "Compression" $ \t -> case t of - "default" -> return CompressionDefault - "best_compression" -> return CompressionBest - _ -> fail "invalid compression codec" - --- | A measure of bytes used for various configurations. You may want --- to use smart constructors like 'gigabytes' for larger values. 
--- --- >>> gigabytes 9 --- Bytes 9000000000 --- --- >>> megabytes 9 --- Bytes 9000000 --- --- >>> kilobytes 9 --- Bytes 9000 -newtype Bytes = Bytes Int deriving (Eq, Read, Show, Generic, Typeable, Ord, ToJSON, FromJSON) - -gigabytes :: Int -> Bytes -gigabytes n = megabytes (1000 * n) - - -megabytes :: Int -> Bytes -megabytes n = kilobytes (1000 * n) - - -kilobytes :: Int -> Bytes -kilobytes n = Bytes (1000 * n) - - -data FSType = FSSimple - | FSBuffered deriving (Eq, Read, Show, Generic, Typeable, Ord) - -data InitialShardCount = QuorumShards - | QuorumMinus1Shards - | FullShards - | FullMinus1Shards - | ExplicitShards Int - deriving (Eq, Read, Show, Generic, Typeable) - -data NodeAttrFilter = NodeAttrFilter { nodeAttrFilterName :: NodeAttrName - , nodeAttrFilterValues :: NonEmpty Text} - deriving (Eq, Read, Show, Generic, Ord, Typeable) - -newtype NodeAttrName = NodeAttrName Text deriving (Eq, Read, Show, Ord, Generic, Typeable) - -data CompoundFormat = CompoundFileFormat Bool - | MergeSegmentVsTotalIndex Double - -- ^ percentage between 0 and 1 where 0 is false, 1 is true - deriving (Eq, Read, Show, Generic, Typeable) - -newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime } - -data IndexSettingsSummary = IndexSettingsSummary { sSummaryIndexName :: IndexName - , sSummaryFixedSettings :: IndexSettings - , sSummaryUpdateable :: [UpdatableIndexSetting]} - deriving (Eq, Show, Generic, Typeable) - -{-| 'Reply' is a type synonym for 'Network.HTTP.Client.Response' over a lazy ByteString; 'Method' is a synonym for 'Network.HTTP.Types.Method.Method' -} -type Reply = Network.HTTP.Client.Response L.ByteString -type Method = NHTM.Method - -{-| 'OpenCloseIndex' is a sum type for opening and closing indices. - - --} -data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Read, Show, Generic, Typeable) - -data FieldType = GeoPointType - | GeoShapeType - | FloatType - | IntegerType - | LongType - | ShortType - | ByteType deriving (Eq, Read, Show, Generic, Typeable) - -data FieldDefinition = - FieldDefinition { fieldType :: FieldType } deriving (Eq, Read, Show, Generic, Typeable) - -{-| An 'IndexTemplate' defines a template that will automatically be - applied to newly created indices. The templates include both - 'IndexSettings' and mappings, and a simple 'TemplatePattern' that - controls whether the template will be applied to a newly created index. - Specify mappings as follows: @[toJSON TweetMapping, ...]@ - - https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html --} -data IndexTemplate = - IndexTemplate { templatePattern :: TemplatePattern - , templateSettings :: Maybe IndexSettings - , templateMappings :: [Value] - } - -data MappingField = - MappingField { mappingFieldName :: FieldName - , fieldDefinition :: FieldDefinition } deriving (Eq, Read, Show, Generic, Typeable) - -{-| Support for type reification of 'Mapping's is currently incomplete; for - now the mapping API verbiage expects a 'ToJSON'able blob. - - Indexes have mappings; mappings are schemas for the documents contained in the - index. I'd recommend having only one mapping per index, always having a mapping, - and keeping different kinds of documents separated if possible. --} -data Mapping = Mapping { typeName :: TypeName - , mappingFields :: [MappingField] } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk - operation: index, create, delete, and update. 'BulkIndex' behaves like an - "upsert"; 'BulkCreate' will fail if a document already exists at the DocId.
- - --} -data BulkOperation = - BulkIndex IndexName MappingName DocId Value - | BulkCreate IndexName MappingName DocId Value - | BulkCreateEncoding IndexName MappingName DocId Encoding - | BulkDelete IndexName MappingName DocId - | BulkUpdate IndexName MappingName DocId Value deriving (Eq, Show, Generic, Typeable) - -{-| 'EsResult' describes the standard wrapper JSON document that you see in - successful Elasticsearch lookups or lookups that couldn't find the document. --} -data EsResult a = EsResult { _index :: Text - , _type :: Text - , _id :: Text - , foundResult :: Maybe (EsResultFound a)} deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsResultFound' contains the document and its metadata inside of an - 'EsResult' when the document was successfully found. --} -data EsResultFound a = EsResultFound { _version :: DocVersion - , _source :: a } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsError' is the generic type that will be returned when there was a - problem. If you can't parse the expected response, it's a good idea to - try parsing this. --} -data EsError = EsError { errorStatus :: Int - , errorMessage :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response -returned by the Elasticsearch server. If you encounter this error, please -verify that your domain data types and FromJSON instances are working properly -(for example, the 'a' of '[Hit a]' in 'SearchResult.searchHits.hits'). If you're -sure that your mappings are correct, then this error may be an indication of an -incompatibility between Bloodhound and Elasticsearch. Please open a bug report -and be sure to include the exception body. --} -data EsProtocolException = EsProtocolException { esProtoExBody :: L.ByteString } - deriving (Eq, Read, Show, Generic, Typeable) - -instance Exception EsProtocolException - -data IndexAlias = IndexAlias { srcIndex :: IndexName - , indexAlias :: IndexAliasName } deriving (Eq, Read, Show, Generic, Typeable) - -newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Read, Show, Generic, ToJSON) - -data IndexAliasAction = AddAlias IndexAlias IndexAliasCreate - | RemoveAlias IndexAlias deriving (Read, Show, Eq, Generic, Typeable) - -data IndexAliasCreate = IndexAliasCreate { aliasCreateRouting :: Maybe AliasRouting - , aliasCreateFilter :: Maybe Filter} - deriving (Read, Show, Eq, Generic, Typeable) - -data AliasRouting = AllAliasRouting RoutingValue - | GranularAliasRouting (Maybe SearchAliasRouting) (Maybe IndexAliasRouting) - deriving (Read, Show, Eq, Generic, Typeable) - -newtype SearchAliasRouting = SearchAliasRouting (NonEmpty RoutingValue) deriving (Read, Show, Eq, Generic, Typeable) - -newtype IndexAliasRouting = IndexAliasRouting RoutingValue deriving (Read, Show, Eq, Generic, ToJSON, FromJSON, Typeable) - -newtype RoutingValue = RoutingValue { routingValue :: Text } deriving (Read, Show, Eq, Generic, ToJSON, FromJSON, Typeable) - -newtype IndexAliasesSummary = IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } deriving (Read, Show, Eq, Generic, Typeable) - -{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -} -data IndexAliasSummary = IndexAliasSummary { indexAliasSummaryAlias :: IndexAlias - , indexAliasSummaryCreate :: IndexAliasCreate} deriving (Read, Show, Eq, Generic, Typeable) - -{-| 'DocVersion' is an integer version number for a document between 1 -and 9.2e+18 used for <>.
--} -newtype DocVersion = DocVersion { - docVersionNumber :: Int - } deriving (Eq, Read, Show, Generic, Ord, ToJSON) - --- | Smart constructor for in-range doc version -mkDocVersion :: Int -> Maybe DocVersion -mkDocVersion i - | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) = - Just $ DocVersion i - | otherwise = Nothing - - -{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its -own version numbers instead of ones from ES. --} -newtype ExternalDocVersion = ExternalDocVersion DocVersion - deriving (Eq, Read, Show, Generic, Ord, Bounded, Enum, ToJSON) - -{-| 'VersionControl' is specified when indexing documents as an -optimistic concurrency control. --} -data VersionControl = NoVersionControl - -- ^ Don't send a version. This is a pure overwrite. - | InternalVersion DocVersion - -- ^ Use the default ES versioning scheme. Only - -- index the document if the version is the same - -- as the one specified. Only applicable to - -- updates, as you should be getting Version from - -- a search result. - | ExternalGT ExternalDocVersion - -- ^ Use your own version numbering. Only index - -- the document if the version is strictly higher - -- OR the document doesn't exist. The given - -- version will be used as the new version number - -- for the stored document. N.B. All updates must - -- increment this number, meaning there is some - -- global, external ordering of updates. - | ExternalGTE ExternalDocVersion - -- ^ Use your own version numbering. Only index - -- the document if the version is equal or higher - -- than the stored version. Will succeed if there - -- is no existing document. The given version will - -- be used as the new version number for the - -- stored document. Use with care, as this could - -- result in data loss. - | ForceVersion ExternalDocVersion - -- ^ The document will always be indexed and the - -- given version will be the new version. This is - -- typically used for correcting errors. Use with - -- care, as this could result in data loss. - deriving (Read, Show, Eq, Generic, Ord) - -{-| 'DocumentParent' is used to specify a parent document. --} -newtype DocumentParent = DocumentParent DocId - deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'IndexDocumentSettings' are special settings supplied when indexing -a document. For the best backwards compatibility when new fields are -added, you should probably prefer to start with 'defaultIndexDocumentSettings'. --} -data IndexDocumentSettings = - IndexDocumentSettings { idsVersionControl :: VersionControl - , idsParent :: Maybe DocumentParent - } deriving (Eq, Read, Show, Generic, Typeable) - -{-| Reasonable default settings. Chooses no version control and no parent. --} -defaultIndexDocumentSettings :: IndexDocumentSettings -defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing - -{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order - dependent with later sorts acting as tie-breakers for earlier sorts. --} -type Sort = [SortSpec] - -{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and - 'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and - 'DistanceUnit' to express "nearness" to a single geographical point as a - sort specification. - - --} -data SortSpec = DefaultSortSpec DefaultSort - | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want.
There's a - 'mkSort' convenience function for when you want to specify only the most - common parameters. - - The `ignoreUnmapped` field, when `Just`, is used to set the Elasticsearch 'unmapped_type' - - --} -data DefaultSort = - DefaultSort { sortFieldName :: FieldName - , sortOrder :: SortOrder - -- default False - , ignoreUnmapped :: Maybe Text - , sortMode :: Maybe SortMode - , missingSort :: Maybe Missing - , nestedFilter :: Maybe Filter } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get - encoded into "asc" or "desc" when turned into JSON. - - --} -data SortOrder = Ascending - | Descending deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'Missing' prescribes how to handle missing fields. A missing field can be - sorted last, first, or using a custom value as a substitute. - - --} -data Missing = LastMissing - | FirstMissing - | CustomMissing Text deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields. - -http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option --} -data SortMode = SortMin - | SortMax - | SortSum - | SortAvg deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so - that you can concisely describe the usual kind of 'SortSpec's you want. --} -mkSort :: FieldName -> SortOrder -> DefaultSort -mkSort fieldName sOrder = DefaultSort fieldName sOrder Nothing Nothing Nothing Nothing - -{-| 'Cache' is for telling ES whether it should cache a 'Filter' or not. - 'Query's cannot be cached. --} -type Cache = Bool -- caching on/off -defaultCache :: Cache -defaultCache = False - -{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component. --} --- type PrefixValue = Text - -{-| 'BooleanOperator' is the usual And/Or operators with an ES-compatible - JSON encoding baked in. Used all over the place. --} -data BooleanOperator = And | Or deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'ShardCount' is part of 'IndexSettings' --} -newtype ShardCount = ShardCount Int deriving (Eq, Read, Show, Generic, ToJSON, Typeable) - -{-| 'ReplicaCount' is part of 'IndexSettings' --} -newtype ReplicaCount = ReplicaCount Int deriving (Eq, Read, Show, Generic, ToJSON, Typeable) - -{-| 'IndexName' is used to describe which index to query/create/delete --} -newtype IndexName = IndexName Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'IndexSelection' is used for APIs which take a single index, a list of - indexes, or the special @_all@ index. --} ---TODO: this does not fully support . It wouldn't be too hard to implement but you'd have to add the optional parameters (ignore_unavailable, allow_no_indices, expand_wildcards) to any APIs using it. It would also be a breaking API change. -data IndexSelection = IndexList (NonEmpty IndexName) - | AllIndexes deriving (Eq, Generic, Show, Typeable) - -{-| 'NodeSelection' is used for most cluster APIs. See for more details. --} -data NodeSelection = LocalNode - -- ^ Whatever node receives this request - | NodeList (NonEmpty NodeSelector) - | AllNodes deriving (Eq, Generic, Show, Typeable) - - --- | An exact match or pattern to identify a node. Note that all of --- these options support wildcarding, so your node name, server, attr --- name can all contain * characters to be a fuzzy match.
-data NodeSelector = NodeByName NodeName - | NodeByFullNodeId FullNodeId - | NodeByHost Server - -- ^ e.g. 10.0.0.1 or even 10.0.0.* - | NodeByAttribute NodeAttrName Text - -- ^ NodeAttrName can be a pattern, e.g. rack*. The value can too. - deriving (Eq, Generic, Show, Typeable) - -{-| 'TemplateName' is used to describe which template to query/create/delete --} -newtype TemplateName = TemplateName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'TemplatePattern' represents a pattern which is matched against index names --} -newtype TemplatePattern = TemplatePattern Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'MappingName' is part of mappings which are how ES describes and schematizes - the data in the indices. --} -newtype MappingName = MappingName Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'DocId' is a generic wrapper value for expressing unique Document IDs. - Can be set by the user or created by ES itself. Often used in client - functions for poking at specific documents. --} -newtype DocId = DocId Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'QueryString' is used to wrap query text bodies, be they human-written or not. --} -newtype QueryString = QueryString Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'FieldName' is used all over the place wherever a specific field within - a document needs to be specified, usually in 'Query's or 'Filter's. --} -newtype FieldName = FieldName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - - -{-| 'Script' is often used in place of 'FieldName' to specify more -complex ways of extracting a value from a document. --} -newtype Script = Script { scriptText :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'CacheName' is used in 'RegexpFilter' for describing the - 'CacheKey' keyed caching behavior. --} -newtype CacheName = CacheName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching. --} -newtype CacheKey = - CacheKey Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Existence = - Existence Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype NullValue = - NullValue Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype CutoffFrequency = - CutoffFrequency Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Analyzer = - Analyzer Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Tokenizer = - Tokenizer Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype TokenFilter = - TokenFilter Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxExpansions = - MaxExpansions Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'Lenient', if set to true, will cause format-based failures to be - ignored. I don't know what the bloody default is; the Elasticsearch - documentation didn't say what it was. Let me know if you figure it out.
--} -newtype Lenient = - Lenient Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Tiebreaker = - Tiebreaker Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Boost = - Boost Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype BoostTerms = - BoostTerms Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'MinimumMatch' controls how many should clauses in the bool query should - match. Can be an absolute value (2) or a percentage (30%) or a - combination of both. --} -newtype MinimumMatch = - MinimumMatch Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype DisableCoord = - DisableCoord Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype IgnoreTermFrequency = - IgnoreTermFrequency Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinimumTermFrequency = - MinimumTermFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxQueryTerms = - MaxQueryTerms Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Fuzziness = - Fuzziness Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'PrefixLength' is the prefix length used in queries; it defaults to 0. -} -newtype PrefixLength = - PrefixLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype TypeName = - TypeName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype PercentMatch = - PercentMatch Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype StopWord = - StopWord Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype QueryPath = - QueryPath Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| Allowing a wildcard at the beginning of a word (e.g. "*ing") is particularly - heavy, because all terms in the index need to be examined, just in case - they match. Leading wildcards can be disabled by setting - 'AllowLeadingWildcard' to false. -} -newtype AllowLeadingWildcard = - AllowLeadingWildcard Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype LowercaseExpanded = - LowercaseExpanded Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype EnablePositionIncrements = - EnablePositionIncrements Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| By default, wildcard terms in a query are not analyzed. - Setting 'AnalyzeWildcard' to true enables best-effort analysis. --} -newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'GeneratePhraseQueries' defaults to false. --} -newtype GeneratePhraseQueries = - GeneratePhraseQueries Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'Locale' is used for string conversions - defaults to ROOT. --} -newtype Locale = Locale Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxWordLength = MaxWordLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinWordLength = MinWordLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'PhraseSlop' sets the default slop for phrases; 0 means exact - phrase matches. Default is 0.
--} -newtype PhraseSlop = PhraseSlop Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - --- | Newtype wrapper to parse ES's concerning tendency, in some APIs, to return a floating-point number of milliseconds since epoch ಠ_ಠ -newtype POSIXMS = POSIXMS { posixMS :: UTCTime } +import Database.V5.Bloodhound.Internal.Analysis +import Database.V5.Bloodhound.Internal.Aggregation +import Database.V5.Bloodhound.Internal.Client +import Database.V5.Bloodhound.Internal.Highlight +import Database.V5.Bloodhound.Internal.Newtypes +import Database.V5.Bloodhound.Internal.Query +import Database.V5.Bloodhound.Internal.Sort +import Database.V5.Bloodhound.Internal.Suggest {-| 'unpackId' is a silly convenience function that gets used once. -} @@ -1441,8 +430,6 @@ unpackId :: DocId -> Text unpackId (DocId docId) = docId type TrackSortScores = Bool -newtype From = From Int deriving (Eq, Read, Show, Generic, ToJSON) -newtype Size = Size Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON) data Search = Search { queryBody :: Maybe Query , filterBody :: Maybe Filter @@ -1455,557 +442,71 @@ data Search = Search { queryBody :: Maybe Query , size :: Size , searchType :: SearchType , fields :: Maybe [FieldName] + , scriptFields :: Maybe ScriptFields , source :: Maybe Source , suggestBody :: Maybe Suggest -- ^ Only one Suggestion request / response per Search is supported. - } deriving (Eq, Read, Show, Generic, Typeable) + } deriving (Eq, Show) + + +instance ToJSON Search where + toJSON (Search mquery sFilter sort searchAggs + highlight sTrackSortScores sFrom sSize _ sFields + sScriptFields sSource sSuggest) = + omitNulls [ "query" .= query' + , "sort" .= sort + , "aggregations" .= searchAggs + , "highlight" .= highlight + , "from" .= sFrom + , "size" .= sSize + , "track_scores" .= sTrackSortScores + , "fields" .= sFields + , "script_fields" .= sScriptFields + , "_source" .= sSource + , "suggest" .= sSuggest] + + where query' = case sFilter of + Nothing -> mquery + Just x -> + Just + .
QueryBoolQuery + $ mkBoolQuery (maybeToList mquery) + [x] [] [] data SearchType = SearchTypeQueryThenFetch | SearchTypeDfsQueryThenFetch - deriving (Eq, Read, Show, Generic, Typeable) + deriving (Eq, Show) data Source = NoSource | SourcePatterns PatternOrPatterns | SourceIncludeExclude Include Exclude - deriving (Read, Show, Eq, Generic, Typeable) + deriving (Eq, Show) + +instance ToJSON Source where + toJSON NoSource = toJSON False + toJSON (SourcePatterns patterns) = toJSON patterns + toJSON (SourceIncludeExclude incl excl) = object [ "includes" .= incl, "excludes" .= excl ] data PatternOrPatterns = PopPattern Pattern - | PopPatterns [Pattern] deriving (Eq, Read, Show, Generic, Typeable) + | PopPatterns [Pattern] deriving (Eq, Read, Show) -data Include = Include [Pattern] deriving (Eq, Read, Show, Generic, Typeable) -data Exclude = Exclude [Pattern] deriving (Eq, Read, Show, Generic, Typeable) +instance ToJSON PatternOrPatterns where + toJSON (PopPattern pattern) = toJSON pattern + toJSON (PopPatterns patterns) = toJSON patterns -newtype Pattern = Pattern Text deriving (Eq, Read, Show, Generic, Typeable) +data Include = Include [Pattern] deriving (Eq, Read, Show) +data Exclude = Exclude [Pattern] deriving (Eq, Read, Show) -data Highlights = Highlights { globalsettings :: Maybe HighlightSettings - , highlightFields :: [FieldHighlight] - } deriving (Read, Show, Eq, Generic, Typeable) +instance ToJSON Include where + toJSON (Include patterns) = toJSON patterns -data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings) - deriving (Read, Show, Eq, Generic, Typeable) +instance ToJSON Exclude where + toJSON (Exclude patterns) = toJSON patterns +newtype Pattern = Pattern Text deriving (Eq, Read, Show) -data HighlightSettings = Plain PlainHighlight - | Postings PostingsHighlight - | FastVector FastVectorHighlight - deriving (Read, Show, Eq, Generic, Typeable) -data PlainHighlight = - PlainHighlight { plainCommon :: Maybe CommonHighlight - , plainNonPost :: Maybe NonPostings } deriving (Read, Show, Eq, Generic, Typeable) - - -- This requires that index_options are set to 'offset' in the mapping. -data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Read, Show, Eq, Generic, Typeable) - --- This requires that term_vector is set to 'with_positions_offsets' in the mapping. -data FastVectorHighlight = - FastVectorHighlight { fvCommon :: Maybe CommonHighlight - , fvNonPostSettings :: Maybe NonPostings - , boundaryChars :: Maybe Text - , boundaryMaxScan :: Maybe Int - , fragmentOffset :: Maybe Int - , matchedFields :: [Text] - , phraseLimit :: Maybe Int - } deriving (Read, Show, Eq, Generic, Typeable) - -data CommonHighlight = - CommonHighlight { order :: Maybe Text - , forceSource :: Maybe Bool - , tag :: Maybe HighlightTag - , encoder :: Maybe HighlightEncoder - , noMatchSize :: Maybe Int - , highlightQuery :: Maybe Query - , requireFieldMatch :: Maybe Bool - } deriving (Read, Show, Eq, Generic, Typeable) - --- Settings that are only applicable to FastVector and Plain highlighters. -data NonPostings = - NonPostings { fragmentSize :: Maybe Int - , numberOfFragments :: Maybe Int} deriving (Read, Show, Eq, Generic, Typeable) - -data HighlightEncoder = DefaultEncoder - | HTMLEncoder - deriving (Read, Show, Eq, Generic, Typeable) - --- NOTE: Should the tags use some kind of HTML type, rather than Text? 
-data HighlightTag = TagSchema Text - | CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh - deriving (Read, Show, Eq, Generic, Typeable) - - -data Query = - TermQuery Term (Maybe Boost) - | TermsQuery Text (NonEmpty Text) - | QueryMatchQuery MatchQuery - | QueryMultiMatchQuery MultiMatchQuery - | QueryBoolQuery BoolQuery - | QueryBoostingQuery BoostingQuery - | QueryCommonTermsQuery CommonTermsQuery - | ConstantScoreQuery Query Boost - | QueryDisMaxQuery DisMaxQuery - | QueryFuzzyLikeThisQuery FuzzyLikeThisQuery - | QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery - | QueryFuzzyQuery FuzzyQuery - | QueryHasChildQuery HasChildQuery - | QueryHasParentQuery HasParentQuery - | IdsQuery MappingName [DocId] - | QueryIndicesQuery IndicesQuery - | MatchAllQuery (Maybe Boost) - | QueryMoreLikeThisQuery MoreLikeThisQuery - | QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery - | QueryNestedQuery NestedQuery - | QueryPrefixQuery PrefixQuery - | QueryQueryStringQuery QueryStringQuery - | QuerySimpleQueryStringQuery SimpleQueryStringQuery - | QueryRangeQuery RangeQuery - | QueryRegexpQuery RegexpQuery - | QueryExistsQuery FieldName - | QueryMatchNoneQuery - | QueryTemplateQueryInline TemplateQueryInline - deriving (Eq, Read, Show, Generic, Typeable) - --- | As of Elastic 2.0, 'Filters' are just 'Queries' housed in a Bool Query, and --- flagged in a different context. -newtype Filter = Filter { unFilter :: Query } - deriving (Eq, Read, Show, Generic, Typeable) - -instance ToJSON Filter where - toJSON = toJSON . unFilter - -instance FromJSON Filter where - parseJSON v = Filter <$> parseJSON v - -data RegexpQuery = - RegexpQuery { regexpQueryField :: FieldName - , regexpQuery :: Regexp - , regexpQueryFlags :: RegexpFlags - , regexpQueryBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data RangeQuery = - RangeQuery { rangeQueryField :: FieldName - , rangeQueryRange :: RangeValue - , rangeQueryBoost :: Boost } deriving (Eq, Read, Show, Generic, Typeable) - -mkRangeQuery :: FieldName -> RangeValue -> RangeQuery -mkRangeQuery f r = RangeQuery f r (Boost 1.0) - -data SimpleQueryStringQuery = - SimpleQueryStringQuery - { simpleQueryStringQuery :: QueryString - , simpleQueryStringField :: Maybe FieldOrFields - , simpleQueryStringOperator :: Maybe BooleanOperator - , simpleQueryStringAnalyzer :: Maybe Analyzer - , simpleQueryStringFlags :: Maybe (NonEmpty SimpleQueryFlag) - , simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded - , simpleQueryStringLocale :: Maybe Locale - } deriving (Eq, Read, Show, Generic, Typeable) - -data SimpleQueryFlag = - SimpleQueryAll - | SimpleQueryNone - | SimpleQueryAnd - | SimpleQueryOr - | SimpleQueryPrefix - | SimpleQueryPhrase - | SimpleQueryPrecedence - | SimpleQueryEscape - | SimpleQueryWhitespace - | SimpleQueryFuzzy - | SimpleQueryNear - | SimpleQuerySlop deriving (Eq, Read, Show, Generic, Typeable) - --- use_dis_max and tie_breaker when fields are plural? 
-data QueryStringQuery = - QueryStringQuery - { queryStringQuery :: QueryString - , queryStringDefaultField :: Maybe FieldName - , queryStringOperator :: Maybe BooleanOperator - , queryStringAnalyzer :: Maybe Analyzer - , queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard - , queryStringLowercaseExpanded :: Maybe LowercaseExpanded - , queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements - , queryStringFuzzyMaxExpansions :: Maybe MaxExpansions - , queryStringFuzziness :: Maybe Fuzziness - , queryStringFuzzyPrefixLength :: Maybe PrefixLength - , queryStringPhraseSlop :: Maybe PhraseSlop - , queryStringBoost :: Maybe Boost - , queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard - , queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries - , queryStringMinimumShouldMatch :: Maybe MinimumMatch - , queryStringLenient :: Maybe Lenient - , queryStringLocale :: Maybe Locale - } deriving (Eq, Read, Show, Generic, Typeable) - -mkQueryStringQuery :: QueryString -> QueryStringQuery -mkQueryStringQuery qs = - QueryStringQuery qs Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing - -data FieldOrFields = FofField FieldName - | FofFields (NonEmpty FieldName) deriving (Eq, Read, Show, Generic, Typeable) - -data PrefixQuery = - PrefixQuery - { prefixQueryField :: FieldName - , prefixQueryPrefixValue :: Text - , prefixQueryBoost :: Maybe Boost } deriving (Eq, Read, Show, Generic, Typeable) - -data NestedQuery = - NestedQuery - { nestedQueryPath :: QueryPath - , nestedQueryScoreType :: ScoreType - , nestedQuery :: Query } deriving (Eq, Read, Show, Generic, Typeable) - -data MoreLikeThisFieldQuery = - MoreLikeThisFieldQuery - { moreLikeThisFieldText :: Text - , moreLikeThisFieldFields :: FieldName - -- default 0.3 (30%) - , moreLikeThisFieldPercentMatch :: Maybe PercentMatch - , moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency - , moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms - , moreLikeThisFieldStopWords :: Maybe (NonEmpty StopWord) - , moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency - , moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency - , moreLikeThisFieldMinWordLength :: Maybe MinWordLength - , moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength - , moreLikeThisFieldBoostTerms :: Maybe BoostTerms - , moreLikeThisFieldBoost :: Maybe Boost - , moreLikeThisFieldAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data MoreLikeThisQuery = - MoreLikeThisQuery - { moreLikeThisText :: Text - , moreLikeThisFields :: Maybe (NonEmpty FieldName) - -- default 0.3 (30%) - , moreLikeThisPercentMatch :: Maybe PercentMatch - , moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency - , moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms - , moreLikeThisStopWords :: Maybe (NonEmpty StopWord) - , moreLikeThisMinDocFrequency :: Maybe MinDocFrequency - , moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency - , moreLikeThisMinWordLength :: Maybe MinWordLength - , moreLikeThisMaxWordLength :: Maybe MaxWordLength - , moreLikeThisBoostTerms :: Maybe BoostTerms - , moreLikeThisBoost :: Maybe Boost - , moreLikeThisAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data IndicesQuery = - IndicesQuery - { indicesQueryIndices :: [IndexName] - , indicesQuery :: Query - -- default "all" - , indicesQueryNoMatch :: Maybe Query } deriving (Eq, Read, Show, Generic, Typeable) - -data HasParentQuery = - HasParentQuery - { 
hasParentQueryType :: TypeName - , hasParentQuery :: Query - , hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Read, Show, Generic, Typeable) - -data HasChildQuery = - HasChildQuery - { hasChildQueryType :: TypeName - , hasChildQuery :: Query - , hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Read, Show, Generic, Typeable) - -data ScoreType = - ScoreTypeMax - | ScoreTypeSum - | ScoreTypeAvg - | ScoreTypeNone deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyQuery = - FuzzyQuery { fuzzyQueryField :: FieldName - , fuzzyQueryValue :: Text - , fuzzyQueryPrefixLength :: PrefixLength - , fuzzyQueryMaxExpansions :: MaxExpansions - , fuzzyQueryFuzziness :: Fuzziness - , fuzzyQueryBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyLikeFieldQuery = - FuzzyLikeFieldQuery - { fuzzyLikeField :: FieldName - -- anaphora is good for the soul. - , fuzzyLikeFieldText :: Text - , fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms - , fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency - , fuzzyLikeFieldFuzziness :: Fuzziness - , fuzzyLikeFieldPrefixLength :: PrefixLength - , fuzzyLikeFieldBoost :: Boost - , fuzzyLikeFieldAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyLikeThisQuery = - FuzzyLikeThisQuery - { fuzzyLikeFields :: [FieldName] - , fuzzyLikeText :: Text - , fuzzyLikeMaxQueryTerms :: MaxQueryTerms - , fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency - , fuzzyLikeFuzziness :: Fuzziness - , fuzzyLikePrefixLength :: PrefixLength - , fuzzyLikeBoost :: Boost - , fuzzyLikeAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data DisMaxQuery = - DisMaxQuery { disMaxQueries :: [Query] - -- default 0.0 - , disMaxTiebreaker :: Tiebreaker - , disMaxBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data MatchQuery = - MatchQuery { matchQueryField :: FieldName - , matchQueryQueryString :: QueryString - , matchQueryOperator :: BooleanOperator - , matchQueryZeroTerms :: ZeroTermsQuery - , matchQueryCutoffFrequency :: Maybe CutoffFrequency - , matchQueryMatchType :: Maybe MatchQueryType - , matchQueryAnalyzer :: Maybe Analyzer - , matchQueryMaxExpansions :: Maybe MaxExpansions - , matchQueryLenient :: Maybe Lenient - , matchQueryBoost :: Maybe Boost - , matchQueryMinimumShouldMatch :: Maybe Text - } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters, - enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery' --} -mkMatchQuery :: FieldName -> QueryString -> MatchQuery -mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing Nothing - -data MatchQueryType = - MatchPhrase - | MatchPhrasePrefix deriving (Eq, Read, Show, Generic, Typeable) - -data MultiMatchQuery = - MultiMatchQuery { multiMatchQueryFields :: [FieldName] - , multiMatchQueryString :: QueryString - , multiMatchQueryOperator :: BooleanOperator - , multiMatchQueryZeroTerms :: ZeroTermsQuery - , multiMatchQueryTiebreaker :: Maybe Tiebreaker - , multiMatchQueryType :: Maybe MultiMatchQueryType - , multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency - , multiMatchQueryAnalyzer :: Maybe Analyzer - , multiMatchQueryMaxExpansions :: Maybe MaxExpansions - , multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkMultiMatchQuery' is a convenience function that defaults the less common parameters, - 
enabling you to provide only the list of 'FieldName's and 'QueryString' to - make a 'MultiMatchQuery'. --} - -mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery -mkMultiMatchQuery matchFields query = - MultiMatchQuery matchFields query - Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing - -data MultiMatchQueryType = - MultiMatchBestFields - | MultiMatchMostFields - | MultiMatchCrossFields - | MultiMatchPhrase - | MultiMatchPhrasePrefix deriving (Eq, Read, Show, Generic, Typeable) - -data BoolQuery = - BoolQuery { boolQueryMustMatch :: [Query] - , boolQueryFilter :: [Filter] - , boolQueryMustNotMatch :: [Query] - , boolQueryShouldMatch :: [Query] - , boolQueryMinimumShouldMatch :: Maybe MinimumMatch - , boolQueryBoost :: Maybe Boost - , boolQueryDisableCoord :: Maybe DisableCoord - } deriving (Eq, Read, Show, Generic, Typeable) - -mkBoolQuery :: [Query] -> [Filter] -> [Query] -> [Query] -> BoolQuery -mkBoolQuery must filt mustNot should = - BoolQuery must filt mustNot should Nothing Nothing Nothing - -data BoostingQuery = - BoostingQuery { positiveQuery :: Query - , negativeQuery :: Query - , negativeBoost :: Boost } deriving (Eq, Read, Show, Generic, Typeable) - -data CommonTermsQuery = - CommonTermsQuery { commonField :: FieldName - , commonQuery :: QueryString - , commonCutoffFrequency :: CutoffFrequency - , commonLowFreqOperator :: BooleanOperator - , commonHighFreqOperator :: BooleanOperator - , commonMinimumShouldMatch :: Maybe CommonMinimumMatch - , commonBoost :: Maybe Boost - , commonAnalyzer :: Maybe Analyzer - , commonDisableCoord :: Maybe DisableCoord - } deriving (Eq, Read, Show, Generic, Typeable) - -data CommonMinimumMatch = - CommonMinimumMatchHighLow MinimumMatchHighLow - | CommonMinimumMatch MinimumMatch - deriving (Eq, Read, Show, Generic, Typeable) - -data MinimumMatchHighLow = - MinimumMatchHighLow { lowFreq :: MinimumMatch - , highFreq :: MinimumMatch } deriving (Eq, Read, Show, Generic, Typeable) - -data ZeroTermsQuery = ZeroTermsNone - | ZeroTermsAll deriving (Eq, Read, Show, Generic, Typeable) - -data RangeExecution = RangeExecutionIndex - | RangeExecutionFielddata deriving (Eq, Read, Show, Generic, Typeable) - -newtype Regexp = Regexp Text deriving (Eq, Read, Show, Generic, Typeable, FromJSON) - -data RegexpFlags = AllRegexpFlags - | NoRegexpFlags - | SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Read, Show, Generic, Typeable) - -data RegexpFlag = AnyString - | Automaton - | Complement - | Empty - | Intersection - | Interval deriving (Eq, Read, Show, Generic, Typeable) - -newtype LessThan = LessThan Double deriving (Eq, Read, Show, Generic, Typeable) -newtype LessThanEq = LessThanEq Double deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThan = GreaterThan Double deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Read, Show, Generic, Typeable) - -newtype LessThanD = LessThanD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Read, Show, Generic, Typeable) - -data RangeValue = RangeDateLte LessThanEqD - | RangeDateLt LessThanD - | RangeDateGte GreaterThanEqD - | RangeDateGt GreaterThanD - | RangeDateGtLt GreaterThanD LessThanD - | RangeDateGteLte GreaterThanEqD LessThanEqD - | RangeDateGteLt GreaterThanEqD LessThanD - | 
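
-- The mk* smart constructors above all follow the same pattern: supply
-- the required arguments, and every optional field defaults to 'Nothing'
-- or empty. A sketch combining them (hypothetical field and term):
--
-- > searchUser :: Query
-- > searchUser = QueryBoolQuery $ mkBoolQuery
-- >   [QueryMatchQuery (mkMatchQuery (FieldName "user") (QueryString "kimchy"))]
-- >   [] [] []
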
RangeDateGtLte GreaterThanD LessThanEqD - | RangeDoubleLte LessThanEq - | RangeDoubleLt LessThan - | RangeDoubleGte GreaterThanEq - | RangeDoubleGt GreaterThan - | RangeDoubleGtLt GreaterThan LessThan - | RangeDoubleGteLte GreaterThanEq LessThanEq - | RangeDoubleGteLt GreaterThanEq LessThan - | RangeDoubleGtLte GreaterThan LessThanEq - deriving (Eq, Read, Show, Generic, Typeable) - -rangeValueToPair :: RangeValue -> [Pair] -rangeValueToPair rv = case rv of - RangeDateLte (LessThanEqD t) -> ["lte" .= t] - RangeDateGte (GreaterThanEqD t) -> ["gte" .= t] - RangeDateLt (LessThanD t) -> ["lt" .= t] - RangeDateGt (GreaterThanD t) -> ["gt" .= t] - RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g] - RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g] - RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g] - RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g] - RangeDoubleLte (LessThanEq t) -> ["lte" .= t] - RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t] - RangeDoubleLt (LessThan t) -> ["lt" .= t] - RangeDoubleGt (GreaterThan t) -> ["gt" .= t] - RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g] - RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g] - RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g] - RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g] - -data Term = Term { termField :: Text - , termValue :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -data BoolMatch = MustMatch Term Cache - | MustNotMatch Term Cache - | ShouldMatch [Term] Cache deriving (Eq, Read, Show, Generic, Typeable) - --- "memory" or "indexed" -data GeoFilterType = GeoFilterMemory - | GeoFilterIndexed deriving (Eq, Read, Show, Generic, Typeable) - -data LatLon = LatLon { lat :: Double - , lon :: Double } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoBoundingBox = - GeoBoundingBox { topLeft :: LatLon - , bottomRight :: LatLon } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoBoundingBoxConstraint = - GeoBoundingBoxConstraint { geoBBField :: FieldName - , constraintBox :: GeoBoundingBox - , bbConstraintcache :: Cache - , geoType :: GeoFilterType - } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoPoint = - GeoPoint { geoField :: FieldName - , latLon :: LatLon} deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceUnit = Miles - | Yards - | Feet - | Inches - | Kilometers - | Meters - | Centimeters - | Millimeters - | NauticalMiles deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceType = Arc - | SloppyArc -- doesn't exist <1.0 - | Plane deriving (Eq, Read, Show, Generic, Typeable) - -data OptimizeBbox = OptimizeGeoFilterType GeoFilterType - | NoOptimizeBbox deriving (Eq, Read, Show, Generic, Typeable) - -data Distance = - Distance { coefficient :: Double - , unit :: DistanceUnit } deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceRange = - DistanceRange { distanceFrom :: Distance - , distanceTo :: Distance } deriving (Eq, Read, Show, Generic, Typeable) - -type TemplateQueryKey = Text -type TemplateQueryValue = Text - -newtype TemplateQueryKeyValuePairs = TemplateQueryKeyValuePairs (HM.HashMap TemplateQueryKey TemplateQueryValue) - deriving (Eq, Read, Show, Generic) - -instance ToJSON TemplateQueryKeyValuePairs where - toJSON (TemplateQueryKeyValuePairs x) = Object $ HM.map toJSON x - -instance FromJSON TemplateQueryKeyValuePairs where - parseJSON (Object o) = pure . 
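
-- 'rangeValueToPair' above flattens a 'RangeValue' into the gt/gte/lt/lte
-- pairs Elasticsearch expects inside a range body, e.g. (illustrative):
--
-- > rangeValueToPair (RangeDoubleGteLte (GreaterThanEq 2) (LessThanEq 4))
-- >   == ["gte" .= (2 :: Double), "lte" .= (4 :: Double)]
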
TemplateQueryKeyValuePairs $ HM.mapMaybe getValue o - where getValue (String x) = Just x - getValue _ = Nothing - parseJSON _ = fail "error parsing TemplateQueryKeyValuePairs" - -data TemplateQueryInline = - TemplateQueryInline { inline :: Query - , params :: TemplateQueryKeyValuePairs - } - deriving (Eq, Read, Show, Generic, Typeable) - -instance ToJSON TemplateQueryInline where - toJSON TemplateQueryInline{..} = object [ "inline" .= inline - , "params" .= params - ] - -instance FromJSON TemplateQueryInline where - parseJSON = withObject "TemplateQueryInline" parse - where parse o = TemplateQueryInline - <$> o .: "inline" - <*> o .: "params" - +instance ToJSON Pattern where + toJSON (Pattern pattern) = toJSON pattern data SearchResult a = SearchResult { took :: Int @@ -2014,1966 +515,11 @@ data SearchResult a = , searchHits :: SearchHits a , aggregations :: Maybe AggregationResults , scrollId :: Maybe ScrollId - , suggest :: Maybe NamedSuggestionResponse -- ^ Only one Suggestion request / response per Search is supported. + -- ^ Only one Suggestion request / response per + -- Search is supported. + , suggest :: Maybe NamedSuggestionResponse } - deriving (Eq, Read, Show, Generic, Typeable) - -newtype ScrollId = ScrollId Text deriving (Eq, Read, Show, Generic, Ord, ToJSON, FromJSON) - -type Score = Maybe Double - -data SearchHits a = - SearchHits { hitsTotal :: Int - , maxScore :: Score - , hits :: [Hit a] } deriving (Eq, Read, Show, Generic, Typeable) - -instance Semigroup (SearchHits a) where - (SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb) - -instance Monoid (SearchHits a) where - mempty = SearchHits 0 Nothing mempty - mappend = (<>) - -data Hit a = - Hit { hitIndex :: IndexName - , hitType :: MappingName - , hitDocId :: DocId - , hitScore :: Score - , hitSource :: Maybe a - , hitHighlight :: Maybe HitHighlight } deriving (Eq, Read, Show, Generic, Typeable) - -data ShardResult = - ShardResult { shardTotal :: Int - , shardsSuccessful :: Int - , shardsFailed :: Int } deriving (Eq, Read, Show, Generic, Typeable) - -type HitHighlight = M.Map Text [Text] - -showText :: Show a => a -> Text -showText = T.pack . show - -readMay :: Read a => String -> Maybe a -readMay s = case reads s of - (a, ""):_ -> Just a - _ -> Nothing - -parseReadText :: Read a => Text -> Parser a -parseReadText = maybe mzero return . readMay . 
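
-- 'readMay' is the total counterpart of 'read' that 'parseReadText' leans
-- on: it succeeds only when the entire string parses, e.g.
--
-- > readMay "300"  == Just (300 :: Int)
-- > readMay "300x" == (Nothing :: Maybe Int)
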
T.unpack - -type Aggregations = M.Map Text Aggregation - -emptyAggregations :: Aggregations -emptyAggregations = M.empty - -mkAggregations :: Text -> Aggregation -> Aggregations -mkAggregations name aggregation = M.insert name aggregation emptyAggregations - -data TermOrder = TermOrder{ termSortField :: Text - , termSortOrder :: SortOrder } deriving (Eq, Read, Show, Generic, Typeable) - -data TermInclusion = TermInclusion Text - | TermPattern Text Text deriving (Eq, Read, Show, Generic, Typeable) - -data CollectionMode = BreadthFirst - | DepthFirst deriving (Eq, Read, Show, Generic, Typeable) - -data ExecutionHint = Ordinals - | GlobalOrdinals - | GlobalOrdinalsHash - | GlobalOrdinalsLowCardinality - | Map deriving (Eq, Read, Show, Generic, Typeable) - -data TimeInterval = Weeks - | Days - | Hours - | Minutes - | Seconds deriving Eq - -data Interval = Year - | Quarter - | Month - | Week - | Day - | Hour - | Minute - | Second deriving (Eq, Read, Show, Generic, Typeable) - -data Aggregation = TermsAgg TermsAggregation - | CardinalityAgg CardinalityAggregation - | DateHistogramAgg DateHistogramAggregation - | ValueCountAgg ValueCountAggregation - | FilterAgg FilterAggregation - | DateRangeAgg DateRangeAggregation - | MissingAgg MissingAggregation - | TopHitsAgg TopHitsAggregation - | StatsAgg StatisticsAggregation - deriving (Eq, Read, Show, Generic, Typeable) - -data TopHitsAggregation = TopHitsAggregation - { taFrom :: Maybe From - , taSize :: Maybe Size - , taSort :: Maybe Sort - } deriving (Eq, Read, Show) - -data MissingAggregation = MissingAggregation - { maField :: Text - } deriving (Eq, Read, Show, Generic, Typeable) - -data TermsAggregation = TermsAggregation { term :: Either Text Text - , termInclude :: Maybe TermInclusion - , termExclude :: Maybe TermInclusion - , termOrder :: Maybe TermOrder - , termMinDocCount :: Maybe Int - , termSize :: Maybe Int - , termShardSize :: Maybe Int - , termCollectMode :: Maybe CollectionMode - , termExecutionHint :: Maybe ExecutionHint - , termAggs :: Maybe Aggregations - } deriving (Eq, Read, Show, Generic, Typeable) - -data CardinalityAggregation = CardinalityAggregation { cardinalityField :: FieldName, - precisionThreshold :: Maybe Int - } deriving (Eq, Read, Show, Generic, Typeable) - -data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName - , dateInterval :: Interval - , dateFormat :: Maybe Text - -- pre and post deprecated in 1.5 - , datePreZone :: Maybe Text - , datePostZone :: Maybe Text - , datePreOffset :: Maybe Text - , datePostOffset :: Maybe Text - , dateAggs :: Maybe Aggregations - } deriving (Eq, Read, Show, Generic, Typeable) - - -data DateRangeAggregation = DateRangeAggregation { draField :: FieldName - , draFormat :: Maybe Text - , draRanges :: NonEmpty DateRangeAggRange - } deriving (Eq, Read, Show, Generic, Typeable) - -data DateRangeAggRange = DateRangeFrom DateMathExpr - | DateRangeTo DateMathExpr - | DateRangeFromAndTo DateMathExpr DateMathExpr deriving (Eq, Read, Show, Generic, Typeable) - --- | See for more information. -data DateMathExpr = DateMathExpr DateMathAnchor [DateMathModifier] deriving (Eq, Read, Show, Generic, Typeable) - - --- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from. 
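
-- To attach the aggregation types above to a search, build an
-- 'Aggregations' map. A sketch with a hypothetical "users" aggregation
-- ('mkTermsAggregation' is defined further down):
--
-- > userAggs :: Aggregations
-- > userAggs = mkAggregations "users" (TermsAgg (mkTermsAggregation "user"))
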
-data DateMathAnchor = DMNow - | DMDate Day deriving (Eq, Read, Show, Generic, Typeable) - -data DateMathModifier = AddTime Int DateMathUnit - | SubtractTime Int DateMathUnit - | RoundDownTo DateMathUnit deriving (Eq, Read, Show, Generic, Typeable) - -data DateMathUnit = DMYear - | DMMonth - | DMWeek - | DMDay - | DMHour - | DMMinute - | DMSecond deriving (Eq, Read, Show, Generic, Typeable) - --- | See for more information. -data ValueCountAggregation = FieldValueCount FieldName - | ScriptValueCount Script deriving (Eq, Read, Show, Generic, Typeable) - --- | Single-bucket filter aggregations. See for more information. -data FilterAggregation = FilterAggregation { faFilter :: Filter - , faAggs :: Maybe Aggregations} deriving (Eq, Read, Show, Generic, Typeable) - -data StatisticsAggregation = StatisticsAggregation { statsType :: StatsType - , statsField :: FieldName } deriving (Eq, Read, Show, Generic, Typeable) - -data StatsType - = Basic - | Extended - deriving (Eq, Read, Show, Generic, Typeable) - -mkTermsAggregation :: Text -> TermsAggregation -mkTermsAggregation t = TermsAggregation (Left t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing - -mkTermsScriptAggregation :: Text -> TermsAggregation -mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing - -mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation -mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing - -mkCardinalityAggregation :: FieldName -> CardinalityAggregation -mkCardinalityAggregation t = CardinalityAggregation t Nothing - -mkStatsAggregation :: FieldName -> StatisticsAggregation -mkStatsAggregation = StatisticsAggregation Basic - -mkExtendedStatsAggregation :: FieldName -> StatisticsAggregation -mkExtendedStatsAggregation = StatisticsAggregation Extended - -instance ToJSON Version where - toJSON Version {..} = object ["number" .= number - ,"build_hash" .= build_hash - ,"build_date" .= build_date - ,"build_snapshot" .= build_snapshot - ,"lucene_version" .= lucene_version] - -instance FromJSON Version where - parseJSON = withObject "Version" parse - where parse o = Version - <$> o .: "number" - <*> o .: "build_hash" - <*> o .: "build_date" - <*> o .: "build_snapshot" - <*> o .: "lucene_version" - -instance ToJSON VersionNumber where - toJSON = toJSON . Vers.showVersion . versionNumber - -instance FromJSON VersionNumber where - parseJSON = withText "VersionNumber" (parse . T.unpack) - where - parse s = case filter (null . snd)(RP.readP_to_S Vers.parseVersion s) of - [(v, _)] -> pure (VersionNumber v) - [] -> fail ("Invalid version string " ++ s) - xs -> fail ("Ambiguous version string " ++ s ++ " (" ++ intercalate ", " (Vers.showVersion . 
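
-- 'DateMathExpr' renders to the compact Elasticsearch date-math syntax via
-- its ToJSON instance below, e.g.
--
-- > toJSON (DateMathExpr DMNow [SubtractTime 2 DMWeek, RoundDownTo DMDay])
-- >   == String "now-2w/d"
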
fst <$> xs) ++ ")") - -instance ToJSON TermOrder where - toJSON (TermOrder termSortField termSortOrder) = object [termSortField .= termSortOrder] - -instance ToJSON TermInclusion where - toJSON (TermInclusion x) = toJSON x - toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern, - "flags" .= flags] - -instance ToJSON CollectionMode where - toJSON BreadthFirst = "breadth_first" - toJSON DepthFirst = "depth_first" - -instance ToJSON ExecutionHint where - toJSON Ordinals = "ordinals" - toJSON GlobalOrdinals = "global_ordinals" - toJSON GlobalOrdinalsHash = "global_ordinals_hash" - toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality" - toJSON Map = "map" - -instance ToJSON Interval where - toJSON Year = "year" - toJSON Quarter = "quarter" - toJSON Month = "month" - toJSON Week = "week" - toJSON Day = "day" - toJSON Hour = "hour" - toJSON Minute = "minute" - toJSON Second = "second" - -instance Show TimeInterval where - show Weeks = "w" - show Days = "d" - show Hours = "h" - show Minutes = "m" - show Seconds = "s" - -instance Read TimeInterval where - readPrec = f =<< TR.get - where - f 'w' = return Weeks - f 'd' = return Days - f 'h' = return Hours - f 'm' = return Minutes - f 's' = return Seconds - f _ = fail "TimeInterval expected one of w, d, h, m, s" - -instance ToJSON Aggregation where - toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) = - omitNulls ["terms" .= omitNulls [ toJSON' term, - "include" .= include, - "exclude" .= exclude, - "order" .= order, - "min_doc_count" .= minDocCount, - "size" .= size, - "shard_size" .= shardSize, - "collect_mode" .= collectMode, - "execution_hint" .= executionHint - ], - "aggs" .= termAggs ] - where - toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y } - - toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) = - object ["cardinality" .= omitNulls [ "field" .= field, - "precisionThreshold" .= precisionThreshold - ] - ] - - toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) = - omitNulls ["date_histogram" .= omitNulls [ "field" .= field, - "interval" .= interval, - "format" .= format, - "pre_zone" .= preZone, - "post_zone" .= postZone, - "pre_offset" .= preOffset, - "post_offset" .= postOffset - ], - "aggs" .= dateHistoAggs ] - toJSON (ValueCountAgg a) = object ["value_count" .= v] - where v = case a of - (FieldValueCount (FieldName n)) -> object ["field" .= n] - (ScriptValueCount (Script s)) -> object ["script" .= s] - toJSON (FilterAgg (FilterAggregation filt ags)) = - omitNulls [ "filter" .= filt - , "aggs" .= ags] - toJSON (DateRangeAgg a) = object [ "date_range" .= a - ] - toJSON (MissingAgg (MissingAggregation{..})) = - object ["missing" .= object ["field" .= maField]] - - toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) = - omitNulls ["top_hits" .= omitNulls [ "size" .= msize - , "from" .= mfrom - , "sort" .= msort - ] - ] - - toJSON (StatsAgg (StatisticsAggregation typ field)) = - object [stType .= omitNulls [ "field" .= field ]] - where - stType | typ == Basic = "stats" - | otherwise = "extended_stats" - -instance ToJSON DateRangeAggregation where - toJSON DateRangeAggregation {..} = - omitNulls [ "field" .= draField - , "format" .= draFormat - , "ranges" .= toList draRanges - ] - -instance ToJSON DateRangeAggRange where - toJSON (DateRangeFrom e) = object [ "from" .= e ] - toJSON (DateRangeTo e) = object [ 
"to" .= e ] - toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] - -instance ToJSON DateMathExpr where - toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods)) - where fmtA DMNow = "now" - fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||" - fmtMod (AddTime n u) = "+" <> showText n <> fmtU u - fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u - fmtMod (RoundDownTo u) = "/" <> fmtU u - fmtU DMYear = "y" - fmtU DMMonth = "M" - fmtU DMWeek = "w" - fmtU DMDay = "d" - fmtU DMHour = "h" - fmtU DMMinute = "m" - fmtU DMSecond = "s" - - -type AggregationResults = M.Map Text Value - -class BucketAggregation a where - key :: a -> BucketValue - docCount :: a -> Int - aggs :: a -> Maybe AggregationResults - - -data Bucket a = Bucket { buckets :: [a]} deriving (Read, Show) - -data BucketValue = TextValue Text - | ScientificValue Scientific - | BoolValue Bool deriving (Read, Show) - -data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show) - -data TopHitResult a = TopHitResult { tarHits :: (SearchHits a) - } deriving Show - -data TermsResult = TermsResult { termKey :: BucketValue - , termsDocCount :: Int - , termsAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateHistogramResult = DateHistogramResult { dateKey :: Int - , dateKeyStr :: Maybe Text - , dateDocCount :: Int - , dateHistogramAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateRangeResult = DateRangeResult { dateRangeKey :: Text - , dateRangeFrom :: Maybe UTCTime - , dateRangeFromAsString :: Maybe Text - , dateRangeTo :: Maybe UTCTime - , dateRangeToAsString :: Maybe Text - , dateRangeDocCount :: Int - , dateRangeAggs :: Maybe AggregationResults } deriving (Read, Show, Eq, Generic, Typeable) - -toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult) -toTerms = toAggResult - -toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) -toDateHistogram = toAggResult - -toMissing :: Text -> AggregationResults -> Maybe MissingResult -toMissing = toAggResult - -toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) -toTopHits = toAggResult - -toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a -toAggResult t a = M.lookup t a >>= deserialize - where deserialize = parseMaybe parseJSON - -instance BucketAggregation TermsResult where - key = termKey - docCount = termsDocCount - aggs = termsAggs - -instance BucketAggregation DateHistogramResult where - key = TextValue . showText . dateKey - docCount = dateDocCount - aggs = dateHistogramAggs - -instance BucketAggregation DateRangeResult where - key = TextValue . dateRangeKey - docCount = dateRangeDocCount - aggs = dateRangeAggs - -instance (FromJSON a) => FromJSON (Bucket a) where - parseJSON (Object v) = Bucket <$> - v .: "buckets" - parseJSON _ = mempty - -instance FromJSON BucketValue where - parseJSON (String t) = return $ TextValue t - parseJSON (Number s) = return $ ScientificValue s - parseJSON (Bool b) = return $ BoolValue b - parseJSON _ = mempty - -instance FromJSON MissingResult where - parseJSON = withObject "MissingResult" parse - where parse v = MissingResult <$> v .: "doc_count" - -instance FromJSON TermsResult where - parseJSON (Object v) = TermsResult <$> - v .: "key" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v ["key", "doc_count"]) - parseJSON _ = mempty - -instance FromJSON DateHistogramResult where - parseJSON (Object v) = DateHistogramResult <$> - v .: "key" <*> - v .:? 
"key_as_string" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v [ "key" - , "doc_count" - , "key_as_string" - ] - ) - parseJSON _ = mempty - -instance FromJSON DateRangeResult where - parseJSON = withObject "DateRangeResult" parse - where parse v = DateRangeResult <$> - v .: "key" <*> - (fmap posixMS <$> v .:? "from") <*> - v .:? "from_as_string" <*> - (fmap posixMS <$> v .:? "to") <*> - v .:? "to_as_string" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v [ "key" - , "from" - , "from_as_string" - , "to" - , "to_as_string" - , "doc_count" - ] - ) - -instance FromJSON POSIXMS where - parseJSON = withScientific "POSIXMS" (return . parse) - where parse n = let n' = truncate n :: Integer - in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) - -instance (FromJSON a) => FromJSON (TopHitResult a) where - parseJSON (Object v) = TopHitResult <$> - v .: "hits" - parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" - -fieldTagged :: Monad m => (FieldName -> Object -> m a) -> Object -> m a -fieldTagged f o = case HM.toList o of - [(k, Object o')] -> f (FieldName k) o' - _ -> fail "Expected object with 1 field-named key" - --- Try to get an AggregationResults when we don't know the --- field name. We filter out the known keys to try to minimize the noise. -getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults -getNamedSubAgg o knownKeys = maggRes - where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o - maggRes - | HM.null unknownKeys = Nothing - | otherwise = Just . M.fromList $ HM.toList unknownKeys - -instance ToJSON GeoPoint where - toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = - object [ geoPointField .= geoPointLatLon ] - - -instance ToJSON Query where - toJSON (TermQuery (Term termQueryField termQueryValue) boost) = - object [ "term" .= - object [termQueryField .= object merged]] - where - base = [ "value" .= termQueryValue ] - boosted = maybe [] (return . 
("boost" .=)) boost - merged = mappend base boosted - - toJSON (TermsQuery fieldName terms) = - object [ "terms" .= object conjoined ] - where conjoined = [fieldName .= terms] - - toJSON (IdsQuery idsQueryMappingName docIds) = - object [ "ids" .= object conjoined ] - where conjoined = [ "type" .= idsQueryMappingName - , "values" .= fmap toJSON docIds ] - - toJSON (QueryQueryStringQuery qQueryStringQuery) = - object [ "query_string" .= qQueryStringQuery ] - - toJSON (QueryMatchQuery matchQuery) = - object [ "match" .= matchQuery ] - - toJSON (QueryMultiMatchQuery multiMatchQuery) = - toJSON multiMatchQuery - - toJSON (QueryBoolQuery boolQuery) = - object [ "bool" .= boolQuery ] - - toJSON (QueryBoostingQuery boostingQuery) = - object [ "boosting" .= boostingQuery ] - - toJSON (QueryCommonTermsQuery commonTermsQuery) = - object [ "common" .= commonTermsQuery ] - - toJSON (ConstantScoreQuery query boost) = - object ["constant_score" .= object ["query" .= query - , "boost" .= boost]] - - toJSON (QueryDisMaxQuery disMaxQuery) = - object [ "dis_max" .= disMaxQuery ] - - toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) = - object [ "fuzzy_like_this" .= fuzzyQuery ] - - toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) = - object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ] - - toJSON (QueryFuzzyQuery fuzzyQuery) = - object [ "fuzzy" .= fuzzyQuery ] - - toJSON (QueryHasChildQuery childQuery) = - object [ "has_child" .= childQuery ] - - toJSON (QueryHasParentQuery parentQuery) = - object [ "has_parent" .= parentQuery ] - - toJSON (QueryIndicesQuery qIndicesQuery) = - object [ "indices" .= qIndicesQuery ] - - toJSON (MatchAllQuery boost) = - object [ "match_all" .= omitNulls [ "boost" .= boost ] ] - - toJSON (QueryMoreLikeThisQuery query) = - object [ "more_like_this" .= query ] - - toJSON (QueryMoreLikeThisFieldQuery query) = - object [ "more_like_this_field" .= query ] - - toJSON (QueryNestedQuery query) = - object [ "nested" .= query ] - - toJSON (QueryPrefixQuery query) = - object [ "prefix" .= query ] - - toJSON (QueryRangeQuery query) = - object [ "range" .= query ] - - toJSON (QueryRegexpQuery query) = - object [ "regexp" .= query ] - - toJSON (QuerySimpleQueryStringQuery query) = - object [ "simple_query_string" .= query ] - - toJSON (QueryExistsQuery (FieldName fieldName)) = - object ["exists" .= object - ["field" .= fieldName] - ] - toJSON QueryMatchNoneQuery = - object ["match_none" .= object []] - - toJSON (QueryTemplateQueryInline templateQuery) = - object [ "template" .= templateQuery ] - -instance FromJSON Query where - parseJSON v = withObject "Query" parse v - where parse o = termQuery `taggedWith` "term" - <|> termsQuery `taggedWith` "terms" - <|> idsQuery `taggedWith` "ids" - <|> queryQueryStringQuery `taggedWith` "query_string" - <|> queryMatchQuery `taggedWith` "match" - <|> queryMultiMatchQuery - <|> queryBoolQuery `taggedWith` "bool" - <|> queryBoostingQuery `taggedWith` "boosting" - <|> queryCommonTermsQuery `taggedWith` "common" - <|> constantScoreQuery `taggedWith` "constant_score" - <|> queryDisMaxQuery `taggedWith` "dis_max" - <|> queryFuzzyLikeThisQuery `taggedWith` "fuzzy_like_this" - <|> queryFuzzyLikeFieldQuery `taggedWith` "fuzzy_like_this_field" - <|> queryFuzzyQuery `taggedWith` "fuzzy" - <|> queryHasChildQuery `taggedWith` "has_child" - <|> queryHasParentQuery `taggedWith` "has_parent" - <|> queryIndicesQuery `taggedWith` "indices" - <|> matchAllQuery `taggedWith` "match_all" - <|> queryMoreLikeThisQuery `taggedWith` "more_like_this" - <|> queryMoreLikeThisFieldQuery 
`taggedWith` "more_like_this_field" - <|> queryNestedQuery `taggedWith` "nested" - <|> queryPrefixQuery `taggedWith` "prefix" - <|> queryRangeQuery `taggedWith` "range" - <|> queryRegexpQuery `taggedWith` "regexp" - <|> querySimpleQueryStringQuery `taggedWith` "simple_query_string" - <|> queryTemplateQueryInline `taggedWith` "template" - where taggedWith parser k = parser =<< o .: k - termQuery = fieldTagged $ \(FieldName fn) o -> - TermQuery <$> (Term fn <$> o .: "value") <*> o .:? "boost" - termsQuery o = case HM.toList o of - [(fn, vs)] -> do vals <- parseJSON vs - case vals of - x:xs -> return (TermsQuery fn (x :| xs)) - _ -> fail "Expected non empty list of values" - _ -> fail "Expected object with 1 field-named key" - idsQuery o = IdsQuery <$> o .: "type" - <*> o .: "values" - queryQueryStringQuery = pure . QueryQueryStringQuery - queryMatchQuery = pure . QueryMatchQuery - queryMultiMatchQuery = QueryMultiMatchQuery <$> parseJSON v - queryBoolQuery = pure . QueryBoolQuery - queryBoostingQuery = pure . QueryBoostingQuery - queryCommonTermsQuery = pure . QueryCommonTermsQuery - constantScoreQuery o = case HM.lookup "query" o of - Just x -> ConstantScoreQuery <$> parseJSON x - <*> o .: "boost" - _ -> fail "Does not appear to be a ConstantScoreQuery" - queryDisMaxQuery = pure . QueryDisMaxQuery - queryFuzzyLikeThisQuery = pure . QueryFuzzyLikeThisQuery - queryFuzzyLikeFieldQuery = pure . QueryFuzzyLikeFieldQuery - queryFuzzyQuery = pure . QueryFuzzyQuery - queryHasChildQuery = pure . QueryHasChildQuery - queryHasParentQuery = pure . QueryHasParentQuery - queryIndicesQuery = pure . QueryIndicesQuery - matchAllQuery o = MatchAllQuery <$> o .:? "boost" - queryMoreLikeThisQuery = pure . QueryMoreLikeThisQuery - queryMoreLikeThisFieldQuery = pure . QueryMoreLikeThisFieldQuery - queryNestedQuery = pure . QueryNestedQuery - queryPrefixQuery = pure . QueryPrefixQuery - queryRangeQuery = pure . QueryRangeQuery - queryRegexpQuery = pure . QueryRegexpQuery - querySimpleQueryStringQuery = pure . QuerySimpleQueryStringQuery - -- queryExistsQuery o = QueryExistsQuery <$> o .: "field" - queryTemplateQueryInline = pure . QueryTemplateQueryInline - - -omitNulls :: [(Text, Value)] -> Value -omitNulls = object . filter notNull where - notNull (_, Null) = False - notNull (_, Array a) = (not . V.null) a - notNull _ = True - -instance ToJSON SimpleQueryStringQuery where - toJSON SimpleQueryStringQuery {..} = - omitNulls (base ++ maybeAdd) - where base = [ "query" .= simpleQueryStringQuery ] - maybeAdd = [ "fields" .= simpleQueryStringField - , "default_operator" .= simpleQueryStringOperator - , "analyzer" .= simpleQueryStringAnalyzer - , "flags" .= simpleQueryStringFlags - , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded - , "locale" .= simpleQueryStringLocale ] - -instance FromJSON SimpleQueryStringQuery where - parseJSON = withObject "SimpleQueryStringQuery" parse - where parse o = SimpleQueryStringQuery <$> o .: "query" - <*> o .:? "fields" - <*> o .:? "default_operator" - <*> o .:? "analyzer" - <*> (parseFlags <$> o .:? "flags") - <*> o .:? "lowercase_expanded_terms" - <*> o .:? 
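
-- 'omitNulls' above is what keeps all of the 'Maybe' fields out of the
-- wire format: 'Null' values and empty arrays are dropped, e.g.
--
-- > omitNulls [ "query" .= String "a"
-- >           , "boost" .= (Nothing :: Maybe Double) ]
-- >   == object [ "query" .= String "a" ]
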
"locale" - parseFlags (Just (x:xs)) = Just (x :| xs) - parseFlags _ = Nothing - -instance ToJSON FieldOrFields where - toJSON (FofField fieldName) = - toJSON fieldName - toJSON (FofFields fieldNames) = - toJSON fieldNames - -instance FromJSON FieldOrFields where - parseJSON v = FofField <$> parseJSON v - <|> FofFields <$> (parseNEJSON =<< parseJSON v) - -instance ToJSON SimpleQueryFlag where - toJSON SimpleQueryAll = "ALL" - toJSON SimpleQueryNone = "NONE" - toJSON SimpleQueryAnd = "AND" - toJSON SimpleQueryOr = "OR" - toJSON SimpleQueryPrefix = "PREFIX" - toJSON SimpleQueryPhrase = "PHRASE" - toJSON SimpleQueryPrecedence = "PRECEDENCE" - toJSON SimpleQueryEscape = "ESCAPE" - toJSON SimpleQueryWhitespace = "WHITESPACE" - toJSON SimpleQueryFuzzy = "FUZZY" - toJSON SimpleQueryNear = "NEAR" - toJSON SimpleQuerySlop = "SLOP" - -instance FromJSON SimpleQueryFlag where - parseJSON = withText "SimpleQueryFlag" parse - where parse "ALL" = pure SimpleQueryAll - parse "NONE" = pure SimpleQueryNone - parse "AND" = pure SimpleQueryAnd - parse "OR" = pure SimpleQueryOr - parse "PREFIX" = pure SimpleQueryPrefix - parse "PHRASE" = pure SimpleQueryPhrase - parse "PRECEDENCE" = pure SimpleQueryPrecedence - parse "ESCAPE" = pure SimpleQueryEscape - parse "WHITESPACE" = pure SimpleQueryWhitespace - parse "FUZZY" = pure SimpleQueryFuzzy - parse "NEAR" = pure SimpleQueryNear - parse "SLOP" = pure SimpleQuerySlop - parse f = fail ("Unexpected SimpleQueryFlag: " <> show f) - -instance ToJSON RegexpQuery where - toJSON (RegexpQuery (FieldName rqQueryField) - (Regexp regexpQueryQuery) rqQueryFlags - rqQueryBoost) = - object [ rqQueryField .= omitNulls base ] - where base = [ "value" .= regexpQueryQuery - , "flags" .= rqQueryFlags - , "boost" .= rqQueryBoost ] - -instance FromJSON RegexpQuery where - parseJSON = withObject "RegexpQuery" parse - where parse = fieldTagged $ \fn o -> - RegexpQuery fn - <$> o .: "value" - <*> o .: "flags" - <*> o .:? "boost" - -instance ToJSON QueryStringQuery where - toJSON (QueryStringQuery qsQueryString - qsDefaultField qsOperator - qsAnalyzer qsAllowWildcard - qsLowercaseExpanded qsEnablePositionIncrements - qsFuzzyMaxExpansions qsFuzziness - qsFuzzyPrefixLength qsPhraseSlop - qsBoost qsAnalyzeWildcard - qsGeneratePhraseQueries qsMinimumShouldMatch - qsLenient qsLocale) = - omitNulls base - where - base = [ "query" .= qsQueryString - , "default_field" .= qsDefaultField - , "default_operator" .= qsOperator - , "analyzer" .= qsAnalyzer - , "allow_leading_wildcard" .= qsAllowWildcard - , "lowercase_expanded_terms" .= qsLowercaseExpanded - , "enable_position_increments" .= qsEnablePositionIncrements - , "fuzzy_max_expansions" .= qsFuzzyMaxExpansions - , "fuzziness" .= qsFuzziness - , "fuzzy_prefix_length" .= qsFuzzyPrefixLength - , "phrase_slop" .= qsPhraseSlop - , "boost" .= qsBoost - , "analyze_wildcard" .= qsAnalyzeWildcard - , "auto_generate_phrase_queries" .= qsGeneratePhraseQueries - , "minimum_should_match" .= qsMinimumShouldMatch - , "lenient" .= qsLenient - , "locale" .= qsLocale ] - -instance FromJSON QueryStringQuery where - parseJSON = withObject "QueryStringQuery" parse - where parse o = QueryStringQuery - <$> o .: "query" - <*> o .:? "default_field" - <*> o .:? "default_operator" - <*> o .:? "analyzer" - <*> o .:? "allow_leading_wildcard" - <*> o .:? "lowercase_expanded_terms" - <*> o .:? "enable_position_increments" - <*> o .:? "fuzzy_max_expansions" - <*> o .:? "fuzziness" - <*> o .:? "fuzzy_prefix_length" - <*> o .:? "phrase_slop" - <*> o .:? "boost" - <*> o .:? 
"analyze_wildcard" - <*> o .:? "auto_generate_phrase_queries" - <*> o .:? "minimum_should_match" - <*> o .:? "lenient" - <*> o .:? "locale" - -instance ToJSON RangeQuery where - toJSON (RangeQuery (FieldName fieldName) range boost) = - object [ fieldName .= object conjoined ] - where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range) - -instance FromJSON RangeQuery where - parseJSON = withObject "RangeQuery" parse - where parse = fieldTagged $ \fn o -> - RangeQuery fn - <$> parseJSON (Object o) - <*> o .: "boost" - -instance FromJSON RangeValue where - parseJSON = withObject "RangeValue" parse - where parse o = parseDate o - <|> parseDouble o - parseDate o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? "gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDateGtLt (GreaterThanD b) (LessThanD a)) - (Just a, _, _, Just b)-> return (RangeDateGteLt (GreaterThanEqD b) (LessThanD a)) - (_, Just a, Just b, _)-> return (RangeDateGtLte (GreaterThanD b) (LessThanEqD a)) - (_, Just a, _, Just b)-> return (RangeDateGteLte (GreaterThanEqD b) (LessThanEqD a)) - (_, _, Just a, _)-> return (RangeDateGt (GreaterThanD a)) - (Just a, _, _, _)-> return (RangeDateLt (LessThanD a)) - (_, _, _, Just a)-> return (RangeDateGte (GreaterThanEqD a)) - (_, Just a, _, _)-> return (RangeDateLte (LessThanEqD a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - parseDouble o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? "gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDoubleGtLt (GreaterThan b) (LessThan a)) - (Just a, _, _, Just b)-> return (RangeDoubleGteLt (GreaterThanEq b) (LessThan a)) - (_, Just a, Just b, _)-> return (RangeDoubleGtLte (GreaterThan b) (LessThanEq a)) - (_, Just a, _, Just b)-> return (RangeDoubleGteLte (GreaterThanEq b) (LessThanEq a)) - (_, _, Just a, _)-> return (RangeDoubleGt (GreaterThan a)) - (Just a, _, _, _)-> return (RangeDoubleLt (LessThan a)) - (_, _, _, Just a)-> return (RangeDoubleGte (GreaterThanEq a)) - (_, Just a, _, _)-> return (RangeDoubleLte (LessThanEq a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - -instance ToJSON PrefixQuery where - toJSON (PrefixQuery (FieldName fieldName) queryValue boost) = - object [ fieldName .= omitNulls base ] - where base = [ "value" .= queryValue - , "boost" .= boost ] - -instance FromJSON PrefixQuery where - parseJSON = withObject "PrefixQuery" parse - where parse = fieldTagged $ \fn o -> - PrefixQuery fn - <$> o .: "value" - <*> o .:? 
"boost" - -instance ToJSON NestedQuery where - toJSON (NestedQuery nqPath nqScoreType nqQuery) = - object [ "path" .= nqPath - , "score_mode" .= nqScoreType - , "query" .= nqQuery ] - -instance FromJSON NestedQuery where - parseJSON = withObject "NestedQuery" parse - where parse o = NestedQuery - <$> o .: "path" - <*> o .: "score_mode" - <*> o .: "query" - -instance ToJSON MoreLikeThisFieldQuery where - toJSON (MoreLikeThisFieldQuery text (FieldName fieldName) - percent mtf mqt stopwords mindf maxdf - minwl maxwl boostTerms boost analyzer) = - object [ fieldName .= omitNulls base ] - where base = [ "like_text" .= text - , "percent_terms_to_match" .= percent - , "min_term_freq" .= mtf - , "max_query_terms" .= mqt - , "stop_words" .= stopwords - , "min_doc_freq" .= mindf - , "max_doc_freq" .= maxdf - , "min_word_length" .= minwl - , "max_word_length" .= maxwl - , "boost_terms" .= boostTerms - , "boost" .= boost - , "analyzer" .= analyzer ] - -instance FromJSON MoreLikeThisFieldQuery where - parseJSON = withObject "MoreLikeThisFieldQuery" parse - where parse = fieldTagged $ \fn o -> - MoreLikeThisFieldQuery - <$> o .: "like_text" - <*> pure fn - <*> o .:? "percent_terms_to_match" - <*> o .:? "min_term_freq" - <*> o .:? "max_query_terms" - -- <*> (optionalNE =<< o .:? "stop_words") - <*> o .:? "stop_words" - <*> o .:? "min_doc_freq" - <*> o .:? "max_doc_freq" - <*> o .:? "min_word_length" - <*> o .:? "max_word_length" - <*> o .:? "boost_terms" - <*> o .:? "boost" - <*> o .:? "analyzer" - -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) - -instance ToJSON MoreLikeThisQuery where - toJSON (MoreLikeThisQuery text fields percent - mtf mqt stopwords mindf maxdf - minwl maxwl boostTerms boost analyzer) = - omitNulls base - where base = [ "like_text" .= text - , "fields" .= fields - , "percent_terms_to_match" .= percent - , "min_term_freq" .= mtf - , "max_query_terms" .= mqt - , "stop_words" .= stopwords - , "min_doc_freq" .= mindf - , "max_doc_freq" .= maxdf - , "min_word_length" .= minwl - , "max_word_length" .= maxwl - , "boost_terms" .= boostTerms - , "boost" .= boost - , "analyzer" .= analyzer ] - -instance FromJSON MoreLikeThisQuery where - parseJSON = withObject "MoreLikeThisQuery" parse - where parse o = MoreLikeThisQuery - <$> o .: "like_text" - -- <*> (optionalNE =<< o .:? "fields") - <*> o .:? "fields" - <*> o .:? "percent_terms_to_match" - <*> o .:? "min_term_freq" - <*> o .:? "max_query_terms" - -- <*> (optionalNE =<< o .:? "stop_words") - <*> o .:? "stop_words" - <*> o .:? "min_doc_freq" - <*> o .:? "max_doc_freq" - <*> o .:? "min_word_length" - <*> o .:? "max_word_length" - <*> o .:? "boost_terms" - <*> o .:? "boost" - <*> o .:? "analyzer" - -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) - -instance ToJSON IndicesQuery where - toJSON (IndicesQuery indices query noMatch) = - omitNulls [ "indices" .= indices - , "no_match_query" .= noMatch - , "query" .= query ] - -instance FromJSON IndicesQuery where - parseJSON = withObject "IndicesQuery" parse - where parse o = IndicesQuery - <$> o .:? "indices" .!= [] - <*> o .: "query" - <*> o .:? "no_match_query" - -instance ToJSON HasParentQuery where - toJSON (HasParentQuery queryType query scoreType) = - omitNulls [ "parent_type" .= queryType - , "score_type" .= scoreType - , "query" .= query ] - -instance FromJSON HasParentQuery where - parseJSON = withObject "HasParentQuery" parse - where parse o = HasParentQuery - <$> o .: "parent_type" - <*> o .: "query" - <*> o .:? 
"score_type" - -instance ToJSON HasChildQuery where - toJSON (HasChildQuery queryType query scoreType) = - omitNulls [ "query" .= query - , "score_type" .= scoreType - , "type" .= queryType ] - -instance FromJSON HasChildQuery where - parseJSON = withObject "HasChildQuery" parse - where parse o = HasChildQuery - <$> o .: "type" - <*> o .: "query" - <*> o .:? "score_type" - -instance ToJSON FuzzyQuery where - toJSON (FuzzyQuery (FieldName fieldName) queryText - prefixLength maxEx fuzziness boost) = - object [ fieldName .= omitNulls base ] - where base = [ "value" .= queryText - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "boost" .= boost - , "max_expansions" .= maxEx ] - -instance FromJSON FuzzyQuery where - parseJSON = withObject "FuzzyQuery" parse - where parse = fieldTagged $ \fn o -> - FuzzyQuery fn - <$> o .: "value" - <*> o .: "prefix_length" - <*> o .: "max_expansions" - <*> o .: "fuzziness" - <*> o .:? "boost" - -instance ToJSON FuzzyLikeFieldQuery where - toJSON (FuzzyLikeFieldQuery (FieldName fieldName) - fieldText maxTerms ignoreFreq fuzziness prefixLength - boost analyzer) = - object [ fieldName .= - omitNulls [ "like_text" .= fieldText - , "max_query_terms" .= maxTerms - , "ignore_tf" .= ignoreFreq - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "analyzer" .= analyzer - , "boost" .= boost ]] - -instance FromJSON FuzzyLikeFieldQuery where - parseJSON = withObject "FuzzyLikeFieldQuery" parse - where parse = fieldTagged $ \fn o -> - FuzzyLikeFieldQuery fn - <$> o .: "like_text" - <*> o .: "max_query_terms" - <*> o .: "ignore_tf" - <*> o .: "fuzziness" - <*> o .: "prefix_length" - <*> o .: "boost" - <*> o .:? "analyzer" - -instance ToJSON FuzzyLikeThisQuery where - toJSON (FuzzyLikeThisQuery fields text maxTerms - ignoreFreq fuzziness prefixLength boost analyzer) = - omitNulls base - where base = [ "fields" .= fields - , "like_text" .= text - , "max_query_terms" .= maxTerms - , "ignore_tf" .= ignoreFreq - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "analyzer" .= analyzer - , "boost" .= boost ] - -instance FromJSON FuzzyLikeThisQuery where - parseJSON = withObject "FuzzyLikeThisQuery" parse - where parse o = FuzzyLikeThisQuery - <$> o .:? "fields" .!= [] - <*> o .: "like_text" - <*> o .: "max_query_terms" - <*> o .: "ignore_tf" - <*> o .: "fuzziness" - <*> o .: "prefix_length" - <*> o .: "boost" - <*> o .:? "analyzer" - -instance ToJSON DisMaxQuery where - toJSON (DisMaxQuery queries tiebreaker boost) = - omitNulls base - where base = [ "queries" .= queries - , "boost" .= boost - , "tie_breaker" .= tiebreaker ] - -instance FromJSON DisMaxQuery where - parseJSON = withObject "DisMaxQuery" parse - where parse o = DisMaxQuery - <$> o .:? "queries" .!= [] - <*> o .: "tie_breaker" - <*> o .:? "boost" - -instance ToJSON CommonTermsQuery where - toJSON (CommonTermsQuery (FieldName fieldName) - (QueryString query) cf lfo hfo msm - boost analyzer disableCoord) = - object [fieldName .= omitNulls base ] - where base = [ "query" .= query - , "cutoff_frequency" .= cf - , "low_freq_operator" .= lfo - , "minimum_should_match" .= msm - , "boost" .= boost - , "analyzer" .= analyzer - , "disable_coord" .= disableCoord - , "high_freq_operator" .= hfo ] - -instance FromJSON CommonTermsQuery where - parseJSON = withObject "CommonTermsQuery" parse - where parse = fieldTagged $ \fn o -> - CommonTermsQuery fn - <$> o .: "query" - <*> o .: "cutoff_frequency" - <*> o .: "low_freq_operator" - <*> o .: "high_freq_operator" - <*> o .:? 
"minimum_should_match" - <*> o .:? "boost" - <*> o .:? "analyzer" - <*> o .:? "disable_coord" - -instance ToJSON CommonMinimumMatch where - toJSON (CommonMinimumMatch mm) = toJSON mm - toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) = - object [ "low_freq" .= lowF - , "high_freq" .= highF ] - -instance FromJSON CommonMinimumMatch where - parseJSON v = parseMinimum v - <|> parseMinimumHighLow v - where parseMinimum = fmap CommonMinimumMatch . parseJSON - parseMinimumHighLow = fmap CommonMinimumMatchHighLow . withObject "CommonMinimumMatchHighLow" (\o -> - MinimumMatchHighLow - <$> o .: "low_freq" - <*> o .: "high_freq") - - -instance ToJSON BoostingQuery where - toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) = - object [ "positive" .= bqPositiveQuery - , "negative" .= bqNegativeQuery - , "negative_boost" .= bqNegativeBoost ] - -instance FromJSON BoostingQuery where - parseJSON = withObject "BoostingQuery" parse - where parse o = BoostingQuery - <$> o .: "positive" - <*> o .: "negative" - <*> o .: "negative_boost" - -instance ToJSON BoolQuery where - toJSON (BoolQuery mustM filterM' notM shouldM bqMin boost disableCoord) = - omitNulls base - where base = [ "must" .= mustM - , "filter" .= filterM' - , "must_not" .= notM - , "should" .= shouldM - , "minimum_should_match" .= bqMin - , "boost" .= boost - , "disable_coord" .= disableCoord ] - -instance FromJSON BoolQuery where - parseJSON = withObject "BoolQuery" parse - where parse o = BoolQuery - <$> o .:? "must" .!= [] - <*> o .:? "filter" .!= [] - <*> o .:? "must_not" .!= [] - <*> o .:? "should" .!= [] - <*> o .:? "minimum_should_match" - <*> o .:? "boost" - <*> o .:? "disable_coord" - -instance ToJSON MatchQuery where - toJSON (MatchQuery (FieldName fieldName) - (QueryString mqQueryString) booleanOperator - zeroTermsQuery cutoffFrequency matchQueryType - analyzer maxExpansions lenient boost - minShouldMatch - ) = - object [ fieldName .= omitNulls base ] - where base = [ "query" .= mqQueryString - , "operator" .= booleanOperator - , "zero_terms_query" .= zeroTermsQuery - , "cutoff_frequency" .= cutoffFrequency - , "type" .= matchQueryType - , "analyzer" .= analyzer - , "max_expansions" .= maxExpansions - , "lenient" .= lenient - , "boost" .= boost - , "minimum_should_match" .= minShouldMatch - ] - -instance FromJSON MatchQuery where - parseJSON = withObject "MatchQuery" parse - where parse = fieldTagged $ \fn o -> - MatchQuery fn - <$> o .: "query" - <*> o .: "operator" - <*> o .: "zero_terms_query" - <*> o .:? "cutoff_frequency" - <*> o .:? "type" - <*> o .:? "analyzer" - <*> o .:? "max_expansions" - <*> o .:? "lenient" - <*> o .:? "boost" - <*> o .:? "minimum_should_match" - -instance ToJSON MultiMatchQuery where - toJSON (MultiMatchQuery fields (QueryString query) boolOp - ztQ tb mmqt cf analyzer maxEx lenient) = - object ["multi_match" .= omitNulls base] - where base = [ "fields" .= fmap toJSON fields - , "query" .= query - , "operator" .= boolOp - , "zero_terms_query" .= ztQ - , "tie_breaker" .= tb - , "type" .= mmqt - , "cutoff_frequency" .= cf - , "analyzer" .= analyzer - , "max_expansions" .= maxEx - , "lenient" .= lenient ] - -instance FromJSON MultiMatchQuery where - parseJSON = withObject "MultiMatchQuery" parse - where parse raw = do o <- raw .: "multi_match" - MultiMatchQuery - <$> o .:? "fields" .!= [] - <*> o .: "query" - <*> o .: "operator" - <*> o .: "zero_terms_query" - <*> o .:? "tie_breaker" - <*> o .:? "type" - <*> o .:? "cutoff_frequency" - <*> o .:? "analyzer" - <*> o .:? 
"max_expansions" - <*> o .:? "lenient" - -instance ToJSON MultiMatchQueryType where - toJSON MultiMatchBestFields = "best_fields" - toJSON MultiMatchMostFields = "most_fields" - toJSON MultiMatchCrossFields = "cross_fields" - toJSON MultiMatchPhrase = "phrase" - toJSON MultiMatchPhrasePrefix = "phrase_prefix" - -instance FromJSON MultiMatchQueryType where - parseJSON = withText "MultiMatchPhrasePrefix" parse - where parse "best_fields" = pure MultiMatchBestFields - parse "most_fields" = pure MultiMatchMostFields - parse "cross_fields" = pure MultiMatchCrossFields - parse "phrase" = pure MultiMatchPhrase - parse "phrase_prefix" = pure MultiMatchPhrasePrefix - parse t = fail ("Unexpected MultiMatchPhrasePrefix: " <> show t) - -instance ToJSON BooleanOperator where - toJSON And = String "and" - toJSON Or = String "or" - -instance FromJSON BooleanOperator where - parseJSON = withText "BooleanOperator" parse - where parse "and" = pure And - parse "or" = pure Or - parse o = fail ("Unexpected BooleanOperator: " <> show o) - -instance ToJSON ZeroTermsQuery where - toJSON ZeroTermsNone = String "none" - toJSON ZeroTermsAll = String "all" - -instance FromJSON ZeroTermsQuery where - parseJSON = withText "ZeroTermsQuery" parse - where parse "none" = pure ZeroTermsNone - parse "all" = pure ZeroTermsAll - parse q = fail ("Unexpected ZeroTermsQuery: " <> show q) - -instance ToJSON MatchQueryType where - toJSON MatchPhrase = "phrase" - toJSON MatchPhrasePrefix = "phrase_prefix" - -instance FromJSON MatchQueryType where - parseJSON = withText "MatchQueryType" parse - where parse "phrase" = pure MatchPhrase - parse "phrase_prefix" = pure MatchPhrasePrefix - parse t = fail ("Unexpected MatchQueryType: " <> show t) - -instance FromJSON Status where - parseJSON (Object v) = Status <$> - v .: "name" <*> - v .: "cluster_name" <*> - v .: "cluster_uuid" <*> - v .: "version" <*> - v .: "tagline" - parseJSON _ = empty - - -instance ToJSON IndexSettings where - toJSON (IndexSettings s r) = object ["settings" .= - object ["index" .= - object ["number_of_shards" .= s, "number_of_replicas" .= r] - ] - ] - -instance FromJSON IndexSettings where - parseJSON = withObject "IndexSettings" parse - where parse o = do s <- o .: "settings" - i <- s .: "index" - IndexSettings <$> i .: "number_of_shards" - <*> i .: "number_of_replicas" - -instance ToJSON UpdatableIndexSetting where - toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x - toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x - toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) - toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x - toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x - toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x - toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x - toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) - toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x - toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x - toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) - toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) - toJSON (RoutingAllocationInclude fs) = oPath 
("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) - toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) - toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) - toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x - toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x - toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x - toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) - toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x - toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x - toJSON (CompressionSetting x) = oPath ("index" :| ["codec"]) x - toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x - toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x - toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x - toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x - toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x - toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x - toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x - toJSON (MappingTotalFieldsLimit x) = oPath ("index" :| ["mapping","total_fields","limit"]) x - toJSON (AnalysisSetting x) = oPath ("index" :| ["analysis"]) x - -instance FromJSON UpdatableIndexSetting where - parseJSON = withObject "UpdatableIndexSetting" parse - where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] - <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] - <|> refreshInterval `taggedAt` ["index", "refresh_interval"] - <|> indexConcurrency `taggedAt` ["index", "concurrency"] - <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] - <|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"] - <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] - <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] - <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] - <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] - <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] - <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] - <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] - <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] - <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] - <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] - <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] - <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] - <|> gcDeletes `taggedAt` ["index", "gc_deletes"] - <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] - <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] - <|> compressionSetting `taggedAt` ["index", "codec"] - <|> compoundFormat `taggedAt` ["index", "compound_format"] - <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] - <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] - <|> 
blocksReadOnly `taggedAt` ["blocks", "read_only"]
-                    <|> blocksRead `taggedAt` ["blocks", "read"]
-                    <|> blocksWrite `taggedAt` ["blocks", "write"]
-                    <|> blocksMetaData `taggedAt` ["blocks", "metadata"]
-                    <|> mappingTotalFieldsLimit `taggedAt` ["index", "mapping", "total_fields", "limit"]
-                    <|> analysisSetting `taggedAt` ["index", "analysis"]
-            where taggedAt f ks = taggedAt' f (Object o) ks
-                  taggedAt' f v [] = f =<< (parseJSON v <|> (parseJSON (unStringlyTypeJSON v)))
-                  taggedAt' f v (k:ks) = withObject "Object" (\o -> do v' <- o .: k
-                                                                       taggedAt' f v' ks) v
-                  numberOfReplicas = pure . NumberOfReplicas
-                  autoExpandReplicas = pure . AutoExpandReplicas
-                  refreshInterval = pure . RefreshInterval . ndtJSON
-                  indexConcurrency = pure . IndexConcurrency
-                  failOnMergeFailure = pure . FailOnMergeFailure
-                  translogFlushThresholdOps = pure . TranslogFlushThresholdOps
-                  translogFlushThresholdSize = pure . TranslogFlushThresholdSize
-                  translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON
-                  translogDisableFlush = pure . TranslogDisableFlush
-                  cacheFilterMaxSize = pure . CacheFilterMaxSize
-                  cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON
-                  gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON
-                  routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter
-                  routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter
-                  routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter
-                  routingAllocationEnable = pure . RoutingAllocationEnable
-                  routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode
-                  recoveryInitialShards = pure . RecoveryInitialShards
-                  gcDeletes = pure . GCDeletes . ndtJSON
-                  ttlDisablePurge = pure . TTLDisablePurge
-                  translogFSType = pure . TranslogFSType
-                  compressionSetting = pure . CompressionSetting
-                  compoundFormat = pure . IndexCompoundFormat
-                  compoundOnFlush = pure . IndexCompoundOnFlush
-                  warmerEnabled = pure . WarmerEnabled
-                  blocksReadOnly = pure . BlocksReadOnly
-                  blocksRead = pure . BlocksRead
-                  blocksWrite = pure . BlocksWrite
-                  blocksMetaData = pure . BlocksMetaData
-                  mappingTotalFieldsLimit = pure . MappingTotalFieldsLimit
-                  analysisSetting = pure . AnalysisSetting
-
-instance FromJSON IndexSettingsSummary where
-  parseJSON = withObject "IndexSettingsSummary" parse
-    where parse o = case HM.toList o of
-                      [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn)
-                                                <$> parseJSON v
-                                                <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings")
-                      _ -> fail "Expected single-key object with index name"
-          redundant (NumberOfReplicas _) = True
-          redundant _ = False
-
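
-- The settings APIs hand every leaf value back as a string, so the parser
-- above first passes values through 'unStringlyTypeJSON' (next). A sketch
-- of the intended behavior:
--
-- > unStringlyTypeJSON (String "true")  == Bool True
-- > unStringlyTypeJSON (String "1000")  == Number 1000
-- > unStringlyTypeJSON (String "other") == String "other"
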
-unStringlyTypeJSON :: Value -> Value
-unStringlyTypeJSON (String "true") = Bool True
-unStringlyTypeJSON (String "false") = Bool False
-unStringlyTypeJSON (String "null") = Null
-unStringlyTypeJSON v@(String t) = case readMay (T.unpack t) of
-  Just n -> Number n
-  Nothing -> v
-unStringlyTypeJSON v = v
-
-
-parseSettings :: Object -> Parser [UpdatableIndexSetting]
-parseSettings o = do
-  o' <- o .: "index"
-  -- slice the index object into singleton hashmaps and try to parse each
-  parses <- forM (HM.toList o') $ \(k, v) -> do
-    -- blocks are now nested into the "index" key, which is not how they're serialized
-    let atRoot = Object (HM.singleton k v)
-    let atIndex = Object (HM.singleton "index" atRoot)
-    optional (parseJSON atRoot <|> parseJSON atIndex)
-  return (catMaybes parses)
-
-oPath :: ToJSON a => NonEmpty Text -> a -> Value
-oPath (k :| []) v = object [k .= v]
-oPath (k :| (h:t)) v = object [k .= oPath (h :| t) v]
-
-attrFilterJSON :: NonEmpty NodeAttrFilter -> Value
-attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs)
-                           | NodeAttrFilter (NodeAttrName n) vs <- toList fs]
-
-parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter)
-parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse
-  where parse o = case HM.toList o of
-                    [] -> fail "Expected non-empty list of NodeAttrFilters"
-                    x:xs -> DT.mapM (uncurry parse') (x :| xs)
-        parse' n = withText "Text" $ \t ->
-          case T.splitOn "," t of
-            fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs))
-            [] -> fail "Expected non-empty list of filter values"
-
-instance ToJSON ReplicaBounds where
-  toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b)
-  toJSON (ReplicasLowerBounded a) = String (showText a <> "-all")
-  toJSON ReplicasUnbounded = Bool False
-
-instance FromJSON ReplicaBounds where
-  parseJSON v = withText "ReplicaBounds" parseText v
-            <|> withBool "ReplicaBounds" parseBool v
-    where parseText t = case T.splitOn "-" t of
-                          [a, "all"] -> ReplicasLowerBounded <$> parseReadText a
-                          [a, b] -> ReplicasBounded <$> parseReadText a
-                                                    <*> parseReadText b
-                          _ -> fail ("Could not parse ReplicaBounds: " <> show t)
-          parseBool False = pure ReplicasUnbounded
-          parseBool _ = fail "ReplicasUnbounded cannot be represented with True"
-
-instance ToJSON AllocationPolicy where
-  toJSON AllocAll = String "all"
-  toJSON AllocPrimaries = String "primaries"
-  toJSON AllocNewPrimaries = String "new_primaries"
-  toJSON AllocNone = String "none"
-
-instance FromJSON AllocationPolicy where
-  parseJSON = withText "AllocationPolicy" parse
-    where parse "all" = pure AllocAll
-          parse "primaries" = pure AllocPrimaries
-          parse "new_primaries" = pure AllocNewPrimaries
-          parse "none" = pure AllocNone
-          parse t = fail ("Invalid AllocationPolicy: " <> show t)
-
-instance ToJSON InitialShardCount where
-  toJSON QuorumShards = String "quorum"
-  toJSON QuorumMinus1Shards = String "quorum-1"
-  toJSON FullShards = String "full"
-  toJSON FullMinus1Shards = String "full-1"
-  toJSON (ExplicitShards x) = toJSON x
-
-instance FromJSON InitialShardCount where
-  parseJSON v = withText "InitialShardCount" parseText v
-            <|> ExplicitShards <$> parseJSON v
-    where parseText "quorum" = pure QuorumShards
-          parseText "quorum-1" = pure QuorumMinus1Shards
-          parseText "full" = pure FullShards
-          parseText "full-1" = pure FullMinus1Shards
-          parseText _ = mzero
-
-instance ToJSON FSType where
-  toJSON FSSimple = "simple"
-  toJSON FSBuffered = "buffered"
-
-instance FromJSON FSType where
-  parseJSON = withText "FSType" parse
-    where parse "simple"
= pure FSSimple - parse "buffered" = pure FSBuffered - parse t = fail ("Invalid FSType: " <> show t) - -instance ToJSON CompoundFormat where - toJSON (CompoundFileFormat x) = Bool x - toJSON (MergeSegmentVsTotalIndex x) = toJSON x - -instance FromJSON CompoundFormat where - parseJSON v = CompoundFileFormat <$> parseJSON v - <|> MergeSegmentVsTotalIndex <$> parseJSON v - -instance ToJSON NominalDiffTimeJSON where - toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") - -instance FromJSON NominalDiffTimeJSON where - parseJSON = withText "NominalDiffTime" parse - where parse t = case T.takeEnd 1 t of - "s" -> NominalDiffTimeJSON . fromInteger <$> parseReadText (T.dropEnd 1 t) - _ -> fail "Invalid or missing NominalDiffTime unit (expected s)" - -instance ToJSON IndexTemplate where - toJSON (IndexTemplate p s m) = merge - (object [ "template" .= p - , "mappings" .= foldl' merge (object []) m - ]) - (toJSON s) - where - merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2 - merge o Null = o - merge _ _ = undefined - -instance (FromJSON a) => FromJSON (EsResult a) where - parseJSON jsonVal@(Object v) = do - found <- v .:? "found" .!= False - fr <- if found - then parseJSON jsonVal - else return Nothing - EsResult <$> v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - pure fr - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (EsResultFound a) where - parseJSON (Object v) = EsResultFound <$> - v .: "_version" <*> - v .: "_source" - parseJSON _ = empty - -instance FromJSON EsError where - parseJSON (Object v) = EsError <$> - v .: "status" <*> - (v .: "error" <|> (v .: "error" >>= (.: "reason"))) - parseJSON _ = empty - -instance FromJSON IndexAliasesSummary where - parseJSON = withObject "IndexAliasesSummary" parse - where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) - go ixn = withObject "index aliases" $ \ia -> do - aliases <- ia .:? "aliases" .!= mempty - forM (HM.toList aliases) $ \(aName, v) -> do - let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) - IndexAliasSummary indexAlias <$> parseJSON v - - -instance ToJSON IndexAliasAction where - toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] - where Object iaObj = toJSON ia - Object optsObj = toJSON opts - toJSON (RemoveAlias ia) = object ["remove" .= iaObj] - where Object iaObj = toJSON ia - -instance ToJSON IndexAlias where - toJSON IndexAlias {..} = object ["index" .= srcIndex - , "alias" .= indexAlias - ] - -instance ToJSON IndexAliasCreate where - toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) - where filterObj = maybe mempty (HM.singleton "filter" . toJSON) aliasCreateFilter - Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting - -instance ToJSON AliasRouting where - toJSON (AllAliasRouting v) = object ["routing" .= v] - toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) - where prs = [("search_routing" .=) <$> srch - ,("index_routing" .=) <$> idx] - -instance FromJSON AliasRouting where - parseJSON = withObject "AliasRouting" parse - where parse o = parseAll o <|> parseGranular o - parseAll o = AllAliasRouting <$> o .: "routing" - parseGranular o = do - sr <- o .:? "search_routing" - ir <- o .:? 
"index_routing" - if isNothing sr && isNothing ir - then fail "Both search_routing and index_routing can't be blank" - else return (GranularAliasRouting sr ir) - -instance FromJSON IndexAliasCreate where - parseJSON v = withObject "IndexAliasCreate" parse v - where parse o = IndexAliasCreate <$> optional (parseJSON v) - <*> o .:? "filter" - -instance ToJSON SearchAliasRouting where - toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) - -instance FromJSON SearchAliasRouting where - parseJSON = withText "SearchAliasRouting" parse - where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) - -instance ToJSON Search where - toJSON (Search mquery sFilter sort searchAggs highlight sTrackSortScores sFrom sSize _ sFields sSource sSuggest) = - omitNulls [ "query" .= query' - , "sort" .= sort - , "aggregations" .= searchAggs - , "highlight" .= highlight - , "from" .= sFrom - , "size" .= sSize - , "track_scores" .= sTrackSortScores - , "fields" .= sFields - , "_source" .= sSource - , "suggest" .= sSuggest] - - where query' = case sFilter of - Nothing -> mquery - Just x -> Just . QueryBoolQuery $ mkBoolQuery (maybeToList mquery) [x] [] [] - -instance ToJSON Source where - toJSON NoSource = toJSON False - toJSON (SourcePatterns patterns) = toJSON patterns - toJSON (SourceIncludeExclude incl excl) = object [ "includes" .= incl, "excludes" .= excl ] - -instance ToJSON PatternOrPatterns where - toJSON (PopPattern pattern) = toJSON pattern - toJSON (PopPatterns patterns) = toJSON patterns - -instance ToJSON Include where - toJSON (Include patterns) = toJSON patterns - -instance ToJSON Exclude where - toJSON (Exclude patterns) = toJSON patterns - -instance ToJSON Pattern where - toJSON (Pattern pattern) = toJSON pattern - - -instance ToJSON FieldHighlight where - toJSON (FieldHighlight (FieldName fName) (Just fSettings)) = - object [ fName .= fSettings ] - toJSON (FieldHighlight (FieldName fName) Nothing) = - object [ fName .= emptyObject ] - -instance ToJSON Highlights where - toJSON (Highlights global fields) = - omitNulls (("fields" .= fields) - : highlightSettingsPairs global) - -instance ToJSON HighlightSettings where - toJSON hs = omitNulls (highlightSettingsPairs (Just hs)) - -highlightSettingsPairs :: Maybe HighlightSettings -> [Pair] -highlightSettingsPairs Nothing = [] -highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh) -highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph) -highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh) - - -plainHighPairs :: Maybe PlainHighlight -> [Pair] -plainHighPairs Nothing = [] -plainHighPairs (Just (PlainHighlight plCom plNonPost)) = - [ "type" .= String "plain"] - ++ commonHighlightPairs plCom - ++ nonPostingsToPairs plNonPost - -postHighPairs :: Maybe PostingsHighlight -> [Pair] -postHighPairs Nothing = [] -postHighPairs (Just (PostingsHighlight pCom)) = - ("type" .= String "postings") - : commonHighlightPairs pCom - -fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair] -fastVectorHighPairs Nothing = [] -fastVectorHighPairs (Just - (FastVectorHighlight fvCom fvNonPostSettings fvBoundChars - fvBoundMaxScan fvFragOff fvMatchedFields - fvPhraseLim)) = - [ "type" .= String "fvh" - , "boundary_chars" .= fvBoundChars - , "boundary_max_scan" .= fvBoundMaxScan - , "fragment_offset" .= fvFragOff - , "matched_fields" .= fvMatchedFields - , "phraseLimit" .= fvPhraseLim] - ++ commonHighlightPairs fvCom - ++ nonPostingsToPairs fvNonPostSettings - 
-deleteSeveral :: (Eq k, Hashable k) => [k] -> HM.HashMap k v -> HM.HashMap k v -deleteSeveral ks hm = foldr HM.delete hm ks - -commonHighlightPairs :: Maybe CommonHighlight -> [Pair] -commonHighlightPairs Nothing = [] -commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder - chNoMatchSize chHighlightQuery - chRequireFieldMatch)) = - [ "order" .= chScore - , "force_source" .= chForceSource - , "encoder" .= chEncoder - , "no_match_size" .= chNoMatchSize - , "highlight_query" .= chHighlightQuery - , "require_fieldMatch" .= chRequireFieldMatch] - ++ highlightTagToPairs chTag - - -nonPostingsToPairs :: Maybe NonPostings -> [Pair] -nonPostingsToPairs Nothing = [] -nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = - [ "fragment_size" .= npFragSize - , "number_of_fragments" .= npNumOfFrags] - -parseNEJSON :: (FromJSON a) => [Value] -> Parser (NonEmpty a) -parseNEJSON [] = fail "Expected non-empty list" -parseNEJSON (x:xs) = DT.mapM parseJSON (x :| xs) - - -instance ToJSON HighlightEncoder where - toJSON DefaultEncoder = String "default" - toJSON HTMLEncoder = String "html" - -highlightTagToPairs :: Maybe HighlightTag -> [Pair] -highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"] -highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre - , "post_tags" .= post] -highlightTagToPairs Nothing = [] - -instance ToJSON SortSpec where - toJSON (DefaultSortSpec - (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped - dsSortMode dsMissingSort dsNestedFilter)) = - object [dsSortFieldName .= omitNulls base] where - base = [ "order" .= dsSortOrder - , "unmapped_type" .= dsIgnoreUnmapped - , "mode" .= dsSortMode - , "missing" .= dsMissingSort - , "nested_filter" .= dsNestedFilter ] - - toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = - object [ "unit" .= units - , field .= gdsLatLon - , "order" .= gdsSortOrder ] - - -instance ToJSON SortOrder where - toJSON Ascending = String "asc" - toJSON Descending = String "desc" - - -instance ToJSON SortMode where - toJSON SortMin = String "min" - toJSON SortMax = String "max" - toJSON SortSum = String "sum" - toJSON SortAvg = String "avg" - - -instance ToJSON Missing where - toJSON LastMissing = String "_last" - toJSON FirstMissing = String "_first" - toJSON (CustomMissing txt) = String txt - - -instance ToJSON ScoreType where - toJSON ScoreTypeMax = "max" - toJSON ScoreTypeAvg = "avg" - toJSON ScoreTypeSum = "sum" - toJSON ScoreTypeNone = "none" - -instance FromJSON ScoreType where - parseJSON = withText "ScoreType" parse - where parse "max" = pure ScoreTypeMax - parse "avg" = pure ScoreTypeAvg - parse "sum" = pure ScoreTypeSum - parse "none" = pure ScoreTypeNone - parse t = fail ("Unexpected ScoreType: " <> show t) - -instance ToJSON Distance where - toJSON (Distance dCoefficient dUnit) = - String boltedTogether where - coefText = showText dCoefficient - (String unitText) = toJSON dUnit - boltedTogether = mappend coefText unitText - -instance FromJSON Distance where - parseJSON = withText "Distance" parse - where parse t = Distance <$> parseCoeff nT - <*> parseJSON (String unitT) - where (nT, unitT) = T.span validForNumber t - -- may be a better way to do this - validForNumber '-' = True - validForNumber '.' 
= True
-              validForNumber 'e' = True
-              validForNumber c = isNumber c
-              parseCoeff "" = fail "Empty string cannot be parsed as number"
-              -- use the total parseReadText rather than the partial 'read'
-              parseCoeff s = parseReadText s
-
-instance ToJSON DistanceUnit where
-  toJSON Miles = String "mi"
-  toJSON Yards = String "yd"
-  toJSON Feet = String "ft"
-  toJSON Inches = String "in"
-  toJSON Kilometers = String "km"
-  toJSON Meters = String "m"
-  toJSON Centimeters = String "cm"
-  toJSON Millimeters = String "mm"
-  toJSON NauticalMiles = String "nmi"
-
-
-instance FromJSON DistanceUnit where
-  parseJSON = withText "DistanceUnit" parse
-    where parse "mi" = pure Miles
-          parse "yd" = pure Yards
-          parse "ft" = pure Feet
-          parse "in" = pure Inches
-          parse "km" = pure Kilometers
-          parse "m" = pure Meters
-          parse "cm" = pure Centimeters
-          parse "mm" = pure Millimeters
-          parse "nmi" = pure NauticalMiles
-          parse u = fail ("Unrecognized DistanceUnit: " <> show u)
-
-instance ToJSON DistanceType where
-  toJSON Arc = String "arc"
-  toJSON SloppyArc = String "sloppy_arc"
-  toJSON Plane = String "plane"
-
-instance FromJSON DistanceType where
-  parseJSON = withText "DistanceType" parse
-    where parse "arc" = pure Arc
-          parse "sloppy_arc" = pure SloppyArc
-          parse "plane" = pure Plane
-          parse t = fail ("Unrecognized DistanceType: " <> show t)
-
-
-instance ToJSON OptimizeBbox where
-  toJSON NoOptimizeBbox = String "none"
-  toJSON (OptimizeGeoFilterType gft) = toJSON gft
-
-instance FromJSON OptimizeBbox where
-  parseJSON v = withText "NoOptimizeBbox" parseNoOptimize v
-            <|> parseOptimize v
-    where parseNoOptimize "none" = pure NoOptimizeBbox
-          parseNoOptimize _ = mzero
-          parseOptimize = fmap OptimizeGeoFilterType . parseJSON
-
-instance ToJSON GeoBoundingBoxConstraint where
-  toJSON (GeoBoundingBoxConstraint
-          (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') =
-    object [gbbcGeoBBField .= gbbcConstraintBox
-           , "_cache" .= cache
-           , "type" .= type']
-
-instance FromJSON GeoBoundingBoxConstraint where
-  parseJSON = withObject "GeoBoundingBoxConstraint" parse
-    where parse o = case HM.toList (deleteSeveral ["type", "_cache"] o) of
-                      [(fn, v)] -> GeoBoundingBoxConstraint (FieldName fn)
-                                   <$> parseJSON v
-                                   <*> o .:? "_cache" .!= defaultCache
-                                   <*> o .: "type"
-                      _ -> fail "Could not find field name for GeoBoundingBoxConstraint"
-
-instance ToJSON GeoFilterType where
-  toJSON GeoFilterMemory = String "memory"
-  toJSON GeoFilterIndexed = String "indexed"
-
-instance FromJSON GeoFilterType where
-  parseJSON = withText "GeoFilterType" parse
-    where parse "memory" = pure GeoFilterMemory
-          parse "indexed" = pure GeoFilterIndexed
-          parse t = fail ("Unrecognized GeoFilterType: " <> show t)
-
-instance ToJSON GeoBoundingBox where
-  toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) =
-    object ["top_left" .= gbbTopLeft
-           , "bottom_right" .= gbbBottomRight]
-
-instance FromJSON GeoBoundingBox where
-  parseJSON = withObject "GeoBoundingBox" parse
-    where parse o = GeoBoundingBox
-                    <$> o .: "top_left"
-                    <*> o .: "bottom_right"
-
-instance ToJSON LatLon where
-  toJSON (LatLon lLat lLon) =
-    object ["lat" .= lLat
-           , "lon" .= lLon]
-
-instance FromJSON LatLon where
-  parseJSON = withObject "LatLon" parse
-    where parse o = LatLon <$> o .: "lat"
-                           <*> o .: "lon"
-
--- index for smaller ranges, fielddata for longer ranges
-instance ToJSON RangeExecution where
-  toJSON RangeExecutionIndex = "index"
-  toJSON RangeExecutionFielddata = "fielddata"
-
-
-instance FromJSON RangeExecution where
-  parseJSON = withText "RangeExecution" parse
-    where parse "index" = pure RangeExecutionIndex
-          parse "fielddata" = pure RangeExecutionFielddata
-          -- fail in the Parser rather than crashing with 'error'
-          parse t = fail ("Unrecognized RangeExecution " <> show t)
-
-instance ToJSON RegexpFlags where
-  toJSON AllRegexpFlags = String "ALL"
-  toJSON NoRegexpFlags = String "NONE"
-  toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs
-    where flagStrs = map flagStr . nub $ h:fs
-          flagStr AnyString = "ANYSTRING"
-          flagStr Automaton = "AUTOMATON"
-          flagStr Complement = "COMPLEMENT"
-          flagStr Empty = "EMPTY"
-          flagStr Intersection = "INTERSECTION"
-          flagStr Interval = "INTERVAL"
-
-instance FromJSON RegexpFlags where
-  parseJSON = withText "RegexpFlags" parse
-    where parse "ALL" = pure AllRegexpFlags
-          parse "NONE" = pure NoRegexpFlags
-          parse t = SomeRegexpFlags <$> parseNEJSON (String <$> T.splitOn "|" t)
-
-instance FromJSON RegexpFlag where
-  parseJSON = withText "RegexpFlag" parse
-    where parse "ANYSTRING" = pure AnyString
-          parse "AUTOMATON" = pure Automaton
-          parse "COMPLEMENT" = pure Complement
-          parse "EMPTY" = pure Empty
-          parse "INTERSECTION" = pure Intersection
-          parse "INTERVAL" = pure Interval
-          parse f = fail ("Unknown RegexpFlag: " <> show f)
-
-instance ToJSON Term where
-  toJSON (Term field value) = object ["term" .= object
-                                      [field .= value]]
-
-instance FromJSON Term where
-  parseJSON = withObject "Term" parse
-    where parse o = do termObj <- o .: "term"
-                       case HM.toList termObj of
-                         [(fn, v)] -> Term fn <$> parseJSON v
-                         _ -> fail "Expected object with 1 field-named key"
-
-instance ToJSON BoolMatch where
-  toJSON (MustMatch term cache) = object ["must" .= term,
-                                          "_cache" .= cache]
-  toJSON (MustNotMatch term cache) = object ["must_not" .= term,
-                                             "_cache" .= cache]
-  toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms,
-                                             "_cache" .= cache]
-
-instance FromJSON BoolMatch where
-  parseJSON = withObject "BoolMatch" parse
-    where parse o = mustMatch `taggedWith` "must"
-                <|> mustNotMatch `taggedWith` "must_not"
-                <|> shouldMatch `taggedWith` "should"
-            where taggedWith parser k = parser =<< o .: k
-                  mustMatch t = MustMatch t <$> o .:? "_cache" .!= defaultCache
-                  mustNotMatch t = MustNotMatch t <$> o .:?
"_cache" .!= defaultCache - shouldMatch t = ShouldMatch t <$> o .:? "_cache" .!= defaultCache + deriving (Eq, Show) instance (FromJSON a) => FromJSON (SearchResult a) where parseJSON (Object v) = SearchResult <$> @@ -3986,1729 +532,6 @@ instance (FromJSON a) => FromJSON (SearchResult a) where v .:? "suggest" parseJSON _ = empty -instance (FromJSON a) => FromJSON (SearchHits a) where - parseJSON (Object v) = SearchHits <$> - v .: "total" <*> - v .: "max_score" <*> - v .: "hits" - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (Hit a) where - parseJSON (Object v) = Hit <$> - v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - v .: "_score" <*> - v .:? "_source" <*> - v .:? "highlight" - parseJSON _ = empty - -instance FromJSON ShardResult where - parseJSON (Object v) = ShardResult <$> - v .: "total" <*> - v .: "successful" <*> - v .: "failed" - parseJSON _ = empty - - -instance FromJSON DocVersion where - parseJSON v = do - i <- parseJSON v - maybe (fail "DocVersion out of range") return $ mkDocVersion i - --- This insanity is because ES *sometimes* returns Replica/Shard counts as strings -instance FromJSON ReplicaCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ReplicaCount . parseJSON - parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) - -instance FromJSON ShardCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ShardCount . parseJSON - parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) - -instance Bounded DocVersion where - minBound = DocVersion 1 - maxBound = DocVersion 9200000000000000000 -- 9.2e+18 - -instance Enum DocVersion where - succ x - | x /= maxBound = DocVersion (succ $ docVersionNumber x) - | otherwise = succError "DocVersion" - pred x - | x /= minBound = DocVersion (pred $ docVersionNumber x) - | otherwise = predError "DocVersion" - toEnum i = - fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i - fromEnum = docVersionNumber - enumFrom = boundedEnumFrom - enumFromThen = boundedEnumFromThen - --- | Username type used for HTTP Basic authentication. See 'basicAuthHook'. -newtype EsUsername = EsUsername { esUsername :: Text } deriving (Read, Show, Eq) - --- | Password type used for HTTP Basic authentication. See 'basicAuthHook'. -newtype EsPassword = EsPassword { esPassword :: Text } deriving (Read, Show, Eq) - - -data SnapshotRepoSelection = SnapshotRepoList (NonEmpty SnapshotRepoPattern) - | AllSnapshotRepos deriving (Eq, Generic, Show, Typeable) - - --- | Either specifies an exact repo name or one with globs in it, --- e.g. @RepoPattern "foo*"@ __NOTE__: Patterns are not supported on ES < 1.7 -data SnapshotRepoPattern = ExactRepo SnapshotRepoName - | RepoPattern Text - deriving (Eq, Generic, Show, Typeable) - --- | The unique name of a snapshot repository. -newtype SnapshotRepoName = SnapshotRepoName { snapshotRepoName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, ToJSON, FromJSON) - - --- | A generic representation of a snapshot repo. This is what gets --- sent to and parsed from the server. For repo types enabled by --- plugins that aren't exported by this library, consider making a --- custom type which implements 'SnapshotRepo'. 
If it is a common repo --- type, consider submitting a pull request to have it included in the --- library proper -data GenericSnapshotRepo = GenericSnapshotRepo { - gSnapshotRepoName :: SnapshotRepoName - , gSnapshotRepoType :: SnapshotRepoType - , gSnapshotRepoSettings :: GenericSnapshotRepoSettings - } deriving (Eq, Generic, Show, Typeable) - - -instance SnapshotRepo GenericSnapshotRepo where - toGSnapshotRepo = id - fromGSnapshotRepo = Right - - -newtype SnapshotRepoType = SnapshotRepoType { snapshotRepoType :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, ToJSON, FromJSON) - - --- | Opaque representation of snapshot repo settings. Instances of --- 'SnapshotRepo' will produce this. -newtype GenericSnapshotRepoSettings = GenericSnapshotRepoSettings { gSnapshotRepoSettingsObject :: Object } - deriving (Eq, Generic, Show, Typeable, ToJSON) - - - -- Regardless of whether you send strongly typed json, my version of - -- ES sends back stringly typed json in the settings, e.g. booleans - -- as strings, so we'll try to convert them. -instance FromJSON GenericSnapshotRepoSettings where - parseJSON = fmap (GenericSnapshotRepoSettings . fmap unStringlyTypeJSON). parseJSON - --- | The result of running 'verifySnapshotRepo'. -newtype SnapshotVerification = SnapshotVerification { snapshotNodeVerifications :: [SnapshotNodeVerification] } - deriving (Eq, Generic, Show, Typeable) - - -instance FromJSON SnapshotVerification where - parseJSON = withObject "SnapshotVerification" parse - where - parse o = do - o2 <- o .: "nodes" - SnapshotVerification <$> mapM (uncurry parse') (HM.toList o2) - parse' rawFullId = withObject "SnapshotNodeVerification" $ \o -> - SnapshotNodeVerification (FullNodeId rawFullId) <$> o .: "name" - - --- | A node that has verified a snapshot -data SnapshotNodeVerification = SnapshotNodeVerification { - snvFullId :: FullNodeId - , snvNodeName :: NodeName - } deriving (Eq, Generic, Show, Typeable) - - --- | Unique, automatically-generated name assigned to nodes that are --- usually returned in node-oriented APIs. -newtype FullNodeId = FullNodeId { fullNodeId :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - - --- | A human-readable node name that is supplied by the user in the --- node config or automatically generated by Elasticsearch. 
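-- For reference, a sketch of the verify-repo response shape that the
-- 'SnapshotVerification' parser above consumes (the node id and name are
-- illustrative); each key under "nodes" becomes a 'FullNodeId' and each
-- "name" becomes a 'NodeName':
--
--   {"nodes": {"node_id_123": {"name": "node-1"}}}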
-newtype NodeName = NodeName { nodeName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -newtype ClusterName = ClusterName { clusterName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodesInfo = NodesInfo { - nodesInfo :: [NodeInfo] - , nodesClusterName :: ClusterName - } deriving (Eq, Show, Generic, Typeable) - -data NodesStats = NodesStats { - nodesStats :: [NodeStats] - , nodesStatsClusterName :: ClusterName - } deriving (Eq, Show, Generic, Typeable) - -data NodeStats = NodeStats { - nodeStatsName :: NodeName - , nodeStatsFullId :: FullNodeId - , nodeStatsBreakersStats :: Maybe NodeBreakersStats - , nodeStatsHTTP :: NodeHTTPStats - , nodeStatsTransport :: NodeTransportStats - , nodeStatsFS :: NodeFSStats - , nodeStatsNetwork :: Maybe NodeNetworkStats - , nodeStatsThreadPool :: NodeThreadPoolsStats - , nodeStatsJVM :: NodeJVMStats - , nodeStatsProcess :: NodeProcessStats - , nodeStatsOS :: NodeOSStats - , nodeStatsIndices :: NodeIndicesStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeBreakersStats = NodeBreakersStats { - nodeStatsParentBreaker :: NodeBreakerStats - , nodeStatsRequestBreaker :: NodeBreakerStats - , nodeStatsFieldDataBreaker :: NodeBreakerStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeBreakerStats = NodeBreakerStats { - nodeBreakersTripped :: Int - , nodeBreakersOverhead :: Double - , nodeBreakersEstSize :: Bytes - , nodeBreakersLimitSize :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeHTTPStats = NodeHTTPStats { - nodeHTTPTotalOpened :: Int - , nodeHTTPCurrentOpen :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeTransportStats = NodeTransportStats { - nodeTransportTXSize :: Bytes - , nodeTransportCount :: Int - , nodeTransportRXSize :: Bytes - , nodeTransportRXCount :: Int - , nodeTransportServerOpen :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeFSStats = NodeFSStats { - nodeFSDataPaths :: [NodeDataPathStats] - , nodeFSTotal :: NodeFSTotalStats - , nodeFSTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data NodeDataPathStats = NodeDataPathStats { - nodeDataPathDiskServiceTime :: Maybe Double - , nodeDataPathDiskQueue :: Maybe Double - , nodeDataPathIOSize :: Maybe Bytes - , nodeDataPathWriteSize :: Maybe Bytes - , nodeDataPathReadSize :: Maybe Bytes - , nodeDataPathIOOps :: Maybe Int - , nodeDataPathWrites :: Maybe Int - , nodeDataPathReads :: Maybe Int - , nodeDataPathAvailable :: Bytes - , nodeDataPathFree :: Bytes - , nodeDataPathTotal :: Bytes - , nodeDataPathType :: Maybe Text - , nodeDataPathDevice :: Maybe Text - , nodeDataPathMount :: Text - , nodeDataPathPath :: Text - } deriving (Eq, Show, Generic, Typeable) - -data NodeFSTotalStats = NodeFSTotalStats { - nodeFSTotalDiskServiceTime :: Maybe Double - , nodeFSTotalDiskQueue :: Maybe Double - , nodeFSTotalIOSize :: Maybe Bytes - , nodeFSTotalWriteSize :: Maybe Bytes - , nodeFSTotalReadSize :: Maybe Bytes - , nodeFSTotalIOOps :: Maybe Int - , nodeFSTotalWrites :: Maybe Int - , nodeFSTotalReads :: Maybe Int - , nodeFSTotalAvailable :: Bytes - , nodeFSTotalFree :: Bytes - , nodeFSTotalTotal :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeNetworkStats = NodeNetworkStats { - nodeNetTCPOutRSTs :: Int - , nodeNetTCPInErrs :: Int - , nodeNetTCPAttemptFails :: Int - , nodeNetTCPEstabResets :: Int - , nodeNetTCPRetransSegs :: Int - , nodeNetTCPOutSegs :: Int - , nodeNetTCPInSegs :: Int - , nodeNetTCPCurrEstab :: Int - , nodeNetTCPPassiveOpens :: Int - , nodeNetTCPActiveOpens 
:: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolsStats = NodeThreadPoolsStats { - nodeThreadPoolsStatsSnapshot :: NodeThreadPoolStats - , nodeThreadPoolsStatsBulk :: NodeThreadPoolStats - , nodeThreadPoolsStatsMerge :: NodeThreadPoolStats - , nodeThreadPoolsStatsGet :: NodeThreadPoolStats - , nodeThreadPoolsStatsManagement :: NodeThreadPoolStats - , nodeThreadPoolsStatsFetchShardStore :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsOptimize :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsFlush :: NodeThreadPoolStats - , nodeThreadPoolsStatsSearch :: NodeThreadPoolStats - , nodeThreadPoolsStatsWarmer :: NodeThreadPoolStats - , nodeThreadPoolsStatsGeneric :: NodeThreadPoolStats - , nodeThreadPoolsStatsSuggest :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsRefresh :: NodeThreadPoolStats - , nodeThreadPoolsStatsIndex :: NodeThreadPoolStats - , nodeThreadPoolsStatsListener :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsFetchShardStarted :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsPercolate :: Maybe NodeThreadPoolStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolStats = NodeThreadPoolStats { - nodeThreadPoolCompleted :: Int - , nodeThreadPoolLargest :: Int - , nodeThreadPoolRejected :: Int - , nodeThreadPoolActive :: Int - , nodeThreadPoolQueue :: Int - , nodeThreadPoolThreads :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeJVMStats = NodeJVMStats { - nodeJVMStatsMappedBufferPool :: JVMBufferPoolStats - , nodeJVMStatsDirectBufferPool :: JVMBufferPoolStats - , nodeJVMStatsGCOldCollector :: JVMGCStats - , nodeJVMStatsGCYoungCollector :: JVMGCStats - , nodeJVMStatsPeakThreadsCount :: Int - , nodeJVMStatsThreadsCount :: Int - , nodeJVMStatsOldPool :: JVMPoolStats - , nodeJVMStatsSurvivorPool :: JVMPoolStats - , nodeJVMStatsYoungPool :: JVMPoolStats - , nodeJVMStatsNonHeapCommitted :: Bytes - , nodeJVMStatsNonHeapUsed :: Bytes - , nodeJVMStatsHeapMax :: Bytes - , nodeJVMStatsHeapCommitted :: Bytes - , nodeJVMStatsHeapUsedPercent :: Int - , nodeJVMStatsHeapUsed :: Bytes - , nodeJVMStatsUptime :: NominalDiffTime - , nodeJVMStatsTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data JVMBufferPoolStats = JVMBufferPoolStats { - jvmBufferPoolStatsTotalCapacity :: Bytes - , jvmBufferPoolStatsUsed :: Bytes - , jvmBufferPoolStatsCount :: Int - } deriving (Eq, Show, Generic, Typeable) - -data JVMGCStats = JVMGCStats { - jvmGCStatsCollectionTime :: NominalDiffTime - , jvmGCStatsCollectionCount :: Int - } deriving (Eq, Show, Generic, Typeable) - -data JVMPoolStats = JVMPoolStats { - jvmPoolStatsPeakMax :: Bytes - , jvmPoolStatsPeakUsed :: Bytes - , jvmPoolStatsMax :: Bytes - , jvmPoolStatsUsed :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeProcessStats = NodeProcessStats { - nodeProcessTimestamp :: UTCTime - , nodeProcessOpenFDs :: Int - , nodeProcessMaxFDs :: Int - , nodeProcessCPUPercent :: Int - , nodeProcessCPUTotal :: NominalDiffTime - , nodeProcessMemTotalVirtual :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeOSStats = NodeOSStats { - nodeOSTimestamp :: UTCTime - , nodeOSCPUPercent :: Int - , nodeOSLoad :: Maybe LoadAvgs - , nodeOSMemTotal :: Bytes - , nodeOSMemFree :: Bytes - , nodeOSMemFreePercent :: Int - , nodeOSMemUsed :: Bytes - , nodeOSMemUsedPercent :: Int - , nodeOSSwapTotal :: Bytes - , nodeOSSwapFree :: Bytes - , nodeOSSwapUsed :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data LoadAvgs = LoadAvgs { - loadAvg1Min :: Double - , loadAvg5Min :: Double 
- , loadAvg15Min :: Double - } deriving (Eq, Show, Generic, Typeable) - -data NodeIndicesStats = NodeIndicesStats { - nodeIndicesStatsRecoveryThrottleTime :: Maybe NominalDiffTime - , nodeIndicesStatsRecoveryCurrentAsTarget :: Maybe Int - , nodeIndicesStatsRecoveryCurrentAsSource :: Maybe Int - , nodeIndicesStatsQueryCacheMisses :: Maybe Int - , nodeIndicesStatsQueryCacheHits :: Maybe Int - , nodeIndicesStatsQueryCacheEvictions :: Maybe Int - , nodeIndicesStatsQueryCacheSize :: Maybe Bytes - , nodeIndicesStatsSuggestCurrent :: Maybe Int - , nodeIndicesStatsSuggestTime :: Maybe NominalDiffTime - , nodeIndicesStatsSuggestTotal :: Maybe Int - , nodeIndicesStatsTranslogSize :: Bytes - , nodeIndicesStatsTranslogOps :: Int - , nodeIndicesStatsSegFixedBitSetMemory :: Maybe Bytes - , nodeIndicesStatsSegVersionMapMemory :: Bytes - , nodeIndicesStatsSegIndexWriterMaxMemory :: Maybe Bytes - , nodeIndicesStatsSegIndexWriterMemory :: Bytes - , nodeIndicesStatsSegMemory :: Bytes - , nodeIndicesStatsSegCount :: Int - , nodeIndicesStatsCompletionSize :: Bytes - , nodeIndicesStatsPercolateQueries :: Maybe Int - , nodeIndicesStatsPercolateMemory :: Maybe Bytes - , nodeIndicesStatsPercolateCurrent :: Maybe Int - , nodeIndicesStatsPercolateTime :: Maybe NominalDiffTime - , nodeIndicesStatsPercolateTotal :: Maybe Int - , nodeIndicesStatsFieldDataEvictions :: Int - , nodeIndicesStatsFieldDataMemory :: Bytes - , nodeIndicesStatsWarmerTotalTime :: NominalDiffTime - , nodeIndicesStatsWarmerTotal :: Int - , nodeIndicesStatsWarmerCurrent :: Int - , nodeIndicesStatsFlushTotalTime :: NominalDiffTime - , nodeIndicesStatsFlushTotal :: Int - , nodeIndicesStatsRefreshTotalTime :: NominalDiffTime - , nodeIndicesStatsRefreshTotal :: Int - , nodeIndicesStatsMergesTotalSize :: Bytes - , nodeIndicesStatsMergesTotalDocs :: Int - , nodeIndicesStatsMergesTotalTime :: NominalDiffTime - , nodeIndicesStatsMergesTotal :: Int - , nodeIndicesStatsMergesCurrentSize :: Bytes - , nodeIndicesStatsMergesCurrentDocs :: Int - , nodeIndicesStatsMergesCurrent :: Int - , nodeIndicesStatsSearchFetchCurrent :: Int - , nodeIndicesStatsSearchFetchTime :: NominalDiffTime - , nodeIndicesStatsSearchFetchTotal :: Int - , nodeIndicesStatsSearchQueryCurrent :: Int - , nodeIndicesStatsSearchQueryTime :: NominalDiffTime - , nodeIndicesStatsSearchQueryTotal :: Int - , nodeIndicesStatsSearchOpenContexts :: Int - , nodeIndicesStatsGetCurrent :: Int - , nodeIndicesStatsGetMissingTime :: NominalDiffTime - , nodeIndicesStatsGetMissingTotal :: Int - , nodeIndicesStatsGetExistsTime :: NominalDiffTime - , nodeIndicesStatsGetExistsTotal :: Int - , nodeIndicesStatsGetTime :: NominalDiffTime - , nodeIndicesStatsGetTotal :: Int - , nodeIndicesStatsIndexingThrottleTime :: Maybe NominalDiffTime - , nodeIndicesStatsIndexingIsThrottled :: Maybe Bool - , nodeIndicesStatsIndexingNoopUpdateTotal :: Maybe Int - , nodeIndicesStatsIndexingDeleteCurrent :: Int - , nodeIndicesStatsIndexingDeleteTime :: NominalDiffTime - , nodeIndicesStatsIndexingDeleteTotal :: Int - , nodeIndicesStatsIndexingIndexCurrent :: Int - , nodeIndicesStatsIndexingIndexTime :: NominalDiffTime - , nodeIndicesStatsIndexingTotal :: Int - , nodeIndicesStatsStoreThrottleTime :: NominalDiffTime - , nodeIndicesStatsStoreSize :: Bytes - , nodeIndicesStatsDocsDeleted :: Int - , nodeIndicesStatsDocsCount :: Int - } deriving (Eq, Show, Generic, Typeable) - --- | A quirky address format used throughout Elasticsearch. An example --- would be inet[/1.1.1.1:9200]. inet may be a placeholder for a --- . 
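-- For example (illustrative address):
--
-- >>> esAddress (EsAddress "inet[/1.1.1.1:9200]")
-- "inet[/1.1.1.1:9200]"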
-newtype EsAddress = EsAddress { esAddress :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - --- | Typically a 7 character hex string. -newtype BuildHash = BuildHash { buildHash :: Text } - deriving (Eq, Ord, Generic, Read, Show, Typeable, FromJSON, ToJSON) - -newtype PluginName = PluginName { pluginName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodeInfo = NodeInfo { - nodeInfoHTTPAddress :: Maybe EsAddress - , nodeInfoBuild :: BuildHash - , nodeInfoESVersion :: VersionNumber - , nodeInfoIP :: Server - , nodeInfoHost :: Server - , nodeInfoTransportAddress :: EsAddress - , nodeInfoName :: NodeName - , nodeInfoFullId :: FullNodeId - , nodeInfoPlugins :: [NodePluginInfo] - , nodeInfoHTTP :: NodeHTTPInfo - , nodeInfoTransport :: NodeTransportInfo - , nodeInfoNetwork :: Maybe NodeNetworkInfo - , nodeInfoThreadPool :: NodeThreadPoolsInfo - , nodeInfoJVM :: NodeJVMInfo - , nodeInfoProcess :: NodeProcessInfo - , nodeInfoOS :: NodeOSInfo - , nodeInfoSettings :: Object - -- ^ The members of the settings objects are not consistent, - -- dependent on plugins, etc. - } deriving (Eq, Show, Generic, Typeable) - -data NodePluginInfo = NodePluginInfo { - nodePluginSite :: Maybe Bool - -- ^ Is this a site plugin? - , nodePluginJVM :: Maybe Bool - -- ^ Is this plugin running on the JVM - , nodePluginDescription :: Text - , nodePluginVersion :: MaybeNA VersionNumber - , nodePluginName :: PluginName - } deriving (Eq, Show, Generic, Typeable) - -data NodeHTTPInfo = NodeHTTPInfo { - nodeHTTPMaxContentLength :: Bytes - , nodeHTTPTransportAddress :: BoundTransportAddress - } deriving (Eq, Show, Generic, Typeable) - -data NodeTransportInfo = NodeTransportInfo { - nodeTransportProfiles :: [BoundTransportAddress] - , nodeTransportAddress :: BoundTransportAddress - } deriving (Eq, Show, Generic, Typeable) - -data BoundTransportAddress = BoundTransportAddress { - publishAddress :: EsAddress - , boundAddress :: [EsAddress] - } deriving (Eq, Show, Generic, Typeable) - -data NodeNetworkInfo = NodeNetworkInfo { - nodeNetworkPrimaryInterface :: NodeNetworkInterface - , nodeNetworkRefreshInterval :: NominalDiffTime - } deriving (Eq, Show, Generic, Typeable) - -newtype MacAddress = MacAddress { macAddress :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -newtype NetworkInterfaceName = NetworkInterfaceName { networkInterfaceName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodeNetworkInterface = NodeNetworkInterface { - nodeNetIfaceMacAddress :: MacAddress - , nodeNetIfaceName :: NetworkInterfaceName - , nodeNetIfaceAddress :: Server - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolsInfo = NodeThreadPoolsInfo { - nodeThreadPoolsRefresh :: NodeThreadPoolInfo - , nodeThreadPoolsManagement :: NodeThreadPoolInfo - , nodeThreadPoolsPercolate :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsListener :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsFetchShardStarted :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsSearch :: NodeThreadPoolInfo - , nodeThreadPoolsFlush :: NodeThreadPoolInfo - , nodeThreadPoolsWarmer :: NodeThreadPoolInfo - , nodeThreadPoolsOptimize :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsBulk :: NodeThreadPoolInfo - , nodeThreadPoolsSuggest :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsMerge :: NodeThreadPoolInfo - , nodeThreadPoolsSnapshot :: NodeThreadPoolInfo - , nodeThreadPoolsGet :: NodeThreadPoolInfo - , nodeThreadPoolsFetchShardStore :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsIndex :: 
NodeThreadPoolInfo
-  , nodeThreadPoolsGeneric :: NodeThreadPoolInfo
-  } deriving (Eq, Show, Generic, Typeable)
-
-data NodeThreadPoolInfo = NodeThreadPoolInfo {
-    nodeThreadPoolQueueSize :: ThreadPoolSize
-  , nodeThreadPoolKeepalive :: Maybe NominalDiffTime
-  , nodeThreadPoolMin :: Maybe Int
-  , nodeThreadPoolMax :: Maybe Int
-  , nodeThreadPoolType :: ThreadPoolType
-  } deriving (Eq, Show, Generic, Typeable)
-
-data ThreadPoolSize = ThreadPoolBounded Int
-                    | ThreadPoolUnbounded
-                    deriving (Eq, Show, Generic, Typeable)
-
-data ThreadPoolType = ThreadPoolScaling
-                    | ThreadPoolFixed
-                    | ThreadPoolCached
-                    deriving (Eq, Show, Generic, Typeable)
-
-data NodeJVMInfo = NodeJVMInfo {
-    nodeJVMInfoMemoryPools :: [JVMMemoryPool]
-  , nodeJVMInfoMemoryPoolsGCCollectors :: [JVMGCCollector]
-  , nodeJVMInfoMemoryInfo :: JVMMemoryInfo
-  , nodeJVMInfoStartTime :: UTCTime
-  , nodeJVMInfoVMVendor :: Text
-  , nodeJVMVMVersion :: VersionNumber
-  -- ^ JVM doesn't seem to follow normal version conventions
-  , nodeJVMVMName :: Text
-  , nodeJVMVersion :: VersionNumber
-  , nodeJVMPID :: PID
-  } deriving (Eq, Show, Generic, Typeable)
-
--- | Handles quirks in the way JVM versions are rendered (1.7.0_101 -> 1.7.0.101)
-newtype JVMVersion = JVMVersion { unJVMVersion :: VersionNumber }
-
-data JVMMemoryInfo = JVMMemoryInfo {
-    jvmMemoryInfoDirectMax :: Bytes
-  , jvmMemoryInfoNonHeapMax :: Bytes
-  , jvmMemoryInfoNonHeapInit :: Bytes
-  , jvmMemoryInfoHeapMax :: Bytes
-  , jvmMemoryInfoHeapInit :: Bytes
-  } deriving (Eq, Show, Generic, Typeable)
-
-newtype JVMMemoryPool = JVMMemoryPool {
-  jvmMemoryPool :: Text
-  } deriving (Eq, Show, Generic, Typeable, FromJSON)
-
-newtype JVMGCCollector = JVMGCCollector {
-  jvmGCCollector :: Text
-  } deriving (Eq, Show, Generic, Typeable, FromJSON)
-
-newtype PID = PID {
-  pid :: Int
-  } deriving (Eq, Show, Generic, Typeable, FromJSON)
-
-data NodeOSInfo = NodeOSInfo {
-    nodeOSRefreshInterval :: NominalDiffTime
-  , nodeOSName :: Text
-  , nodeOSArch :: Text
-  , nodeOSVersion :: VersionNumber
-  , nodeOSAvailableProcessors :: Int
-  , nodeOSAllocatedProcessors :: Int
-  } deriving (Eq, Show, Generic, Typeable)
-
-data CPUInfo = CPUInfo {
-    cpuCacheSize :: Bytes
-  , cpuCoresPerSocket :: Int
-  , cpuTotalSockets :: Int
-  , cpuTotalCores :: Int
-  , cpuMHZ :: Int
-  , cpuModel :: Text
-  , cpuVendor :: Text
-  } deriving (Eq, Show, Generic, Typeable)
-
-data NodeProcessInfo = NodeProcessInfo {
-    nodeProcessMLockAll :: Bool
-    -- ^ See
-  , nodeProcessMaxFileDescriptors :: Maybe Int
-  , nodeProcessId :: PID
-  , nodeProcessRefreshInterval :: NominalDiffTime
-  } deriving (Eq, Show, Generic, Typeable)
-
-data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings {
-    repoUpdateVerify :: Bool
-    -- ^ After creation/update, synchronously check that nodes can
-    -- write to this repo. Defaults to True. You may use False if you
-    -- need a faster response and plan on verifying manually later
-    -- with 'verifySnapshotRepo'.
-  } deriving (Eq, Show, Generic, Typeable)
-
-
--- | Reasonable defaults for repo creation/update
---
--- * repoUpdateVerify True
-defaultSnapshotRepoUpdateSettings :: SnapshotRepoUpdateSettings
-defaultSnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings True
-
-
--- | A filesystem-based snapshot repo that ships with
--- Elasticsearch. This is an instance of 'SnapshotRepo' so it can be
--- used with 'updateSnapshotRepo'
-data FsSnapshotRepo = FsSnapshotRepo {
-    fsrName :: SnapshotRepoName
-  , fsrLocation :: FilePath
-  , fsrCompressMetadata :: Bool
-  , fsrChunkSize :: Maybe Bytes
-  -- ^ Size by which to split large files during snapshotting.
-  , fsrMaxRestoreBytesPerSec :: Maybe Bytes
-  -- ^ Throttle node restore rate. If not supplied, defaults to 40mb/sec
-  , fsrMaxSnapshotBytesPerSec :: Maybe Bytes
-  -- ^ Throttle node snapshot rate. If not supplied, defaults to 40mb/sec
-  } deriving (Eq, Generic, Show, Typeable)
-
-
-instance SnapshotRepo FsSnapshotRepo where
-  toGSnapshotRepo FsSnapshotRepo {..} =
-    GenericSnapshotRepo fsrName fsRepoType (GenericSnapshotRepoSettings settings)
-    where
-      Object settings = object $ [ "location" .= fsrLocation
-                                 , "compress" .= fsrCompressMetadata
-                                 ] ++ optionalPairs
-      optionalPairs = catMaybes [ ("chunk_size" .=) <$> fsrChunkSize
-                                , ("max_restore_bytes_per_sec" .=) <$> fsrMaxRestoreBytesPerSec
-                                , ("max_snapshot_bytes_per_sec" .=) <$> fsrMaxSnapshotBytesPerSec
-                                ]
-  fromGSnapshotRepo GenericSnapshotRepo {..}
-    | gSnapshotRepoType == fsRepoType = do
-      let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings
-      parseRepo $ do
-        FsSnapshotRepo gSnapshotRepoName <$> o .: "location"
-                                         <*> o .:? "compress" .!= False
-                                         <*> o .:? "chunk_size"
-                                         <*> o .:? "max_restore_bytes_per_sec"
-                                         <*> o .:? "max_snapshot_bytes_per_sec"
-    | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType)
-
-
-parseRepo :: Parser a -> Either SnapshotRepoConversionError a
-parseRepo parser = case parseEither (const parser) () of
-  Left e -> Left (OtherRepoConversionError (T.pack e))
-  Right a -> Right a
-
-
-fsRepoType :: SnapshotRepoType
-fsRepoType = SnapshotRepoType "fs"
-
--- | Law: fromGSnapshotRepo (toGSnapshotRepo r) == Right r
-class SnapshotRepo r where
-  toGSnapshotRepo :: r -> GenericSnapshotRepo
-  fromGSnapshotRepo :: GenericSnapshotRepo -> Either SnapshotRepoConversionError r
-
-
-data SnapshotRepoConversionError = RepoTypeMismatch SnapshotRepoType SnapshotRepoType
-                                   -- ^ Expected type and actual type
-                                 | OtherRepoConversionError Text
-                                 deriving (Show, Eq, Generic, Typeable)
-
-
-instance Exception SnapshotRepoConversionError
-
-
-data SnapshotCreateSettings = SnapshotCreateSettings {
-    snapWaitForCompletion :: Bool
-    -- ^ Should the API call return immediately after initializing
-    -- the snapshot or wait until completed? Note that if this is
-    -- enabled it could wait a long time, so you should adjust your
-    -- 'ManagerSettings' accordingly to set long timeouts or
-    -- explicitly handle timeouts.
-  , snapIndices :: Maybe IndexSelection
-    -- ^ Nothing will snapshot all indices. Just [] is permissible and
-    -- will essentially be a no-op snapshot.
-  , snapIgnoreUnavailable :: Bool
-    -- ^ If set to True, any matched indices that don't exist will be
-    -- ignored. Otherwise it will be an error and fail.
-  , snapIncludeGlobalState :: Bool
-  , snapPartial :: Bool
-    -- ^ If some indices failed to snapshot (e.g. if not all primary
-    -- shards are available), should the process proceed?
- } deriving (Eq, Generic, Show, Typeable) - - --- | Reasonable defaults for snapshot creation --- --- * snapWaitForCompletion False --- * snapIndices Nothing --- * snapIgnoreUnavailable False --- * snapIncludeGlobalState True --- * snapPartial False -defaultSnapshotCreateSettings :: SnapshotCreateSettings -defaultSnapshotCreateSettings = SnapshotCreateSettings { - snapWaitForCompletion = False - , snapIndices = Nothing - , snapIgnoreUnavailable = False - , snapIncludeGlobalState = True - , snapPartial = False - } - - -data SnapshotSelection = SnapshotList (NonEmpty SnapshotPattern) - | AllSnapshots deriving (Eq, Generic, Show, Typeable) - - --- | Either specifies an exact snapshot name or one with globs in it, --- e.g. @SnapPattern "foo*"@ __NOTE__: Patterns are not supported on --- ES < 1.7 -data SnapshotPattern = ExactSnap SnapshotName - | SnapPattern Text - deriving (Eq, Generic, Show, Typeable) - - --- | General information about the state of a snapshot. Has some --- redundancies with 'SnapshotStatus' -data SnapshotInfo = SnapshotInfo { - snapInfoShards :: ShardResult - , snapInfoFailures :: [SnapshotShardFailure] - , snapInfoDuration :: NominalDiffTime - , snapInfoEndTime :: UTCTime - , snapInfoStartTime :: UTCTime - , snapInfoState :: SnapshotState - , snapInfoIndices :: [IndexName] - , snapInfoName :: SnapshotName - } deriving (Eq, Generic, Show, Typeable) - - -instance FromJSON SnapshotInfo where - parseJSON = withObject "SnapshotInfo" parse - where - parse o = SnapshotInfo <$> o .: "shards" - <*> o .: "failures" - <*> (unMS <$> o .: "duration_in_millis") - <*> (posixMS <$> o .: "end_time_in_millis") - <*> (posixMS <$> o .: "start_time_in_millis") - <*> o .: "state" - <*> o .: "indices" - <*> o .: "snapshot" - -data SnapshotShardFailure = SnapshotShardFailure { - snapShardFailureIndex :: IndexName - , snapShardFailureNodeId :: Maybe NodeName -- I'm not 100% sure this isn't actually 'FullNodeId' - , snapShardFailureReason :: Text - , snapShardFailureShardId :: ShardId - } deriving (Eq, Show, Generic, Typeable) - - -instance FromJSON SnapshotShardFailure where - parseJSON = withObject "SnapshotShardFailure" parse - where - parse o = SnapshotShardFailure <$> o .: "index" - <*> o .:? "node_id" - <*> o .: "reason" - <*> o .: "shard_id" - - -newtype ShardId = ShardId { shardId :: Int } - deriving (Eq, Show, Generic, Typeable, FromJSON) - --- | Milliseconds -newtype MS = MS NominalDiffTime - - --- keeps the unexported constructor warnings at bay -unMS :: MS -> NominalDiffTime -unMS (MS t) = t - - -instance FromJSON MS where - parseJSON = withScientific "MS" (return . MS . 
parse)
-    where
-      -- convert milliseconds to seconds
-      parse n = fromInteger (truncate n) / 1000
-
-
-data SnapshotState = SnapshotInit
-                   | SnapshotStarted
-                   | SnapshotSuccess
-                   | SnapshotFailed
-                   | SnapshotAborted
-                   | SnapshotMissing
-                   | SnapshotWaiting
-                   deriving (Show, Eq, Generic, Typeable)
-
-instance FromJSON SnapshotState where
-  parseJSON = withText "SnapshotState" parse
-    where
-      parse "INIT" = return SnapshotInit
-      parse "STARTED" = return SnapshotStarted
-      parse "SUCCESS" = return SnapshotSuccess
-      parse "FAILED" = return SnapshotFailed
-      parse "ABORTED" = return SnapshotAborted
-      parse "MISSING" = return SnapshotMissing
-      parse "WAITING" = return SnapshotWaiting
-      parse t = fail ("Invalid snapshot state " <> T.unpack t)
-
-
-newtype SnapshotName = SnapshotName { snapshotName :: Text }
-  deriving (Show, Eq, Ord, Generic, Typeable, ToJSON, FromJSON)
-
-
-data SnapshotRestoreSettings = SnapshotRestoreSettings {
-    snapRestoreWaitForCompletion :: Bool
-    -- ^ Should the API call return immediately after initializing
-    -- the restore or wait until completed? Note that if this is
-    -- enabled, it could wait a long time, so you should adjust your
-    -- 'ManagerSettings' accordingly to set long timeouts or
-    -- explicitly handle timeouts.
-  , snapRestoreIndices :: Maybe IndexSelection
-    -- ^ Nothing will restore all indices in the snapshot. Just [] is
-    -- permissible and will essentially be a no-op restore.
-  , snapRestoreIgnoreUnavailable :: Bool
-    -- ^ If set to True, any indices that do not exist will be ignored
-    -- during the restore rather than failing it.
-  , snapRestoreIncludeGlobalState :: Bool
-    -- ^ If set to false, will ignore any global state in the snapshot
-    -- and will not restore it.
-  , snapRestoreRenamePattern :: Maybe RestoreRenamePattern
-    -- ^ A regex pattern for matching indices. Used with
-    -- 'snapRestoreRenameReplacement', the restore can reference the
-    -- matched index and create a new index name upon restore.
-  , snapRestoreRenameReplacement :: Maybe (NonEmpty RestoreRenameToken)
-    -- ^ Expression of how index renames should be constructed.
-  , snapRestorePartial :: Bool
-    -- ^ If some indices fail to restore, should the process proceed?
-  , snapRestoreIncludeAliases :: Bool
-    -- ^ Should the restore also restore the aliases captured in the
-    -- snapshot.
-  , snapRestoreIndexSettingsOverrides :: Maybe RestoreIndexSettings
-    -- ^ Settings to apply during the restore process. __NOTE:__ This
-    -- option is not supported in ES < 1.5 and should be set to
-    -- Nothing in that case.
-  , snapRestoreIgnoreIndexSettings :: Maybe (NonEmpty Text)
-    -- ^ This type could be richer but it isn't clear which
-    -- settings are allowed to be ignored during restore, so we're
-    -- including this feature in a basic form rather than
-    -- omitting it. One example here would be
-    -- "index.refresh_interval". Any setting specified here will
-    -- revert back to the server default during the restore process.
-  } deriving (Eq, Generic, Show, Typeable)
-
--- | Regex-style pattern, e.g. "index_(.+)" to match index names
-newtype RestoreRenamePattern = RestoreRenamePattern { rrPattern :: Text }
-  deriving (Show, Eq, Generic, Typeable, Ord, ToJSON)
-
-
--- | A single token in an index renaming scheme for a restore. These
--- are concatenated into a string before being sent to
--- Elasticsearch. Check out these Java
--- to find out more if you're into that sort of thing.
-data RestoreRenameToken = RRTLit Text
-                          -- ^ Just a literal string of characters
-                        | RRSubWholeMatch
-                          -- ^ Equivalent to $0.
The entire matched pattern, not any subgroup - | RRSubGroup RRGroupRefNum - -- ^ A specific reference to a group number - deriving (Show, Eq, Generic, Typeable) - - --- | A group number for regex matching. Only values from 1-9 are --- supported. Construct with 'mkRRGroupRefNum' -newtype RRGroupRefNum = RRGroupRefNum { rrGroupRefNum :: Int } - deriving (Show, Eq, Generic, Typeable, Ord) - -instance Bounded RRGroupRefNum where - minBound = RRGroupRefNum 1 - maxBound = RRGroupRefNum 9 - - --- | Only allows valid group number references (1-9). -mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum -mkRRGroupRefNum i - | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) = - Just $ RRGroupRefNum i - | otherwise = Nothing - - --- | Reasonable defaults for snapshot restores --- --- * snapRestoreWaitForCompletion False --- * snapRestoreIndices Nothing --- * snapRestoreIgnoreUnavailable False --- * snapRestoreIncludeGlobalState True --- * snapRestoreRenamePattern Nothing --- * snapRestoreRenameReplacement Nothing --- * snapRestorePartial False --- * snapRestoreIncludeAliases True --- * snapRestoreIndexSettingsOverrides Nothing --- * snapRestoreIgnoreIndexSettings Nothing -defaultSnapshotRestoreSettings :: SnapshotRestoreSettings -defaultSnapshotRestoreSettings = SnapshotRestoreSettings { - snapRestoreWaitForCompletion = False - , snapRestoreIndices = Nothing - , snapRestoreIgnoreUnavailable = False - , snapRestoreIncludeGlobalState = True - , snapRestoreRenamePattern = Nothing - , snapRestoreRenameReplacement = Nothing - , snapRestorePartial = False - , snapRestoreIncludeAliases = True - , snapRestoreIndexSettingsOverrides = Nothing - , snapRestoreIgnoreIndexSettings = Nothing - } - - --- | Index settings that can be overridden. The docs only mention you --- can update number of replicas, but there may be more. You --- definitely cannot override shard count. 
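-- A minimal sketch of the override payload produced below (the replica
-- count is made up and assumes 'ReplicaCount's numeric encoding):
--
-- >>> encode (RestoreIndexSettings (Just (ReplicaCount 2)))
-- "{\"index.number_of_replicas\":2}"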
-data RestoreIndexSettings = RestoreIndexSettings { - restoreOverrideReplicas :: Maybe ReplicaCount - } deriving (Show, Eq, Generic, Typeable) - - -instance ToJSON RestoreIndexSettings where - toJSON RestoreIndexSettings {..} = object prs - where - prs = catMaybes [("index.number_of_replicas" .=) <$> restoreOverrideReplicas] - - -instance FromJSON NodesInfo where - parseJSON = withObject "NodesInfo" parse - where - parse o = do - nodes <- o .: "nodes" - infos <- forM (HM.toList nodes) $ \(fullNID, v) -> do - node <- parseJSON v - parseNodeInfo (FullNodeId fullNID) node - cn <- o .: "cluster_name" - return (NodesInfo infos cn) - -instance FromJSON NodesStats where - parseJSON = withObject "NodesStats" parse - where - parse o = do - nodes <- o .: "nodes" - stats <- forM (HM.toList nodes) $ \(fullNID, v) -> do - node <- parseJSON v - parseNodeStats (FullNodeId fullNID) node - cn <- o .: "cluster_name" - return (NodesStats stats cn) - -instance FromJSON NodeBreakerStats where - parseJSON = withObject "NodeBreakerStats" parse - where - parse o = NodeBreakerStats <$> o .: "tripped" - <*> o .: "overhead" - <*> o .: "estimated_size_in_bytes" - <*> o .: "limit_size_in_bytes" - -instance FromJSON NodeHTTPStats where - parseJSON = withObject "NodeHTTPStats" parse - where - parse o = NodeHTTPStats <$> o .: "total_opened" - <*> o .: "current_open" - -instance FromJSON NodeTransportStats where - parseJSON = withObject "NodeTransportStats" parse - where - parse o = NodeTransportStats <$> o .: "tx_size_in_bytes" - <*> o .: "tx_count" - <*> o .: "rx_size_in_bytes" - <*> o .: "rx_count" - <*> o .: "server_open" - -instance FromJSON NodeFSStats where - parseJSON = withObject "NodeFSStats" parse - where - parse o = NodeFSStats <$> o .: "data" - <*> o .: "total" - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON NodeDataPathStats where - parseJSON = withObject "NodeDataPathStats" parse - where - parse o = - NodeDataPathStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") - <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") - <*> o .:? "disk_io_size_in_bytes" - <*> o .:? "disk_write_size_in_bytes" - <*> o .:? "disk_read_size_in_bytes" - <*> o .:? "disk_io_op" - <*> o .:? "disk_writes" - <*> o .:? "disk_reads" - <*> o .: "available_in_bytes" - <*> o .: "free_in_bytes" - <*> o .: "total_in_bytes" - <*> o .:? "type" - <*> o .:? "dev" - <*> o .: "mount" - <*> o .: "path" - -newtype StringlyTypedDouble = StringlyTypedDouble { unStringlyTypedDouble :: Double } - -instance FromJSON StringlyTypedDouble where - parseJSON = fmap StringlyTypedDouble . parseJSON . unStringlyTypeJSON - -newtype StringlyTypedInt = StringlyTypedInt { unStringlyTypedInt :: Int } - -instance FromJSON StringlyTypedInt where - parseJSON = fmap StringlyTypedInt . parseJSON . unStringlyTypeJSON - -newtype StringlyTypedBool = StringlyTypedBool { unStringlyTypedBool :: Bool } - -instance FromJSON StringlyTypedBool where - parseJSON = fmap StringlyTypedBool . parseJSON . unStringlyTypeJSON - -instance FromJSON NodeFSTotalStats where - parseJSON = withObject "NodeFSTotalStats" parse - where - parse o = NodeFSTotalStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") - <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") - <*> o .:? "disk_io_size_in_bytes" - <*> o .:? "disk_write_size_in_bytes" - <*> o .:? "disk_read_size_in_bytes" - <*> o .:? "disk_io_op" - <*> o .:? "disk_writes" - <*> o .:? 
"disk_reads" - <*> o .: "available_in_bytes" - <*> o .: "free_in_bytes" - <*> o .: "total_in_bytes" - -instance FromJSON NodeNetworkStats where - parseJSON = withObject "NodeNetworkStats" parse - where - parse o = do - tcp <- o .: "tcp" - NodeNetworkStats <$> tcp .: "out_rsts" - <*> tcp .: "in_errs" - <*> tcp .: "attempt_fails" - <*> tcp .: "estab_resets" - <*> tcp .: "retrans_segs" - <*> tcp .: "out_segs" - <*> tcp .: "in_segs" - <*> tcp .: "curr_estab" - <*> tcp .: "passive_opens" - <*> tcp .: "active_opens" - -instance FromJSON NodeThreadPoolsStats where - parseJSON = withObject "NodeThreadPoolsStats" parse - where - parse o = NodeThreadPoolsStats <$> o .: "snapshot" - <*> o .: "bulk" - <*> o .: "force_merge" - <*> o .: "get" - <*> o .: "management" - <*> o .:? "fetch_shard_store" - <*> o .:? "optimize" - <*> o .: "flush" - <*> o .: "search" - <*> o .: "warmer" - <*> o .: "generic" - <*> o .:? "suggest" - <*> o .: "refresh" - <*> o .: "index" - <*> o .:? "listener" - <*> o .:? "fetch_shard_started" - <*> o .:? "percolate" -instance FromJSON NodeThreadPoolStats where - parseJSON = withObject "NodeThreadPoolStats" parse - where - parse o = NodeThreadPoolStats <$> o .: "completed" - <*> o .: "largest" - <*> o .: "rejected" - <*> o .: "active" - <*> o .: "queue" - <*> o .: "threads" - -instance FromJSON NodeJVMStats where - parseJSON = withObject "NodeJVMStats" parse - where - parse o = do - bufferPools <- o .: "buffer_pools" - mapped <- bufferPools .: "mapped" - direct <- bufferPools .: "direct" - gc <- o .: "gc" - collectors <- gc .: "collectors" - oldC <- collectors .: "old" - youngC <- collectors .: "young" - threads <- o .: "threads" - mem <- o .: "mem" - pools <- mem .: "pools" - oldM <- pools .: "old" - survivorM <- pools .: "survivor" - youngM <- pools .: "young" - NodeJVMStats <$> pure mapped - <*> pure direct - <*> pure oldC - <*> pure youngC - <*> threads .: "peak_count" - <*> threads .: "count" - <*> pure oldM - <*> pure survivorM - <*> pure youngM - <*> mem .: "non_heap_committed_in_bytes" - <*> mem .: "non_heap_used_in_bytes" - <*> mem .: "heap_max_in_bytes" - <*> mem .: "heap_committed_in_bytes" - <*> mem .: "heap_used_percent" - <*> mem .: "heap_used_in_bytes" - <*> (unMS <$> o .: "uptime_in_millis") - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON JVMBufferPoolStats where - parseJSON = withObject "JVMBufferPoolStats" parse - where - parse o = JVMBufferPoolStats <$> o .: "total_capacity_in_bytes" - <*> o .: "used_in_bytes" - <*> o .: "count" - -instance FromJSON JVMGCStats where - parseJSON = withObject "JVMGCStats" parse - where - parse o = JVMGCStats <$> (unMS <$> o .: "collection_time_in_millis") - <*> o .: "collection_count" - -instance FromJSON JVMPoolStats where - parseJSON = withObject "JVMPoolStats" parse - where - parse o = JVMPoolStats <$> o .: "peak_max_in_bytes" - <*> o .: "peak_used_in_bytes" - <*> o .: "max_in_bytes" - <*> o .: "used_in_bytes" - -instance FromJSON NodeProcessStats where - parseJSON = withObject "NodeProcessStats" parse - where - parse o = do - mem <- o .: "mem" - cpu <- o .: "cpu" - NodeProcessStats <$> (posixMS <$> o .: "timestamp") - <*> o .: "open_file_descriptors" - <*> o .: "max_file_descriptors" - <*> cpu .: "percent" - <*> (unMS <$> cpu .: "total_in_millis") - <*> mem .: "total_virtual_in_bytes" - -instance FromJSON NodeOSStats where - parseJSON = withObject "NodeOSStats" parse - where - parse o = do - swap <- o .: "swap" - mem <- o .: "mem" - cpu <- o .: "cpu" - load <- o .:? 
"load_average" - NodeOSStats <$> (posixMS <$> o .: "timestamp") - <*> cpu .: "percent" - <*> pure load - <*> mem .: "total_in_bytes" - <*> mem .: "free_in_bytes" - <*> mem .: "free_percent" - <*> mem .: "used_in_bytes" - <*> mem .: "used_percent" - <*> swap .: "total_in_bytes" - <*> swap .: "free_in_bytes" - <*> swap .: "used_in_bytes" - -instance FromJSON LoadAvgs where - parseJSON = withArray "LoadAvgs" parse - where - parse v = case V.toList v of - [one, five, fifteen] -> LoadAvgs <$> parseJSON one - <*> parseJSON five - <*> parseJSON fifteen - _ -> fail "Expecting a triple of Doubles" - -instance FromJSON NodeIndicesStats where - parseJSON = withObject "NodeIndicesStats" parse - where - parse o = do - let (.::) mv k = case mv of - Just v -> Just <$> v .: k - Nothing -> pure Nothing - mRecovery <- o .:? "recovery" - mQueryCache <- o .:? "query_cache" - mSuggest <- o .:? "suggest" - translog <- o .: "translog" - segments <- o .: "segments" - completion <- o .: "completion" - mPercolate <- o .:? "percolate" - fielddata <- o .: "fielddata" - warmer <- o .: "warmer" - flush <- o .: "flush" - refresh <- o .: "refresh" - merges <- o .: "merges" - search <- o .: "search" - getStats <- o .: "get" - indexing <- o .: "indexing" - store <- o .: "store" - docs <- o .: "docs" - NodeIndicesStats <$> (fmap unMS <$> mRecovery .:: "throttle_time_in_millis") - <*> mRecovery .:: "current_as_target" - <*> mRecovery .:: "current_as_source" - <*> mQueryCache .:: "miss_count" - <*> mQueryCache .:: "hit_count" - <*> mQueryCache .:: "evictions" - <*> mQueryCache .:: "memory_size_in_bytes" - <*> mSuggest .:: "current" - <*> (fmap unMS <$> mSuggest .:: "time_in_millis") - <*> mSuggest .:: "total" - <*> translog .: "size_in_bytes" - <*> translog .: "operations" - <*> segments .:? "fixed_bit_set_memory_in_bytes" - <*> segments .: "version_map_memory_in_bytes" - <*> segments .:? "index_writer_max_memory_in_bytes" - <*> segments .: "index_writer_memory_in_bytes" - <*> segments .: "memory_in_bytes" - <*> segments .: "count" - <*> completion .: "size_in_bytes" - <*> mPercolate .:: "queries" - <*> mPercolate .:: "memory_size_in_bytes" - <*> mPercolate .:: "current" - <*> (fmap unMS <$> mPercolate .:: "time_in_millis") - <*> mPercolate .:: "total" - <*> fielddata .: "evictions" - <*> fielddata .: "memory_size_in_bytes" - <*> (unMS <$> warmer .: "total_time_in_millis") - <*> warmer .: "total" - <*> warmer .: "current" - <*> (unMS <$> flush .: "total_time_in_millis") - <*> flush .: "total" - <*> (unMS <$> refresh .: "total_time_in_millis") - <*> refresh .: "total" - <*> merges .: "total_size_in_bytes" - <*> merges .: "total_docs" - <*> (unMS <$> merges .: "total_time_in_millis") - <*> merges .: "total" - <*> merges .: "current_size_in_bytes" - <*> merges .: "current_docs" - <*> merges .: "current" - <*> search .: "fetch_current" - <*> (unMS <$> search .: "fetch_time_in_millis") - <*> search .: "fetch_total" - <*> search .: "query_current" - <*> (unMS <$> search .: "query_time_in_millis") - <*> search .: "query_total" - <*> search .: "open_contexts" - <*> getStats .: "current" - <*> (unMS <$> getStats .: "missing_time_in_millis") - <*> getStats .: "missing_total" - <*> (unMS <$> getStats .: "exists_time_in_millis") - <*> getStats .: "exists_total" - <*> (unMS <$> getStats .: "time_in_millis") - <*> getStats .: "total" - <*> (fmap unMS <$> indexing .:? "throttle_time_in_millis") - <*> indexing .:? "is_throttled" - <*> indexing .:? 
"noop_update_total" - <*> indexing .: "delete_current" - <*> (unMS <$> indexing .: "delete_time_in_millis") - <*> indexing .: "delete_total" - <*> indexing .: "index_current" - <*> (unMS <$> indexing .: "index_time_in_millis") - <*> indexing .: "index_total" - <*> (unMS <$> store .: "throttle_time_in_millis") - <*> store .: "size_in_bytes" - <*> docs .: "deleted" - <*> docs .: "count" - -instance FromJSON NodeBreakersStats where - parseJSON = withObject "NodeBreakersStats" parse - where - parse o = NodeBreakersStats <$> o .: "parent" - <*> o .: "request" - <*> o .: "fielddata" - -parseNodeStats :: FullNodeId -> Object -> Parser NodeStats -parseNodeStats fnid o = do - NodeStats <$> o .: "name" - <*> pure fnid - <*> o .:? "breakers" - <*> o .: "http" - <*> o .: "transport" - <*> o .: "fs" - <*> o .:? "network" - <*> o .: "thread_pool" - <*> o .: "jvm" - <*> o .: "process" - <*> o .: "os" - <*> o .: "indices" - -parseNodeInfo :: FullNodeId -> Object -> Parser NodeInfo -parseNodeInfo nid o = - NodeInfo <$> o .:? "http_address" - <*> o .: "build_hash" - <*> o .: "version" - <*> o .: "ip" - <*> o .: "host" - <*> o .: "transport_address" - <*> o .: "name" - <*> pure nid - <*> o .: "plugins" - <*> o .: "http" - <*> o .: "transport" - <*> o .:? "network" - <*> o .: "thread_pool" - <*> o .: "jvm" - <*> o .: "process" - <*> o .: "os" - <*> o .: "settings" - -instance FromJSON NodePluginInfo where - parseJSON = withObject "NodePluginInfo" parse - where - parse o = NodePluginInfo <$> o .:? "site" - <*> o .:? "jvm" - <*> o .: "description" - <*> o .: "version" - <*> o .: "name" - -instance FromJSON NodeHTTPInfo where - parseJSON = withObject "NodeHTTPInfo" parse - where - parse o = NodeHTTPInfo <$> o .: "max_content_length_in_bytes" - <*> parseJSON (Object o) - -instance FromJSON BoundTransportAddress where - parseJSON = withObject "BoundTransportAddress" parse - where - parse o = BoundTransportAddress <$> o .: "publish_address" - <*> o .: "bound_address" - -instance FromJSON NodeOSInfo where - parseJSON = withObject "NodeOSInfo" parse - where - parse o = do - NodeOSInfo <$> (unMS <$> o .: "refresh_interval_in_millis") - <*> o .: "name" - <*> o .: "arch" - <*> o .: "version" - <*> o .: "available_processors" - <*> o .: "allocated_processors" - - -instance FromJSON CPUInfo where - parseJSON = withObject "CPUInfo" parse - where - parse o = CPUInfo <$> o .: "cache_size_in_bytes" - <*> o .: "cores_per_socket" - <*> o .: "total_sockets" - <*> o .: "total_cores" - <*> o .: "mhz" - <*> o .: "model" - <*> o .: "vendor" - -instance FromJSON NodeProcessInfo where - parseJSON = withObject "NodeProcessInfo" parse - where - parse o = NodeProcessInfo <$> o .: "mlockall" - <*> o .:? "max_file_descriptors" - <*> o .: "id" - <*> (unMS <$> o .: "refresh_interval_in_millis") - -instance FromJSON NodeJVMInfo where - parseJSON = withObject "NodeJVMInfo" parse - where - parse o = NodeJVMInfo <$> o .: "memory_pools" - <*> o .: "gc_collectors" - <*> o .: "mem" - <*> (posixMS <$> o .: "start_time_in_millis") - <*> o .: "vm_vendor" - <*> o .: "vm_version" - <*> o .: "vm_name" - <*> (unJVMVersion <$> o .: "version") - <*> o .: "pid" - -instance FromJSON JVMVersion where - parseJSON (String t) = - JVMVersion <$> parseJSON (String (T.replace "_" "." 
t)) - parseJSON v = JVMVersion <$> parseJSON v - -instance FromJSON JVMMemoryInfo where - parseJSON = withObject "JVMMemoryInfo" parse - where - parse o = JVMMemoryInfo <$> o .: "direct_max_in_bytes" - <*> o .: "non_heap_max_in_bytes" - <*> o .: "non_heap_init_in_bytes" - <*> o .: "heap_max_in_bytes" - <*> o .: "heap_init_in_bytes" - -instance FromJSON NodeThreadPoolsInfo where - parseJSON = withObject "NodeThreadPoolsInfo" parse - where - parse o = NodeThreadPoolsInfo <$> o .: "refresh" - <*> o .: "management" - <*> o .:? "percolate" - <*> o .:? "listener" - <*> o .:? "fetch_shard_started" - <*> o .: "search" - <*> o .: "flush" - <*> o .: "warmer" - <*> o .:? "optimize" - <*> o .: "bulk" - <*> o .:? "suggest" - <*> o .: "force_merge" - <*> o .: "snapshot" - <*> o .: "get" - <*> o .:? "fetch_shard_store" - <*> o .: "index" - <*> o .: "generic" - -instance FromJSON NodeThreadPoolInfo where - parseJSON = withObject "NodeThreadPoolInfo" parse - where - parse o = do - ka <- maybe (return Nothing) (fmap Just . parseStringInterval) =<< o .:? "keep_alive" - NodeThreadPoolInfo <$> (parseJSON . unStringlyTypeJSON =<< o .: "queue_size") - <*> pure ka - <*> o .:? "min" - <*> o .:? "max" - <*> o .: "type" - -parseStringInterval :: (Monad m) => String -> m NominalDiffTime -parseStringInterval s = case span isNumber s of - ("", _) -> fail "Invalid interval" - (nS, unitS) -> case (readMay nS, readMay unitS) of - (Just n, Just unit) -> return (fromInteger (n * unitNDT unit)) - (Nothing, _) -> fail "Invalid interval number" - (_, Nothing) -> fail "Invalid interval unit" - where - unitNDT Seconds = 1 - unitNDT Minutes = 60 - unitNDT Hours = 60 * 60 - unitNDT Days = 24 * 60 * 60 - unitNDT Weeks = 7 * 24 * 60 * 60 - -instance FromJSON ThreadPoolSize where - parseJSON v = parseAsNumber v <|> parseAsString v - where - parseAsNumber = parseAsInt <=< parseJSON - parseAsInt (-1) = return ThreadPoolUnbounded - parseAsInt n - | n >= 0 = return (ThreadPoolBounded n) - | otherwise = fail "Thread pool size must be >= -1." - parseAsString = withText "ThreadPoolSize" $ \t -> - case first (readMay . T.unpack) (T.span isNumber t) of - (Just n, "k") -> return (ThreadPoolBounded (n * 1000)) - (Just n, "") -> return (ThreadPoolBounded n) - _ -> fail ("Invalid thread pool size " <> T.unpack t) - -instance FromJSON ThreadPoolType where - parseJSON = withText "ThreadPoolType" parse - where - parse "scaling" = return ThreadPoolScaling - parse "fixed" = return ThreadPoolFixed - parse "cached" = return ThreadPoolCached - parse e = fail ("Unexpected thread pool type " <> T.unpack e) - -instance FromJSON NodeTransportInfo where - parseJSON = withObject "NodeTransportInfo" parse - where - parse o = NodeTransportInfo <$> (maybe (return mempty) parseProfiles =<< o .:?
"profiles") - <*> parseJSON (Object o) - parseProfiles (Object o) | HM.null o = return [] - parseProfiles v@(Array _) = parseJSON v - parseProfiles Null = return [] - parseProfiles _ = fail "Could not parse profiles" - -instance FromJSON NodeNetworkInfo where - parseJSON = withObject "NodeNetworkInfo" parse - where - parse o = NodeNetworkInfo <$> o .: "primary_interface" - <*> (unMS <$> o .: "refresh_interval_in_millis") - - -instance FromJSON NodeNetworkInterface where - parseJSON = withObject "NodeNetworkInterface" parse - where - parse o = NodeNetworkInterface <$> o .: "mac_address" - <*> o .: "name" - <*> o .: "address" - -newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a } - deriving (Show, Eq) - -instance FromJSON a => FromJSON (MaybeNA a) where - parseJSON (String "NA") = pure $ MaybeNA Nothing - parseJSON o = MaybeNA . Just <$> parseJSON o - -data Suggest = Suggest { suggestText :: Text - , suggestName :: Text - , suggestType :: SuggestType - } - deriving (Show, Generic, Eq, Read) - -instance ToJSON Suggest where - toJSON Suggest{..} = object [ "text" .= suggestText - , suggestName .= suggestType - ] - -instance FromJSON Suggest where - parseJSON (Object o) = do - suggestText' <- o .: "text" - let dropTextList = HM.toList $ HM.filterWithKey (\x _ -> x /= "text") o - suggestName' <- case dropTextList of - [(x, _)] -> return x - _ -> fail "error parsing Suggest field name" - suggestType' <- o .: suggestName' - return $ Suggest suggestText' suggestName' suggestType' - parseJSON x = typeMismatch "Suggest" x - -data SuggestType = SuggestTypePhraseSuggester PhraseSuggester - deriving (Show, Generic, Eq, Read) - -instance ToJSON SuggestType where - toJSON (SuggestTypePhraseSuggester x) = object ["phrase" .= x] - -instance FromJSON SuggestType where - parseJSON = withObject "SuggestType" parse - where parse o = phraseSuggester `taggedWith` "phrase" - where taggedWith parser k = parser =<< o .: k - phraseSuggester = pure . SuggestTypePhraseSuggester - -data PhraseSuggester = - PhraseSuggester { phraseSuggesterField :: FieldName - , phraseSuggesterGramSize :: Maybe Int - , phraseSuggesterRealWordErrorLikelihood :: Maybe Int - , phraseSuggesterConfidence :: Maybe Int - , phraseSuggesterMaxErrors :: Maybe Int - , phraseSuggesterSeparator :: Maybe Text - , phraseSuggesterSize :: Maybe Size - , phraseSuggesterAnalyzer :: Maybe Analyzer - , phraseSuggesterShardSize :: Maybe Int - , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter - , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate - , phraseSuggesterCandidateGenerators :: [DirectGenerators] - } - deriving (Show, Generic, Eq, Read) - -instance ToJSON PhraseSuggester where - toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField - , "gram_size" .= phraseSuggesterGramSize - , "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood - , "confidence" .= phraseSuggesterConfidence - , "max_errors" .= phraseSuggesterMaxErrors - , "separator" .= phraseSuggesterSeparator - , "size" .= phraseSuggesterSize - , "analyzer" .= phraseSuggesterAnalyzer - , "shard_size" .= phraseSuggesterShardSize - , "highlight" .= phraseSuggesterHighlight - , "collate" .= phraseSuggesterCollate - , "direct_generator" .= phraseSuggesterCandidateGenerators - ] - -instance FromJSON PhraseSuggester where - parseJSON = withObject "PhraseSuggester" parse - where parse o = PhraseSuggester - <$> o .: "field" - <*> o .:? "gram_size" - <*> o .:? "real_word_error_likelihood" - <*> o .:? "confidence" - <*> o .:? "max_errors" - <*> o .:? 
"separator" - <*> o .:? "size" - <*> o .:? "analyzer" - <*> o .:? "shard_size" - <*> o .:? "highlight" - <*> o .:? "collate" - <*> o .:? "direct_generator" .!= [] - -mkPhraseSuggester :: FieldName -> PhraseSuggester -mkPhraseSuggester fName = - PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing [] - -data PhraseSuggesterHighlighter = - PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text - , phraseSuggesterHighlighterPostTag :: Text - } - deriving (Show, Generic, Eq, Read) - -instance ToJSON PhraseSuggesterHighlighter where - toJSON PhraseSuggesterHighlighter{..} = - object [ "pre_tag" .= phraseSuggesterHighlighterPreTag - , "post_tag" .= phraseSuggesterHighlighterPostTag - ] - -instance FromJSON PhraseSuggesterHighlighter where - parseJSON = withObject "PhraseSuggesterHighlighter" parse - where parse o = PhraseSuggesterHighlighter - <$> o .: "pre_tag" - <*> o .: "post_tag" - -data PhraseSuggesterCollate = - PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline - , phraseSuggesterCollatePrune :: Bool - } - deriving (Show, Generic, Eq, Read) - -instance ToJSON PhraseSuggesterCollate where - toJSON PhraseSuggesterCollate{..} = object [ "query" .= object - [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) - ] - , "params" .= (params phraseSuggesterCollateTemplateQuery) - , "prune" .= phraseSuggesterCollatePrune - ] - -instance FromJSON PhraseSuggesterCollate where - parseJSON (Object o) = do - query' <- o .: "query" - inline' <- query' .: "inline" - params' <- o .: "params" - prune' <- o .:? "prune" .!= False - return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune' - parseJSON x = typeMismatch "PhraseSuggesterCollate" x - -data SuggestOptions = - SuggestOptions { suggestOptionsText :: Text - , suggestOptionsScore :: Double - , suggestOptionsFreq :: Maybe Int - , suggestOptionsHighlighted :: Maybe Text - } - deriving (Eq, Read, Show) - -instance FromJSON SuggestOptions where - parseJSON = withObject "SuggestOptions" parse - where parse o = SuggestOptions - <$> o .: "text" - <*> o .: "score" - <*> o .:? "freq" - <*> o .:? 
"highlighted" - -data SuggestResponse = - SuggestResponse { suggestResponseText :: Text - , suggestResponseOffset :: Int - , suggestResponseLength :: Int - , suggestResponseOptions :: [SuggestOptions] - } - deriving (Eq, Read, Show) - -instance FromJSON SuggestResponse where - parseJSON = withObject "SuggestResponse" parse - where parse o = SuggestResponse - <$> o .: "text" - <*> o .: "offset" - <*> o .: "length" - <*> o .: "options" - -data NamedSuggestionResponse = - NamedSuggestionResponse { nsrName :: Text - , nsrResponses :: [SuggestResponse] - } - deriving (Eq, Read, Show) - -instance FromJSON NamedSuggestionResponse where - parseJSON (Object o) = do - suggestionName' <- case HM.toList o of - [(x, _)] -> return x - _ -> fail "error parsing NamedSuggestionResponse name" - suggestionResponses' <- o .: suggestionName' - return $ NamedSuggestionResponse suggestionName' suggestionResponses' - - parseJSON x = typeMismatch "NamedSuggestionResponse" x - -data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing - | DirectGeneratorSuggestModePopular - | DirectGeneratorSuggestModeAlways - deriving (Show, Eq, Read, Generic) - -instance ToJSON DirectGeneratorSuggestModeTypes where - toJSON DirectGeneratorSuggestModeMissing = "missing" - toJSON DirectGeneratorSuggestModePopular = "popular" - toJSON DirectGeneratorSuggestModeAlways = "always" - -instance FromJSON DirectGeneratorSuggestModeTypes where - parseJSON = withText "DirectGeneratorSuggestModeTypes" parse - where parse "missing" = pure DirectGeneratorSuggestModeMissing - parse "popular" = pure DirectGeneratorSuggestModePopular - parse "always" = pure DirectGeneratorSuggestModeAlways - parse f = fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f) - -data DirectGenerators = DirectGenerators - { directGeneratorsField :: FieldName - , directGeneratorsSize :: Maybe Int - , directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes - , directGeneratorMaxEdits :: Maybe Double - , directGeneratorPrefixLength :: Maybe Int - , directGeneratorMinWordLength :: Maybe Int - , directGeneratorMaxInspections :: Maybe Int - , directGeneratorMinDocFreq :: Maybe Double - , directGeneratorMaxTermFreq :: Maybe Double - , directGeneratorPreFilter :: Maybe Text - , directGeneratorPostFilter :: Maybe Text - } - deriving (Show, Eq, Read, Generic) - - -instance ToJSON DirectGenerators where - toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField - , "size" .= directGeneratorsSize - , "suggest_mode" .= directGeneratorSuggestMode - , "max_edits" .= directGeneratorMaxEdits - , "prefix_length" .= directGeneratorPrefixLength - , "min_word_length" .= directGeneratorMinWordLength - , "max_inspections" .= directGeneratorMaxInspections - , "min_doc_freq" .= directGeneratorMinDocFreq - , "max_term_freq" .= directGeneratorMaxTermFreq - , "pre_filter" .= directGeneratorPreFilter - , "post_filter" .= directGeneratorPostFilter - ] - -instance FromJSON DirectGenerators where - parseJSON = withObject "DirectGenerators" parse - where parse o = DirectGenerators - <$> o .: "field" - <*> o .:? "size" - <*> o .: "suggest_mode" - <*> o .:? "max_edits" - <*> o .:? "prefix_length" - <*> o .:? "min_word_length" - <*> o .:? "max_inspections" - <*> o .:? "min_doc_freq" - <*> o .:? "max_term_freq" - <*> o .:? "pre_filter" - <*> o .:? 
"post_filter" - -mkDirectGenerators :: FieldName -> DirectGenerators -mkDirectGenerators fn = DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing +newtype ScrollId = + ScrollId Text + deriving (Eq, Show, Ord, ToJSON, FromJSON) diff --git a/src/Database/V5/Bloodhound/Types/Internal.hs b/src/Database/V5/Bloodhound/Types/Internal.hs deleted file mode 100644 index 685a12c..0000000 --- a/src/Database/V5/Bloodhound/Types/Internal.hs +++ /dev/null @@ -1,57 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -------------------------------------------------------------------------------- --- | --- Module : Database.Bloodhound.Types.Internal --- Copyright : (C) 2014 Chris Allen --- License : BSD-style (see the file LICENSE) --- Maintainer : Chris Allen --- Stability : provisional --- Portability : DeriveGeneric, RecordWildCards --- --- Internal data types for Bloodhound. These types may change without --- notice so import at your own risk. -------------------------------------------------------------------------------- -module Database.V5.Bloodhound.Types.Internal - ( BHEnv(..) - , Server(..) - , MonadBH(..) - ) where - - -import Control.Applicative as A -import Control.Monad.Reader -import Data.Aeson -import Data.Text (Text) -import Data.Typeable (Typeable) -import GHC.Generics (Generic) -import Network.HTTP.Client - -{-| Common environment for Elasticsearch calls. Connections will be - pipelined according to the provided HTTP connection manager. --} -data BHEnv = BHEnv { bhServer :: Server - , bhManager :: Manager - , bhRequestHook :: Request -> IO Request - -- ^ Low-level hook that is run before every request is sent. Used to implement custom authentication strategies. Defaults to 'return' with 'mkBHEnv'. - } - -instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where - getBHEnv = ask - -{-| 'Server' is used with the client functions to point at the ES instance --} -newtype Server = Server Text deriving (Eq, Show, Generic, Typeable, FromJSON) - -{-| All API calls to Elasticsearch operate within - MonadBH - . The idea is that it can be easily embedded in your - own monad transformer stack. A default instance for a ReaderT and - alias 'BH' is provided for the simple case. 
--} -class (Functor m, A.Applicative m, MonadIO m) => MonadBH m where - getBHEnv :: m BHEnv - diff --git a/stack-7.10.yaml b/stack-7.10.yaml index 9350196..d1b418e 100644 --- a/stack-7.10.yaml +++ b/stack-7.10.yaml @@ -15,4 +15,7 @@ extra-deps: - uri-bytestring-0.1.9 - temporary-resourcet-0.1.0.0 - transformers-compat-0.5.1.4 +- pretty-simple-2.0.2.0 +- quickcheck-arbitrary-template-0.2.0.0 + resolver: lts-6.12 diff --git a/stack-7.8.yaml b/stack-7.8.yaml index 14c0226..af7834f 100644 --- a/stack-7.8.yaml +++ b/stack-7.8.yaml @@ -19,4 +19,7 @@ extra-deps: - void-0.7.1 - generics-sop-0.2.2.0 - unordered-containers-0.2.6.0 +- pretty-simple-2.0.2.0 +- quickcheck-arbitrary-template-0.2.0.0 + resolver: lts-2.18 diff --git a/stack-8.0.yaml b/stack-8.0.yaml index 33299b8..c46c50c 100644 --- a/stack-8.0.yaml +++ b/stack-8.0.yaml @@ -7,5 +7,6 @@ packages: - './examples' extra-deps: - quickcheck-properties-0.1 + - quickcheck-arbitrary-template-0.2.0.0 resolver: lts-8.14 diff --git a/stack-8.2.yaml b/stack-8.2.yaml index aba4d8c..6e18ab1 100644 --- a/stack-8.2.yaml +++ b/stack-8.2.yaml @@ -10,5 +10,6 @@ extra-deps: - quickcheck-properties-0.1 - http-types-0.12.1 - aeson-1.3.0.0 + - quickcheck-arbitrary-template-0.2.0.0 resolver: lts-11.0 diff --git a/tests/V1/Test/Aggregation.hs b/tests/V1/Test/Aggregation.hs new file mode 100644 index 0000000..0876832 --- /dev/null +++ b/tests/V1/Test/Aggregation.hs @@ -0,0 +1,132 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Aggregation (spec) where + +import Test.Common +import Test.Import + +import Control.Error (fmapL, note) +import qualified Data.Map as M +import qualified Database.V1.Bloodhound + +spec :: Spec +spec = + describe "Aggregation API" $ do + it "returns term aggregation results" $ withTestEnv $ do + _ <- insertData + let terms = TermsAgg $ mkTermsAggregation "user" + let search = mkAggregateSearch Nothing $ mkAggregations "users" terms + searchExpectAggs search + searchValidBucketAgg search "users" toTerms + + it "returns sub-aggregation results" $ withTestEnv $ do + _ <- insertData + let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age" + agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs} + search = mkAggregateSearch Nothing $ mkAggregations "users" agg + reply <- searchByIndex testIndex search + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + usersAggResults = result >>= aggregations >>= toTerms "users" + subAggResults = usersAggResults >>= (listToMaybe .
buckets) >>= termsAggs >>= toTerms "age_agg" + subAggResultsExists = isJust subAggResults + liftIO $ subAggResultsExists `shouldBe` True + + it "returns cardinality aggregation results" $ withTestEnv $ do + _ <- insertData + let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user" + let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality + let search' = search { Database.V1.Bloodhound.from = From 0, size = Size 0 } + searchExpectAggs search' + let docCountPair k n = (k, object ["value" .= Number n]) + res <- searchTweets search' + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1])) + + it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do + _ <- insertData + let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst } + let search = mkAggregateSearch Nothing $ mkAggregations "users" terms + searchExpectAggs search + searchValidBucketAgg search "users" toTerms + + -- One of these fails with 1.7.3 + it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do + _ <- insertData + searchTermsAggHint [Map, Ordinals] + + it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do + _ <- insertData + searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals] + + it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do + _ <- insertData + searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map] + -- One of the above. + + it "can execute value_count aggregations" $ withTestEnv $ do + _ <- insertData + _ <- insertOther + let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <> + mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus"))) + let search = mkAggregateSearch Nothing ags + let docCountPair k n = (k, object ["value" .= Number n]) + res <- searchTweets search + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2 + , docCountPair "bogus_count" 0 + ])) + + it "can execute date_range aggregations" $ withTestEnv $ do + let now = fromGregorian 2015 3 14 + let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0 + let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0 + let oldDoc = exampleTweet { postDate = ltAMonthAgo } + let newDoc = exampleTweet { postDate = ltAWeekAgo } + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1") + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2") + _ <- refreshIndex testIndex + let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth]) + let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek]) + let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek]) + let ags = mkAggregations "date_ranges" (DateRangeAgg agg) + let search = mkAggregateSearch Nothing ags + res <- searchTweets search + liftIO $ hitsTotal .
searchHits <$> res `shouldBe` Right 2 + let bucks = do magrs <- fmapL show (aggregations <$> res) + agrs <- note "no aggregations returned" magrs + rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs + parseEither parseJSON rawBucks + let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0 + let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0 + liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*" + (Just fromMonthT) + (Just "2015-02-14T00:00:00.000Z") + Nothing + Nothing + 2 + Nothing + , DateRangeResult "2015-03-07T00:00:00.000Z-*" + (Just fromWeekT) + (Just "2015-03-07T00:00:00.000Z") + Nothing + Nothing + 1 + Nothing + ] + + it "returns date histogram aggregation results" $ withTestEnv $ do + _ <- insertData + let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute + let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram) + searchExpectAggs search + searchValidBucketAgg search "byDate" toDateHistogram + + it "can execute missing aggregations" $ withTestEnv $ do + _ <- insertData + _ <- insertExtra + let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra")) + let search = mkAggregateSearch Nothing ags + let docCountPair k n = (k, object ["doc_count" .= Number n]) + res <- searchTweets search + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1])) diff --git a/tests/V1/Test/ApproxEq.hs b/tests/V1/Test/ApproxEq.hs new file mode 100644 index 0000000..551a06a --- /dev/null +++ b/tests/V1/Test/ApproxEq.hs @@ -0,0 +1,63 @@ +{-# LANGUAGE DefaultSignatures #-} +{-# LANGUAGE OverloadedStrings #-} + +module Test.ApproxEq where + +import Database.V1.Bloodhound + +import Test.Import + +import qualified Data.List.NonEmpty as NE + +-- | Typeclass for "equal where it matters". 
Use this to specify +-- less-strict equivalence for things such as lists that can wind up +-- in an unpredictable order +class ApproxEq a where + (=~) :: a -> a -> Bool + + showApproxEq :: a -> String + default showApproxEq :: (Show a) => a -> String + showApproxEq = show + +(==~) :: (ApproxEq a) => a -> a -> Property +a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b) + +instance ApproxEq NominalDiffTime where (=~) = (==) +instance ApproxEq Bool where (=~) = (==) +instance ApproxEq Int where (=~) = (==) +instance (Eq a, Show a) => ApproxEq (Maybe a) where (=~) = (==) +instance ApproxEq Char where + (=~) = (==) + +instance ApproxEq NodeAttrFilter where (=~) = (==) +instance ApproxEq NodeAttrName where (=~) = (==) +instance (Eq a, Show a) => ApproxEq (NonEmpty a) where (=~) = (==) +instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where + Left a =~ Left b = a =~ b + Right a =~ Right b = a =~ b + _ =~ _ = False + showApproxEq (Left x) = "Left " <> showApproxEq x + showApproxEq (Right x) = "Right " <> showApproxEq x +instance (ApproxEq a, Show a) => ApproxEq [a] where + -- Lengths must match; zipWith alone would truncate the longer list. + as =~ bs = length as == length bs && and (zipWith (=~) as bs) +instance ApproxEq ReplicaCount where (=~) = (==) +instance ApproxEq ReplicaBounds where (=~) = (==) +instance ApproxEq Bytes where (=~) = (==) +instance ApproxEq AllocationPolicy where (=~) = (==) +instance ApproxEq InitialShardCount where (=~) = (==) +instance ApproxEq FSType where (=~) = (==) + +-- | Due to the way nodeattrfilters get serialized here, they may come +-- out in a different order, but they are morally equivalent +instance ApproxEq UpdatableIndexSetting where + RoutingAllocationInclude a =~ RoutingAllocationInclude b = + NE.sort a =~ NE.sort b + RoutingAllocationExclude a =~ RoutingAllocationExclude b = + NE.sort a =~ NE.sort b + RoutingAllocationRequire a =~ RoutingAllocationRequire b = + NE.sort a =~ NE.sort b + a =~ b = a == b + showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs)) + showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs)) + showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs)) + showApproxEq x = show x diff --git a/tests/V1/Test/BulkAPI.hs b/tests/V1/Test/BulkAPI.hs new file mode 100644 index 0000000..496b0a6 --- /dev/null +++ b/tests/V1/Test/BulkAPI.hs @@ -0,0 +1,77 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.BulkAPI (spec) where + +import Test.Common +import Test.Import + +import qualified Data.Vector as V +import qualified Lens.Micro.Aeson as LMA + +newtype BulkTest = + BulkTest Text + deriving (Eq, Show) + +instance ToJSON BulkTest where + toJSON (BulkTest name') = + object ["name" .= name'] + +instance FromJSON BulkTest where + parseJSON = withObject "BulkTest" parse + where + parse o = do + t <- o .: "name" + BulkTest <$> parseJSON t + +spec :: Spec +spec = + describe "Bulk API" $ + it "inserts all documents we request" $ withTestEnv $ do + _ <- insertData + let firstTest = BulkTest "blah" + let secondTest = BulkTest "bloo" + let firstDoc = BulkIndex testIndex + testMapping (DocId "2") (toJSON firstTest) + let secondDoc = BulkCreate testIndex + testMapping (DocId "3") (toJSON secondTest) + let stream = V.fromList [firstDoc, secondDoc] + _ <- bulk stream + _ <- refreshIndex testIndex + fDoc <- getDocument testIndex testMapping (DocId "2") + sDoc <- getDocument testIndex testMapping (DocId "3") + -- note that both bulk documents were written with explicit ids + -- ("2" and "3"), so we can fetch them back directly.
+ let maybeFirst = + eitherDecode + $ responseBody fDoc + :: Either String (EsResult BulkTest) + let maybeSecond = + eitherDecode + $ responseBody sDoc + :: Either String (EsResult BulkTest) + liftIO $ do + fmap getSource maybeFirst `shouldBe` Right (Just firstTest) + fmap getSource maybeSecond `shouldBe` Right (Just secondTest) + -- As a further check, make sure both documents also show up in + -- a match all query. + let query = MatchAllQuery Nothing + let search = mkSearch (Just query) Nothing + resp <- searchByIndex testIndex search + parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value)) + case parsed of + Left e -> + liftIO $ expectationFailure ("Expected a SearchResult but got: " <> show e) + (Right sr) -> do + liftIO $ + hitsTotal (searchHits sr) `shouldBe` 3 + let nameList :: [Text] + nameList = + hits (searchHits sr) + ^.. traverse + . to hitSource + . _Just + . LMA.key "name" + . _String + liftIO $ + nameList + `shouldBe` ["blah","bloo"] diff --git a/tests/V1/Test/Common.hs b/tests/V1/Test/Common.hs new file mode 100644 index 0000000..69c3d25 --- /dev/null +++ b/tests/V1/Test/Common.hs @@ -0,0 +1,289 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} + +module Test.Common where + +import Test.Import + +import qualified Data.Map as M +import qualified Data.Version as Vers +import qualified Network.HTTP.Types.Status as NHTS + +testServer :: Server +testServer = Server "http://localhost:9200" +testIndex :: IndexName +testIndex = IndexName "bloodhound-tests-twitter-1" +testMapping :: MappingName +testMapping = MappingName "tweet" + +withTestEnv :: BH IO a -> IO a +withTestEnv = withBH defaultManagerSettings testServer + +data Location = Location { lat :: Double + , lon :: Double } deriving (Eq, Show) + +data Tweet = Tweet { user :: Text + , postDate :: UTCTime + , message :: Text + , age :: Int + , location :: Location + , extra :: Maybe Text } + deriving (Eq, Show) + +$(deriveJSON defaultOptions ''Location) +$(deriveJSON defaultOptions ''Tweet) + +data ParentMapping = ParentMapping deriving (Eq, Show) + +instance ToJSON ParentMapping where + toJSON ParentMapping = + object ["properties" .= + object [ "user" .= object ["type" .= ("string" :: Text) + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)] + ]] + +es13 :: Vers.Version +es13 = Vers.Version [1, 3, 0] [] + +es12 :: Vers.Version +es12 = Vers.Version [1, 2, 0] [] + +es11 :: Vers.Version +es11 = Vers.Version [1, 1, 0] [] + +es14 :: Vers.Version +es14 = Vers.Version [1, 4, 0] [] + +es15 :: Vers.Version +es15 = Vers.Version [1, 5, 0] [] + +es16 :: Vers.Version +es16 = Vers.Version [1, 6, 0] [] + +es20 :: Vers.Version +es20 = Vers.Version [2, 0, 0] [] + +es50 :: Vers.Version +es50 = Vers.Version [5, 0, 0] [] + +getServerVersion :: IO (Maybe Vers.Version) +getServerVersion = fmap extractVersion <$> withTestEnv getStatus + where + extractVersion = versionNumber . number .
version + +createExampleIndex :: (MonadBH m) => m Reply +createExampleIndex = + createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex + +deleteExampleIndex :: (MonadBH m) => m Reply +deleteExampleIndex = + deleteIndex testIndex + +validateStatus :: Show body => Response body -> Int -> Expectation +validateStatus resp expected = + if actual == expected + then return () + else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body) + where + actual = NHTS.statusCode (responseStatus resp) + body = responseBody resp + +data ChildMapping = ChildMapping deriving (Eq, Show) + +instance ToJSON ChildMapping where + toJSON ChildMapping = + object ["_parent" .= object ["type" .= ("parent" :: Text)] + , "properties" .= + object [ "user" .= object ["type" .= ("string" :: Text) + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)] + ]] + +data TweetMapping = TweetMapping deriving (Eq, Show) + +instance ToJSON TweetMapping where + toJSON TweetMapping = + object ["tweet" .= + object ["properties" .= + object [ "user" .= object [ "type" .= ("string" :: Text) + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)] + ]]] + +exampleTweet :: Tweet +exampleTweet = Tweet { user = "bitemyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 10) + , message = "Use haskell!" + , age = 10000 + , location = Location 40.12 (-71.34) + , extra = Nothing } + +tweetWithExtra :: Tweet +tweetWithExtra = Tweet { user = "bitemyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 10) + , message = "Use haskell!" + , age = 10000 + , location = Location 40.12 (-71.34) + , extra = Just "blah blah" } + +newAge :: Int +newAge = 31337 + +newUser :: Text +newUser = "someotherapp" + +tweetPatch :: Value +tweetPatch = + object [ "age" .= newAge + , "user" .= newUser + ] + +patchedTweet :: Tweet +patchedTweet = exampleTweet{age = newAge, user = newUser} + +otherTweet :: Tweet +otherTweet = Tweet { user = "notmyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 11) + , message = "Use haskell!" 
+ , age = 1000 + , location = Location 40.12 (-71.34) + , extra = Nothing } + +resetIndex :: BH IO () +resetIndex = do + _ <- deleteExampleIndex + _ <- createExampleIndex + _ <- putMapping testIndex testMapping TweetMapping + return () + +insertData :: BH IO Reply +insertData = do + resetIndex + insertData' defaultIndexDocumentSettings + +insertData' :: IndexDocumentSettings -> BH IO Reply +insertData' ids = do + r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1") + _ <- refreshIndex testIndex + return r + +updateData :: BH IO Reply +updateData = do + r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1") + _ <- refreshIndex testIndex + return r + +insertOther :: BH IO () +insertOther = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2") + _ <- refreshIndex testIndex + return () + +insertExtra :: BH IO () +insertExtra = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4") + _ <- refreshIndex testIndex + return () + +insertWithSpaceInId :: BH IO () +insertWithSpaceInId = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World") + _ <- refreshIndex testIndex + return () + +searchTweet :: Search -> BH IO (Either EsError Tweet) +searchTweet search = do + result <- searchTweets search + let myTweet :: Either EsError Tweet + myTweet = grabFirst result + return myTweet + +searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet)) +searchTweets search = parseEsResponse =<< searchByIndex testIndex search + +searchExpectNoResults :: Search -> BH IO () +searchExpectNoResults search = do + result <- searchTweets search + let emptyHits = fmap (hits . searchHits) result + liftIO $ + emptyHits `shouldBe` Right [] + +searchExpectAggs :: Search -> BH IO () +searchExpectAggs search = do + reply <- searchByIndex testIndex search + let isEmpty x = return (M.null x) + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + liftIO $ + (result >>= aggregations >>= isEmpty) `shouldBe` Just False + +searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) => + Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO () +searchValidBucketAgg search aggKey extractor = do + reply <- searchByIndex testIndex search + let bucketDocs = docCount . head . buckets + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x) + liftIO $ + count `shouldBe` Just 1 + +searchTermsAggHint :: [ExecutionHint] -> BH IO () +searchTermsAggHint hints = do + let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint } + let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint + forM_ hints $ searchExpectAggs . search + forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms) + +searchTweetHighlight :: Search + -> BH IO (Either EsError (Maybe HitHighlight)) +searchTweetHighlight search = do + result <- searchTweets search + let tweetHit :: Either EsError (Maybe (Hit Tweet)) + tweetHit = fmap (headMay . hits . searchHits) result + myHighlight :: Either EsError (Maybe HitHighlight) + myHighlight = (join . 
fmap hitHighlight) <$> tweetHit + return myHighlight + +searchExpectSource :: Source -> Either EsError Value -> BH IO () +searchExpectSource src expected = do + _ <- insertData + let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") + let search = (mkSearch (Just query) Nothing) { source = Just src } + reply <- searchByIndex testIndex search + result <- parseEsResponse reply + let value = grabFirst result + liftIO $ + value `shouldBe` expected + +atleast :: Vers.Version -> IO Bool +atleast v = getServerVersion >>= \x -> return $ x >= Just v + +atmost :: Vers.Version -> IO Bool +atmost v = getServerVersion >>= \x -> return $ x <= Just v + +is :: Vers.Version -> IO Bool +is v = getServerVersion >>= \x -> return $ x == Just v diff --git a/tests/V1/Test/Documents.hs b/tests/V1/Test/Documents.hs new file mode 100644 index 0000000..7eaa1e6 --- /dev/null +++ b/tests/V1/Test/Documents.hs @@ -0,0 +1,52 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Documents where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "document API" $ do + it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do + _ <- insertData + _ <- updateData + docInserted <- getDocument testIndex testMapping (DocId "1") + let newTweet = eitherDecode + (responseBody docInserted) :: Either String (EsResult Tweet) + liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet) + + it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do + _ <- insertWithSpaceInId + docInserted <- getDocument testIndex testMapping (DocId "Hello World") + let newTweet = eitherDecode + (responseBody docInserted) :: Either String (EsResult Tweet) + liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet) + + it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do + doc <- getDocument testIndex testMapping (DocId "bogus") + let noTweet = eitherDecode + (responseBody doc) :: Either String (EsResult Tweet) + liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing + + it "can use optimistic concurrency control" $ withTestEnv $ do + let ev = ExternalDocVersion minBound + let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev } + resetIndex + res <- insertData' cfg + liftIO $ isCreated res `shouldBe` True + res' <- insertData' cfg + liftIO $ isVersionConflict res' `shouldBe` True + + it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do + resetIndex + let validateStatus' stat = liftIO . flip validateStatus stat + _ <- validateStatus' 200 =<< putMapping testIndex (MappingName "child") ChildMapping + _ <- validateStatus' 200 =<< putMapping testIndex (MappingName "parent") ParentMapping + _ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1") + let parent = (Just . DocumentParent . 
DocId) "1" + ids = IndexDocumentSettings NoVersionControl parent + _ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2") + _ <- refreshIndex testIndex + exists <- documentExists testIndex (MappingName "child") parent (DocId "2") + liftIO $ exists `shouldBe` True diff --git a/tests/V1/Test/Generators.hs b/tests/V1/Test/Generators.hs new file mode 100644 index 0000000..eac7052 --- /dev/null +++ b/tests/V1/Test/Generators.hs @@ -0,0 +1,432 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} + +module Test.Generators where + +import Database.V1.Bloodhound + +import Test.Import + +import qualified Data.HashMap.Strict as HM +import qualified Data.List as L +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as M +import qualified Data.Text as T +import qualified Data.Version as Vers +import Test.QuickCheck.TH.Generators + +import Test.ApproxEq + +instance Arbitrary NominalDiffTime where + arbitrary = fromInteger <$> arbitrary + +#if !MIN_VERSION_QuickCheck(2,8,0) +instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where + arbitrary = M.fromList <$> arbitrary +#endif + +instance Arbitrary Text where + arbitrary = T.pack <$> arbitrary + +instance Arbitrary UTCTime where + arbitrary = UTCTime + <$> arbitrary + <*> (fromRational . toRational <$> choose (0::Double, 86400)) + +instance Arbitrary Day where + arbitrary = + ModifiedJulianDay . (2000 +) <$> arbitrary + shrink = + (ModifiedJulianDay <$>) . shrink . toModifiedJulianDay + +#if !MIN_VERSION_QuickCheck(2,9,0) +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = liftA2 (:|) arbitrary arbitrary +#endif + +arbitraryScore :: Gen Score +arbitraryScore = fmap getPositive <$> arbitrary + +instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where + arbitrary = Hit <$> arbitrary + <*> arbitrary + <*> arbitrary + <*> arbitraryScore + <*> arbitrary + <*> arbitrary + +instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where + arbitrary = reduceSize $ do + tot <- getPositive <$> arbitrary + score <- arbitraryScore + hs <- arbitrary + return $ SearchHits tot score hs + + +reduceSize :: Gen a -> Gen a +reduceSize f = sized $ \n -> resize (n `div` 2) f + +arbitraryAlphaNum :: Gen Char +arbitraryAlphaNum = oneof [choose ('a', 'z') + ,choose ('A','Z') + , choose ('0', '9')] + +instance Arbitrary RoutingValue where + arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum + +instance Arbitrary AliasRouting where + arbitrary = oneof [allAlias + ,one + ,theOther + ,both'] + where one = GranularAliasRouting + <$> (Just <$> arbitrary) + <*> pure Nothing + theOther = GranularAliasRouting Nothing + <$> (Just <$> arbitrary) + both' = GranularAliasRouting + <$> (Just <$> arbitrary) + <*> (Just <$> arbitrary) + allAlias = AllAliasRouting <$> arbitrary + + + +instance Arbitrary FieldName where + arbitrary = + FieldName + . T.pack + <$> listOf1 arbitraryAlphaNum + + +#if MIN_VERSION_base(4,10,0) +-- Test.QuickCheck.Modifiers + +qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a +qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs) +qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!" 
+ +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = + qcNonEmptyToNonEmpty + <$> arbitrary +#endif + +instance Arbitrary RegexpFlags where + arbitrary = oneof [ pure AllRegexpFlags + , pure NoRegexpFlags + , SomeRegexpFlags <$> genUniqueFlags + ] + where genUniqueFlags = + NE.fromList . L.nub + <$> listOf1 arbitrary + +instance Arbitrary IndexAliasCreate where + arbitrary = + IndexAliasCreate + <$> arbitrary + <*> reduceSize arbitrary + +instance Arbitrary ReplicaBounds where + arbitrary = oneof [ replicasBounded + , replicasLowerBounded + , pure ReplicasUnbounded + ] + where replicasBounded = do + Positive a <- arbitrary + Positive b <- arbitrary + return (ReplicasBounded a b) + replicasLowerBounded = do + Positive a <- arbitrary + return (ReplicasLowerBounded a) + +instance Arbitrary NodeAttrName where + arbitrary = + NodeAttrName + . T.pack + <$> listOf1 arbitraryAlphaNum + + +instance Arbitrary NodeAttrFilter where + arbitrary = do + n <- arbitrary + s:ss <- listOf1 (listOf1 arbitraryAlphaNum) + let ts = T.pack <$> s :| ss + return (NodeAttrFilter n ts) + +instance Arbitrary VersionNumber where + arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary + where + mk versions = VersionNumber (Vers.Version versions []) + +instance Arbitrary TemplateQueryKeyValuePairs where + arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary + shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . shrink $ HM.toList x + +makeArbitrary ''FilteredQuery +instance Arbitrary FilteredQuery where arbitrary = reduceSize arbitraryFilteredQuery +makeArbitrary ''Query +instance Arbitrary Query where arbitrary = reduceSize arbitraryQuery +makeArbitrary ''Filter +instance Arbitrary Filter where arbitrary = reduceSize arbitraryFilter +makeArbitrary ''IndexName +instance Arbitrary IndexName where arbitrary = arbitraryIndexName +makeArbitrary ''MappingName +instance Arbitrary MappingName where arbitrary = arbitraryMappingName +makeArbitrary ''DocId +instance Arbitrary DocId where arbitrary = arbitraryDocId +makeArbitrary ''Version +instance Arbitrary Version where arbitrary = arbitraryVersion +makeArbitrary ''BuildHash +instance Arbitrary BuildHash where arbitrary = arbitraryBuildHash +makeArbitrary ''IndexAliasRouting +instance Arbitrary IndexAliasRouting where arbitrary = arbitraryIndexAliasRouting +makeArbitrary ''ShardCount +instance Arbitrary ShardCount where arbitrary = arbitraryShardCount +makeArbitrary ''ReplicaCount +instance Arbitrary ReplicaCount where arbitrary = arbitraryReplicaCount +makeArbitrary ''TemplateName +instance Arbitrary TemplateName where arbitrary = arbitraryTemplateName +makeArbitrary ''TemplatePattern +instance Arbitrary TemplatePattern where arbitrary = arbitraryTemplatePattern +makeArbitrary ''QueryString +instance Arbitrary QueryString where arbitrary = arbitraryQueryString +makeArbitrary ''CacheName +instance Arbitrary CacheName where arbitrary = arbitraryCacheName +makeArbitrary ''CacheKey +instance Arbitrary CacheKey where arbitrary = arbitraryCacheKey +makeArbitrary ''Existence +instance Arbitrary Existence where arbitrary = arbitraryExistence +makeArbitrary ''CutoffFrequency +instance Arbitrary CutoffFrequency where arbitrary = arbitraryCutoffFrequency +makeArbitrary ''Analyzer +instance Arbitrary Analyzer where arbitrary = arbitraryAnalyzer +makeArbitrary ''MaxExpansions +instance Arbitrary MaxExpansions where arbitrary = arbitraryMaxExpansions +makeArbitrary ''Lenient +instance Arbitrary Lenient where arbitrary 
= arbitraryLenient +makeArbitrary ''Tiebreaker +instance Arbitrary Tiebreaker where arbitrary = arbitraryTiebreaker +makeArbitrary ''Boost +instance Arbitrary Boost where arbitrary = arbitraryBoost +makeArbitrary ''BoostTerms +instance Arbitrary BoostTerms where arbitrary = arbitraryBoostTerms +makeArbitrary ''MinimumMatch +instance Arbitrary MinimumMatch where arbitrary = arbitraryMinimumMatch +makeArbitrary ''DisableCoord +instance Arbitrary DisableCoord where arbitrary = arbitraryDisableCoord +makeArbitrary ''IgnoreTermFrequency +instance Arbitrary IgnoreTermFrequency where arbitrary = arbitraryIgnoreTermFrequency +makeArbitrary ''MinimumTermFrequency +instance Arbitrary MinimumTermFrequency where arbitrary = arbitraryMinimumTermFrequency +makeArbitrary ''MaxQueryTerms +instance Arbitrary MaxQueryTerms where arbitrary = arbitraryMaxQueryTerms +makeArbitrary ''Fuzziness +instance Arbitrary Fuzziness where arbitrary = arbitraryFuzziness +makeArbitrary ''PrefixLength +instance Arbitrary PrefixLength where arbitrary = arbitraryPrefixLength +makeArbitrary ''TypeName +instance Arbitrary TypeName where arbitrary = arbitraryTypeName +makeArbitrary ''PercentMatch +instance Arbitrary PercentMatch where arbitrary = arbitraryPercentMatch +makeArbitrary ''StopWord +instance Arbitrary StopWord where arbitrary = arbitraryStopWord +makeArbitrary ''QueryPath +instance Arbitrary QueryPath where arbitrary = arbitraryQueryPath +makeArbitrary ''AllowLeadingWildcard +instance Arbitrary AllowLeadingWildcard where arbitrary = arbitraryAllowLeadingWildcard +makeArbitrary ''LowercaseExpanded +instance Arbitrary LowercaseExpanded where arbitrary = arbitraryLowercaseExpanded +makeArbitrary ''EnablePositionIncrements +instance Arbitrary EnablePositionIncrements where arbitrary = arbitraryEnablePositionIncrements +makeArbitrary ''AnalyzeWildcard +instance Arbitrary AnalyzeWildcard where arbitrary = arbitraryAnalyzeWildcard +makeArbitrary ''GeneratePhraseQueries +instance Arbitrary GeneratePhraseQueries where arbitrary = arbitraryGeneratePhraseQueries +makeArbitrary ''Locale +instance Arbitrary Locale where arbitrary = arbitraryLocale +makeArbitrary ''MaxWordLength +instance Arbitrary MaxWordLength where arbitrary = arbitraryMaxWordLength +makeArbitrary ''MinWordLength +instance Arbitrary MinWordLength where arbitrary = arbitraryMinWordLength +makeArbitrary ''PhraseSlop +instance Arbitrary PhraseSlop where arbitrary = arbitraryPhraseSlop +makeArbitrary ''MinDocFrequency +instance Arbitrary MinDocFrequency where arbitrary = arbitraryMinDocFrequency +makeArbitrary ''MaxDocFrequency +instance Arbitrary MaxDocFrequency where arbitrary = arbitraryMaxDocFrequency +makeArbitrary ''Regexp +instance Arbitrary Regexp where arbitrary = arbitraryRegexp +makeArbitrary ''SimpleQueryStringQuery +instance Arbitrary SimpleQueryStringQuery where arbitrary = arbitrarySimpleQueryStringQuery +makeArbitrary ''FieldOrFields +instance Arbitrary FieldOrFields where arbitrary = arbitraryFieldOrFields +makeArbitrary ''SimpleQueryFlag +instance Arbitrary SimpleQueryFlag where arbitrary = arbitrarySimpleQueryFlag +makeArbitrary ''RegexpQuery +instance Arbitrary RegexpQuery where arbitrary = arbitraryRegexpQuery +makeArbitrary ''QueryStringQuery +instance Arbitrary QueryStringQuery where arbitrary = arbitraryQueryStringQuery +makeArbitrary ''RangeQuery +instance Arbitrary RangeQuery where arbitrary = arbitraryRangeQuery +makeArbitrary ''RangeValue +instance Arbitrary RangeValue where arbitrary = arbitraryRangeValue +makeArbitrary ''PrefixQuery 
+instance Arbitrary PrefixQuery where arbitrary = arbitraryPrefixQuery +makeArbitrary ''NestedQuery +instance Arbitrary NestedQuery where arbitrary = arbitraryNestedQuery +makeArbitrary ''MoreLikeThisFieldQuery +instance Arbitrary MoreLikeThisFieldQuery where arbitrary = arbitraryMoreLikeThisFieldQuery +makeArbitrary ''MoreLikeThisQuery +instance Arbitrary MoreLikeThisQuery where arbitrary = arbitraryMoreLikeThisQuery +makeArbitrary ''IndicesQuery +instance Arbitrary IndicesQuery where arbitrary = arbitraryIndicesQuery +makeArbitrary ''HasParentQuery +instance Arbitrary HasParentQuery where arbitrary = arbitraryHasParentQuery +makeArbitrary ''HasChildQuery +instance Arbitrary HasChildQuery where arbitrary = arbitraryHasChildQuery +makeArbitrary ''FuzzyQuery +instance Arbitrary FuzzyQuery where arbitrary = arbitraryFuzzyQuery +makeArbitrary ''FuzzyLikeFieldQuery +instance Arbitrary FuzzyLikeFieldQuery where arbitrary = arbitraryFuzzyLikeFieldQuery +makeArbitrary ''FuzzyLikeThisQuery +instance Arbitrary FuzzyLikeThisQuery where arbitrary = arbitraryFuzzyLikeThisQuery +makeArbitrary ''DisMaxQuery +instance Arbitrary DisMaxQuery where arbitrary = arbitraryDisMaxQuery +makeArbitrary ''CommonTermsQuery +instance Arbitrary CommonTermsQuery where arbitrary = arbitraryCommonTermsQuery +makeArbitrary ''DistanceRange +instance Arbitrary DistanceRange where arbitrary = arbitraryDistanceRange +makeArbitrary ''MultiMatchQuery +instance Arbitrary MultiMatchQuery where arbitrary = arbitraryMultiMatchQuery +makeArbitrary ''LessThanD +instance Arbitrary LessThanD where arbitrary = arbitraryLessThanD +makeArbitrary ''LessThanEqD +instance Arbitrary LessThanEqD where arbitrary = arbitraryLessThanEqD +makeArbitrary ''GreaterThanD +instance Arbitrary GreaterThanD where arbitrary = arbitraryGreaterThanD +makeArbitrary ''GreaterThanEqD +instance Arbitrary GreaterThanEqD where arbitrary = arbitraryGreaterThanEqD +makeArbitrary ''LessThan +instance Arbitrary LessThan where arbitrary = arbitraryLessThan +makeArbitrary ''LessThanEq +instance Arbitrary LessThanEq where arbitrary = arbitraryLessThanEq +makeArbitrary ''GreaterThan +instance Arbitrary GreaterThan where arbitrary = arbitraryGreaterThan +makeArbitrary ''GreaterThanEq +instance Arbitrary GreaterThanEq where arbitrary = arbitraryGreaterThanEq +makeArbitrary ''GeoPoint +instance Arbitrary GeoPoint where arbitrary = arbitraryGeoPoint +makeArbitrary ''NullValue +instance Arbitrary NullValue where arbitrary = arbitraryNullValue +makeArbitrary ''MinimumMatchHighLow +instance Arbitrary MinimumMatchHighLow where arbitrary = arbitraryMinimumMatchHighLow +makeArbitrary ''CommonMinimumMatch +instance Arbitrary CommonMinimumMatch where arbitrary = arbitraryCommonMinimumMatch +makeArbitrary ''BoostingQuery +instance Arbitrary BoostingQuery where arbitrary = arbitraryBoostingQuery +makeArbitrary ''BoolQuery +instance Arbitrary BoolQuery where arbitrary = arbitraryBoolQuery +makeArbitrary ''MatchQuery +instance Arbitrary MatchQuery where arbitrary = arbitraryMatchQuery +makeArbitrary ''MultiMatchQueryType +instance Arbitrary MultiMatchQueryType where arbitrary = arbitraryMultiMatchQueryType +makeArbitrary ''BooleanOperator +instance Arbitrary BooleanOperator where arbitrary = arbitraryBooleanOperator +makeArbitrary ''ZeroTermsQuery +instance Arbitrary ZeroTermsQuery where arbitrary = arbitraryZeroTermsQuery +makeArbitrary ''MatchQueryType +instance Arbitrary MatchQueryType where arbitrary = arbitraryMatchQueryType +makeArbitrary ''SearchAliasRouting +instance Arbitrary 
SearchAliasRouting where arbitrary = arbitrarySearchAliasRouting +makeArbitrary ''ScoreType +instance Arbitrary ScoreType where arbitrary = arbitraryScoreType +makeArbitrary ''Distance +instance Arbitrary Distance where arbitrary = arbitraryDistance +makeArbitrary ''DistanceUnit +instance Arbitrary DistanceUnit where arbitrary = arbitraryDistanceUnit +makeArbitrary ''DistanceType +instance Arbitrary DistanceType where arbitrary = arbitraryDistanceType +makeArbitrary ''OptimizeBbox +instance Arbitrary OptimizeBbox where arbitrary = arbitraryOptimizeBbox +makeArbitrary ''GeoBoundingBoxConstraint +instance Arbitrary GeoBoundingBoxConstraint where arbitrary = arbitraryGeoBoundingBoxConstraint +makeArbitrary ''GeoFilterType +instance Arbitrary GeoFilterType where arbitrary = arbitraryGeoFilterType +makeArbitrary ''GeoBoundingBox +instance Arbitrary GeoBoundingBox where arbitrary = arbitraryGeoBoundingBox +makeArbitrary ''LatLon +instance Arbitrary LatLon where arbitrary = arbitraryLatLon +makeArbitrary ''RangeExecution +instance Arbitrary RangeExecution where arbitrary = arbitraryRangeExecution +makeArbitrary ''RegexpFlag +instance Arbitrary RegexpFlag where arbitrary = arbitraryRegexpFlag +makeArbitrary ''BoolMatch +instance Arbitrary BoolMatch where arbitrary = arbitraryBoolMatch +makeArbitrary ''Term +instance Arbitrary Term where arbitrary = arbitraryTerm +makeArbitrary ''IndexSettings +instance Arbitrary IndexSettings where arbitrary = arbitraryIndexSettings +makeArbitrary ''UpdatableIndexSetting +instance Arbitrary UpdatableIndexSetting where + arbitrary = arbitraryUpdatableIndexSetting +makeArbitrary ''Bytes +instance Arbitrary Bytes where arbitrary = arbitraryBytes +makeArbitrary ''AllocationPolicy +instance Arbitrary AllocationPolicy where arbitrary = arbitraryAllocationPolicy +makeArbitrary ''InitialShardCount +instance Arbitrary InitialShardCount where arbitrary = arbitraryInitialShardCount +makeArbitrary ''FSType +instance Arbitrary FSType where arbitrary = arbitraryFSType +makeArbitrary ''CompoundFormat +instance Arbitrary CompoundFormat where arbitrary = arbitraryCompoundFormat +makeArbitrary ''FsSnapshotRepo +instance Arbitrary FsSnapshotRepo where arbitrary = arbitraryFsSnapshotRepo +makeArbitrary ''SnapshotRepoName +instance Arbitrary SnapshotRepoName where arbitrary = arbitrarySnapshotRepoName +makeArbitrary ''TemplateQueryInline +instance Arbitrary TemplateQueryInline where arbitrary = arbitraryTemplateQueryInline +makeArbitrary ''DirectGeneratorSuggestModeTypes +instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = arbitraryDirectGeneratorSuggestModeTypes +makeArbitrary ''DirectGenerators +instance Arbitrary DirectGenerators where arbitrary = arbitraryDirectGenerators +makeArbitrary ''PhraseSuggesterCollate +instance Arbitrary PhraseSuggesterCollate where arbitrary = arbitraryPhraseSuggesterCollate +makeArbitrary ''PhraseSuggesterHighlighter +instance Arbitrary PhraseSuggesterHighlighter where arbitrary = arbitraryPhraseSuggesterHighlighter +makeArbitrary ''Size +instance Arbitrary Size where arbitrary = arbitrarySize +makeArbitrary ''PhraseSuggester +instance Arbitrary PhraseSuggester where arbitrary = arbitraryPhraseSuggester +makeArbitrary ''SuggestType +instance Arbitrary SuggestType where arbitrary = arbitrarySuggestType +makeArbitrary ''Suggest +instance Arbitrary Suggest where arbitrary = arbitrarySuggest + + +makeArbitrary ''Script +instance Arbitrary Script where arbitrary = arbitraryScript + +newtype UpdatableIndexSetting' = + UpdatableIndexSetting' 
UpdatableIndexSetting + deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable) + +instance Arbitrary UpdatableIndexSetting' where + arbitrary = do + settings <- arbitrary + return $ UpdatableIndexSetting' $ case settings of + RoutingAllocationInclude xs -> + RoutingAllocationInclude (dropDuplicateAttrNames xs) + RoutingAllocationExclude xs -> + RoutingAllocationExclude (dropDuplicateAttrNames xs) + RoutingAllocationRequire xs -> + RoutingAllocationRequire (dropDuplicateAttrNames xs) + x -> x + where + dropDuplicateAttrNames = + NE.fromList . L.nubBy sameAttrName . NE.toList + sameAttrName a b = + nodeAttrFilterName a == nodeAttrFilterName b + -- shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (shrink x) diff --git a/tests/V1/Test/Highlights.hs b/tests/V1/Test/Highlights.hs new file mode 100644 index 0000000..baa2234 --- /dev/null +++ b/tests/V1/Test/Highlights.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Highlights where + +import Test.Common +import Test.Import + +import qualified Data.Map as M + +initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight)) +initHighlights fieldName = do + _ <- insertData + _ <- insertOther + let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell") + let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing] + let search = mkHighlightSearch (Just query) testHighlight + searchTweetHighlight search + +spec :: Spec +spec = + describe "Highlights API" $ do + it "returns highlight from query when there should be one" $ withTestEnv $ do + myHighlight <- initHighlights "message" + liftIO $ + myHighlight `shouldBe` + Right (Just (M.fromList [("message", ["Use haskell!"])])) + + it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do + myHighlight <- initHighlights "user" + liftIO $ + myHighlight `shouldBe` + Right Nothing diff --git a/tests/V1/Test/Import.hs b/tests/V1/Test/Import.hs new file mode 100644 index 0000000..5af7ffb --- /dev/null +++ b/tests/V1/Test/Import.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Import + ( module X + , module Test.Import + ) where + + +import Control.Applicative as X +import Control.Exception as X (evaluate) +import Control.Monad as X +import Control.Monad.Catch as X +import Control.Monad.Reader as X +import Data.Aeson as X +import Data.Aeson.TH as X +import Data.Aeson.Types as X (parseEither) +import Data.Maybe as X +import Data.List.NonEmpty as X (NonEmpty(..)) +import Data.Monoid as X +import Data.Ord as X (comparing) +import Data.Proxy as X +import Data.Text as X (Text) +import Data.Time.Calendar as X (Day(..), fromGregorian) +import Data.Time.Clock as X +import Data.Typeable as X +import Database.V1.Bloodhound as X hiding (key) +import Lens.Micro as X +import Lens.Micro.Aeson as X +import Network.HTTP.Client as X hiding (Proxy, fileSize) +import System.IO.Temp as X +import System.PosixCompat.Files as X +import Test.Hspec as X +import Test.Hspec.QuickCheck as X (prop) +import Test.QuickCheck as X hiding (Result, Success) +import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid) +import Text.Pretty.Simple as X (pPrint) + +import qualified Data.List as L + +noDuplicates :: Eq a => [a] -> Bool +noDuplicates xs = L.nub xs == xs + +getSource :: EsResult a -> Maybe a +getSource = fmap _source . foundResult + +grabFirst :: Either EsError (SearchResult a) -> Either EsError a +grabFirst r = + case fmap (hitSource . head . hits . 
searchHits) r of + (Left e) -> Left e + (Right Nothing) -> Left (EsError 500 "Source was missing") + (Right (Just x)) -> Right x + +when' :: Monad m => m Bool -> m () -> m () +when' b f = b >>= \x -> when x f + +headMay :: [a] -> Maybe a +headMay (x : _) = Just x +headMay _ = Nothing diff --git a/tests/V1/Test/Indices.hs b/tests/V1/Test/Indices.hs new file mode 100644 index 0000000..75cf585 --- /dev/null +++ b/tests/V1/Test/Indices.hs @@ -0,0 +1,78 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Indices where + +import Test.Common +import Test.Import + +import qualified Data.List as L +import qualified Data.List.NonEmpty as NE + +spec :: Spec +spec = do + describe "Index create/delete API" $ do + it "creates and then deletes the requested index" $ withTestEnv $ do + -- priming state. + _ <- deleteExampleIndex + resp <- createExampleIndex + deleteResp <- deleteExampleIndex + liftIO $ do + validateStatus resp 200 + validateStatus deleteResp 200 + + describe "Index aliases" $ do + let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias") + let alias = IndexAlias (testIndex) aname + let create = IndexAliasCreate Nothing Nothing + let action = AddAlias alias create + it "handles the simple case of aliasing an existing index" $ do + withTestEnv $ do + resetIndex + resp <- updateIndexAliases (action :| []) + liftIO $ validateStatus resp 200 + let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| [])) + (do aliases <- withTestEnv getIndexAliases + let expected = IndexAliasSummary alias create + case aliases of + Right (IndexAliasesSummary summs) -> + L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected + Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup + + describe "Index Listing" $ do + it "returns a list of index names" $ withTestEnv $ do + _ <- createExampleIndex + ixns <- listIndices + liftIO (ixns `shouldContain` [testIndex]) + + describe "Index Settings" $ do + it "persists settings" $ withTestEnv $ do + _ <- deleteExampleIndex + _ <- createExampleIndex + let updates = BlocksWrite False :| [] + updateResp <- updateIndexSettings updates testIndex + liftIO $ validateStatus updateResp 200 + getResp <- getIndexSettings testIndex + liftIO $ + getResp `shouldBe` Right (IndexSettingsSummary + testIndex + (IndexSettings (ShardCount 1) (ReplicaCount 0)) + (NE.toList updates)) + + it "allows total fields to be set" $ when' (atleast es50) $ withTestEnv $ do + _ <- deleteExampleIndex + _ <- createExampleIndex + let updates = FailOnMergeFailure True :| [] + updateResp <- updateIndexSettings updates testIndex + liftIO $ validateStatus updateResp 200 + getResp <- getIndexSettings testIndex + liftIO $ + getResp `shouldBe` Right (IndexSettingsSummary + testIndex + (IndexSettings (ShardCount 1) (ReplicaCount 0)) + (NE.toList updates)) + + describe "Index Optimization" $ do + it "returns a successful response upon completion" $ withTestEnv $ do + _ <- createExampleIndex + resp <- optimizeIndex (IndexList (testIndex :| [])) defaultIndexOptimizationSettings + liftIO $ validateStatus resp 200 diff --git a/tests/V1/Test/JSON.hs b/tests/V1/Test/JSON.hs new file mode 100644 index 0000000..cc8dc9b --- /dev/null +++ b/tests/V1/Test/JSON.hs @@ -0,0 +1,202 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module Test.JSON (spec) where + +import Test.Import + +import qualified Data.ByteString.Lazy.Char8 as BL8 +import qualified Data.List as L +import Data.List.NonEmpty 
(NonEmpty(..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.HashMap.Strict as HM +import qualified Data.Text as T +import qualified Data.Vector as V + +import Test.ApproxEq +import Test.Generators + +propJSON :: forall a + . ( Arbitrary a + , ToJSON a + , FromJSON a + , Show a + , Eq a + , Typeable a + ) + => Proxy a -> Spec +propJSON _ = prop testName $ \(a :: a) -> + let jsonStr = "via " <> BL8.unpack (encode a) + in counterexample jsonStr (parseEither parseJSON (toJSON a) + === Right a) + where testName = show ty <> " FromJSON/ToJSON roundtrips" + ty = typeOf (undefined :: a) + +propApproxJSON :: forall a + . ( Arbitrary a + , ToJSON a + , FromJSON a + , Show a + , ApproxEq a + , Typeable a + ) + => Proxy a -> Spec +propApproxJSON _ = prop testName $ \(a :: a) -> + let jsonStr = "via " <> BL8.unpack (encode a) + in counterexample jsonStr (parseEither parseJSON (toJSON a) + ==~ Right a) + where testName = show ty <> " FromJSON/ToJSON roundtrips" + ty = typeOf (undefined :: a) + +spec :: Spec +spec = do + describe "ToJSON RegexpFlags" $ do + it "generates the correct JSON for AllRegexpFlags" $ + toJSON AllRegexpFlags `shouldBe` String "ALL" + + it "generates the correct JSON for NoRegexpFlags" $ + toJSON NoRegexpFlags `shouldBe` String "NONE" + + it "generates the correct JSON for SomeRegexpFlags" $ + let flags = AnyString :| [ Automaton + , Complement + , Empty + , Intersection + , Interval ] + in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL" + + prop "removes duplicates from flags" $ \(flags :: RegexpFlags) -> + let String str = toJSON flags + flagStrs = T.splitOn "|" str + in noDuplicates flagStrs + + describe "omitNulls" $ do + it "checks that omitNulls drops list elements when it should" $ + let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int])) + , "test2" .= (toJSON ("some value" :: Text))] + in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) + + it "checks that omitNulls doesn't drop list elements when it shouldn't" $ + let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int])) + , "test2" .= (toJSON ("some value" :: Text))] + in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0])) + , ("test2", String "some value")]) + it "checks that omitNulls drops non list elements when it should" $ + let dropped = omitNulls $ [ "test1" .= (toJSON Null) + , "test2" .= (toJSON ("some value" :: Text))] + in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) + it "checks that omitNulls doesn't drop non list elements when it shouldn't" $ + let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int)) + , "test2" .= (toJSON ("some value" :: Text))] + in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0) + , ("test2", String "some value")]) + + describe "Exact isomorphism JSON instances" $ do + propJSON (Proxy :: Proxy Version) + propJSON (Proxy :: Proxy IndexName) + propJSON (Proxy :: Proxy MappingName) + propJSON (Proxy :: Proxy DocId) + propJSON (Proxy :: Proxy IndexAliasRouting) + propJSON (Proxy :: Proxy RoutingValue) + propJSON (Proxy :: Proxy ShardCount) + propJSON (Proxy :: Proxy ReplicaCount) + propJSON (Proxy :: Proxy TemplateName) + propJSON (Proxy :: Proxy TemplatePattern) + propJSON (Proxy :: Proxy QueryString) + propJSON (Proxy :: Proxy FieldName) + propJSON (Proxy :: Proxy CacheName) + propJSON (Proxy :: Proxy CacheKey) + propJSON (Proxy :: Proxy Existence) + propJSON (Proxy :: Proxy 
CutoffFrequency) + propJSON (Proxy :: Proxy Analyzer) + propJSON (Proxy :: Proxy MaxExpansions) + propJSON (Proxy :: Proxy Lenient) + propJSON (Proxy :: Proxy Tiebreaker) + propJSON (Proxy :: Proxy Boost) + propJSON (Proxy :: Proxy BoostTerms) + propJSON (Proxy :: Proxy MinimumMatch) + propJSON (Proxy :: Proxy DisableCoord) + propJSON (Proxy :: Proxy IgnoreTermFrequency) + propJSON (Proxy :: Proxy MinimumTermFrequency) + propJSON (Proxy :: Proxy MaxQueryTerms) + propJSON (Proxy :: Proxy Fuzziness) + propJSON (Proxy :: Proxy PrefixLength) + propJSON (Proxy :: Proxy TypeName) + propJSON (Proxy :: Proxy PercentMatch) + propJSON (Proxy :: Proxy StopWord) + propJSON (Proxy :: Proxy QueryPath) + propJSON (Proxy :: Proxy AllowLeadingWildcard) + propJSON (Proxy :: Proxy LowercaseExpanded) + propJSON (Proxy :: Proxy EnablePositionIncrements) + propJSON (Proxy :: Proxy AnalyzeWildcard) + propJSON (Proxy :: Proxy GeneratePhraseQueries) + propJSON (Proxy :: Proxy Locale) + propJSON (Proxy :: Proxy MaxWordLength) + propJSON (Proxy :: Proxy MinWordLength) + propJSON (Proxy :: Proxy PhraseSlop) + propJSON (Proxy :: Proxy MinDocFrequency) + propJSON (Proxy :: Proxy MaxDocFrequency) + propJSON (Proxy :: Proxy Filter) + propJSON (Proxy :: Proxy Query) + propJSON (Proxy :: Proxy SimpleQueryStringQuery) + propJSON (Proxy :: Proxy FieldOrFields) + propJSON (Proxy :: Proxy SimpleQueryFlag) + propJSON (Proxy :: Proxy RegexpQuery) + propJSON (Proxy :: Proxy QueryStringQuery) + propJSON (Proxy :: Proxy RangeQuery) + propJSON (Proxy :: Proxy PrefixQuery) + propJSON (Proxy :: Proxy NestedQuery) + propJSON (Proxy :: Proxy MoreLikeThisFieldQuery) + propJSON (Proxy :: Proxy MoreLikeThisQuery) + propJSON (Proxy :: Proxy IndicesQuery) + propJSON (Proxy :: Proxy HasParentQuery) + propJSON (Proxy :: Proxy HasChildQuery) + propJSON (Proxy :: Proxy FuzzyQuery) + propJSON (Proxy :: Proxy FuzzyLikeFieldQuery) + propJSON (Proxy :: Proxy FuzzyLikeThisQuery) + propJSON (Proxy :: Proxy DisMaxQuery) + propJSON (Proxy :: Proxy CommonTermsQuery) + propJSON (Proxy :: Proxy CommonMinimumMatch) + propJSON (Proxy :: Proxy BoostingQuery) + propJSON (Proxy :: Proxy BoolQuery) + propJSON (Proxy :: Proxy MatchQuery) + propJSON (Proxy :: Proxy MultiMatchQueryType) + propJSON (Proxy :: Proxy BooleanOperator) + propJSON (Proxy :: Proxy ZeroTermsQuery) + propJSON (Proxy :: Proxy MatchQueryType) + propJSON (Proxy :: Proxy AliasRouting) + propJSON (Proxy :: Proxy IndexAliasCreate) + propJSON (Proxy :: Proxy SearchAliasRouting) + propJSON (Proxy :: Proxy ScoreType) + propJSON (Proxy :: Proxy Distance) + propJSON (Proxy :: Proxy DistanceUnit) + propJSON (Proxy :: Proxy DistanceType) + propJSON (Proxy :: Proxy OptimizeBbox) + propJSON (Proxy :: Proxy GeoBoundingBoxConstraint) + propJSON (Proxy :: Proxy GeoFilterType) + propJSON (Proxy :: Proxy GeoBoundingBox) + propJSON (Proxy :: Proxy LatLon) + propJSON (Proxy :: Proxy RangeExecution) + prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs -> + let expected = case rfs of + SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (L.nub (NE.toList fs))) + x -> x + in parseEither parseJSON (toJSON rfs) === Right expected + propJSON (Proxy :: Proxy BoolMatch) + propJSON (Proxy :: Proxy Term) + propJSON (Proxy :: Proxy MultiMatchQuery) + propJSON (Proxy :: Proxy IndexSettings) + propJSON (Proxy :: Proxy CompoundFormat) + propJSON (Proxy :: Proxy TemplateQueryInline) + propJSON (Proxy :: Proxy Suggest) + propJSON (Proxy :: Proxy DirectGenerators) + propJSON (Proxy :: Proxy 
DirectGeneratorSuggestModeTypes) + + describe "Approximate isomorphism JSON instances" $ do + propApproxJSON (Proxy :: Proxy UpdatableIndexSetting') + propApproxJSON (Proxy :: Proxy ReplicaCount) + propApproxJSON (Proxy :: Proxy ReplicaBounds) + propApproxJSON (Proxy :: Proxy Bytes) + propApproxJSON (Proxy :: Proxy AllocationPolicy) + propApproxJSON (Proxy :: Proxy InitialShardCount) + propApproxJSON (Proxy :: Proxy FSType) diff --git a/tests/V1/Test/Query.hs b/tests/V1/Test/Query.hs new file mode 100644 index 0000000..811649e --- /dev/null +++ b/tests/V1/Test/Query.hs @@ -0,0 +1,115 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Query where + +import Test.Common +import Test.Import + +import qualified Data.HashMap.Strict as HM + +spec :: Spec +spec = + describe "query API" $ do + it "returns document for term query and identity filter" $ withTestEnv $ do + _ <- insertData + let query = TermQuery (Term "user" "bitemyapp") Nothing + let filter' = IdentityFilter + let search = mkSearch (Just query) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "handles constant score queries" $ withTestEnv $ do + _ <- insertData + let query = TermsQuery "user" ("bitemyapp" :| []) + let cfQuery = ConstantScoreQuery query (Boost 1.0) + let filter' = IdentityFilter + let search = mkSearch (Just cfQuery) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for terms query and identity filter" $ withTestEnv $ do + _ <- insertData + let query = TermsQuery "user" ("bitemyapp" :| []) + let filter' = IdentityFilter + let search = mkSearch (Just query) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for match query" $ withTestEnv $ do + _ <- insertData + let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for multi-match query" $ withTestEnv $ do + _ <- insertData + let flds = [FieldName "user", FieldName "message"] + let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp") + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do + _ <- insertData + let tiebreaker = Just $ Tiebreaker 0.3 + flds = [FieldName "user", FieldName "message"] + multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp") + query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker } + search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for bool query" $ withTestEnv $ do + _ <- insertData + let innerQuery = QueryMatchQuery $ + mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let query = QueryBoolQuery $ + mkBoolQuery [innerQuery] [] [] + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for boosting query" $ withTestEnv $ do + _ <- insertData + let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp") + let 
query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2) + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for common terms query" $ withTestEnv $ do + _ <- insertData + let query = QueryCommonTermsQuery $ + CommonTermsQuery (FieldName "user") + (QueryString "bitemyapp") + (CutoffFrequency 0.0001) + Or Or Nothing Nothing Nothing Nothing + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for inline template query" $ withTestEnv $ do + _ <- insertData + let innerQuery = QueryMatchQuery $ + mkMatchQuery (FieldName "{{userKey}}") + (QueryString "{{bitemyappKey}}") + templateParams = TemplateQueryKeyValuePairs $ HM.fromList + [ ("userKey", "user") + , ("bitemyappKey", "bitemyapp") + ] + templateQuery = QueryTemplateQueryInline $ + TemplateQueryInline innerQuery templateParams + search = mkSearch (Just templateQuery) Nothing + myTweet <- searchTweet search + liftIO $ myTweet `shouldBe` Right exampleTweet diff --git a/tests/V1/Test/Snapshots.hs b/tests/V1/Test/Snapshots.hs new file mode 100644 index 0000000..cead610 --- /dev/null +++ b/tests/V1/Test/Snapshots.hs @@ -0,0 +1,201 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE OverloadedStrings #-} + +module Test.Snapshots (spec) where + +import Test.Common +import Test.Import + +import Data.Maybe (fromMaybe) +import qualified Data.List as L +import qualified Data.HashMap.Strict as HM +import qualified Data.Text as T +import qualified Data.Vector as V +import qualified Network.HTTP.Types.Method as NHTM +import qualified Network.URI as URI + +import Test.Generators () + +spec :: Spec +spec = do + describe "FsSnapshotRepo" $ + prop "SnapshotRepo laws" $ \fsr -> + fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo) + + describe "Snapshot repos" $ do + it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do + res <- getSnapshotRepos AllSnapshotRepos + liftIO $ case res of + Left e -> expectationFailure ("Expected a Right but got Left " <> show e) + Right _ -> return () + + it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + let r2n = SnapshotRepoName "bloodhound-repo2" + withSnapshotRepo r1n $ \r1 -> + withSnapshotRepo r2n $ \r2 -> do + repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n])) + liftIO $ case repos of + Right xs -> do + let srt = L.sortBy (comparing gSnapshotRepoName) + srt xs `shouldBe` srt [r1, r2] + Left e -> expectationFailure (show e) + + it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \r1 -> do + let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1)) + let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing + resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression + liftIO (validateStatus resp 200) + Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [])) + liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression) + + -- repo verification seems to have come around in ES 1.4 + it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + 
withSnapshotRepo r1n $ \_ -> do + res <- verifySnapshotRepo r1n + liftIO $ case res of + Right (SnapshotVerification vs) + | null vs -> expectationFailure "Expected nonempty set of verifying nodes" + | otherwise -> return () + Left e -> expectationFailure (show e) + + describe "Snapshots" $ do + it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + res <- getSnapshots r1n AllSnapshots + liftIO $ case res of + Left e -> expectationFailure ("Expected a Right but got Left " <> show e) + Right _ -> return () + + it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + let s1n = SnapshotName "example-snapshot" + withSnapshot r1n s1n $ do + res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| [])) + liftIO $ case res of + Right [snap] + | snapInfoState snap == SnapshotSuccess && + snapInfoName snap == s1n -> return () + | otherwise -> expectationFailure (show snap) + Right [] -> expectationFailure "There were no snapshots" + Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps)) + Left e -> expectationFailure (show e) + + describe "Snapshot restore" $ do + it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + let s1n = SnapshotName "example-snapshot" + withSnapshot r1n s1n $ do + let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True } + -- have to close an index to restore it + resp1 <- closeIndex testIndex + liftIO (validateStatus resp1 200) + resp2 <- restoreSnapshot r1n s1n settings + liftIO (validateStatus resp2 200) + + it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + let s1n = SnapshotName "example-snapshot" + withSnapshot r1n s1n $ do + let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)" + let replace = RRTLit "restored-" :| [RRSubWholeMatch] + let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1" + oldEnoughForOverrides <- liftIO (atleast es15) + let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) } + let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True + , snapRestoreRenamePattern = Just pat + , snapRestoreRenameReplacement = Just replace + , snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides + then Just overrides + else Nothing + } + -- have to close an index to restore it + let go = do + resp <- restoreSnapshot r1n s1n settings + liftIO (validateStatus resp 200) + exists <- indexExists expectedIndex + liftIO (exists `shouldBe` True) + go `finally` deleteIndex expectedIndex + +-- | Get configured repo paths for snapshotting. Note that by default +-- this is not enabled and if we are over es 1.5, we won't be able to +-- test snapshotting. Note that this can and should be part of the +-- client functionality in a much less ad-hoc incarnation. 
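+--
+-- As a rough illustration only (the node id and repo path below are
+-- hypothetical), the @_nodes@ payload this walks looks something like:
+--
+-- > { "nodes":
+-- >     { "fOVhh6XyTFqcoiGMhtEwNQ":
+-- >         { "settings": { "path": { "repo": [ "/tmp/backups" ] } } } } }
+--
+-- so an empty result usually just means no @path.repo@ was configured.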
+getRepoPaths :: IO [FilePath] +getRepoPaths = withTestEnv $ do + bhe <- getBHEnv + let Server s = bhServer bhe + let tUrl = s <> "/" <> "_nodes" + initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl)) + let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet } + Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe)) + return $ fromMaybe mempty $ do + Object nodes <- HM.lookup "nodes" o + Object firstNode <- snd <$> headMay (HM.toList nodes) + Object settings <- HM.lookup "settings" firstNode + Object path <- HM.lookup "path" settings + Array repo <- HM.lookup "repo" path + return [ T.unpack t | String t <- V.toList repo] + +-- | 1.5 and earlier don't care about repo paths +canSnapshot :: IO Bool +canSnapshot = do + caresAboutRepos <- atleast es16 + repoPaths <- getRepoPaths + return (not caresAboutRepos || not (null repoPaths)) + +withSnapshotRepo + :: ( MonadMask m + , MonadBH m + ) + => SnapshotRepoName + -> (GenericSnapshotRepo -> m a) + -> m a +withSnapshotRepo srn@(SnapshotRepoName n) f = do + repoPaths <- liftIO getRepoPaths + -- we'll use the first repo path if available, otherwise system temp + -- dir. Note that this will fail on ES >= 1.6, so be sure you use + -- @when' canSnapshot@. + case repoPaths of + (firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f + [] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f + where + alloc dir = do + liftIO (setFileMode dir mode) + let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing + resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo + liftIO (validateStatus resp 200) + return (toGSnapshotRepo repo) + mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes + free GenericSnapshotRepo {..} = do + resp <- deleteSnapshotRepo gSnapshotRepoName + liftIO (validateStatus resp 200) + + +withSnapshot + :: ( MonadMask m + , MonadBH m + ) + => SnapshotRepoName + -> SnapshotName + -> m a + -> m a +withSnapshot srn sn = bracket_ alloc free + where + alloc = do + resp <- createSnapshot srn sn createSettings + liftIO (validateStatus resp 200) + -- We'll make this synchronous for testing purposes + createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True + , snapIndices = Just (IndexList (testIndex :| [])) + -- We don't actually need to back up any data + } + free = + deleteSnapshot srn sn diff --git a/tests/V1/Test/Sorting.hs b/tests/V1/Test/Sorting.hs new file mode 100644 index 0000000..9df08ab --- /dev/null +++ b/tests/V1/Test/Sorting.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Sorting where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "sorting" $ + it "returns documents in the right order" $ withTestEnv $ do + _ <- insertData + _ <- insertOther + let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending + let search = Search Nothing + Nothing (Just [sortSpec]) Nothing Nothing + False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing + Nothing + result <- searchTweets search + let myTweet = grabFirst result + liftIO $ + myTweet `shouldBe` Right otherTweet diff --git a/tests/V1/Test/SourceFiltering.hs b/tests/V1/Test/SourceFiltering.hs new file mode 100644 index 0000000..447980c --- /dev/null +++ b/tests/V1/Test/SourceFiltering.hs @@ -0,0 +1,38 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.SourceFiltering where + +import Test.Common +import 
Test.Import + +import qualified Data.HashMap.Strict as HM + +spec :: Spec +spec = + describe "Source filtering" $ do + + it "doesn't include source when sources are disabled" $ withTestEnv $ + searchExpectSource + NoSource + (Left (EsError 500 "Source was missing")) + + it "includes a source" $ withTestEnv $ + searchExpectSource + (SourcePatterns (PopPattern (Pattern "message"))) + (Right (Object (HM.fromList [("message", String "Use haskell!")]))) + + it "includes sources" $ withTestEnv $ + searchExpectSource + (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"])) + (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")]))) + + it "includes source patterns" $ withTestEnv $ + searchExpectSource + (SourcePatterns (PopPattern (Pattern "*ge"))) + (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")]))) + + it "excludes source patterns" $ withTestEnv $ + searchExpectSource + (SourceIncludeExclude (Include []) + (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"])) + (Right (Object (HM.fromList [("user",String "bitemyapp")]))) diff --git a/tests/V1/Test/Suggest.hs b/tests/V1/Test/Suggest.hs new file mode 100644 index 0000000..34e4262 --- /dev/null +++ b/tests/V1/Test/Suggest.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Suggest where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "Suggest" $ + it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do + _ <- insertData + let phraseSuggester = mkPhraseSuggester (FieldName "message") + namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester) + search' = mkSearch Nothing Nothing + search = search' { suggestBody = Just namedSuggester } + expectedText = Just "use haskell" + resp <- searchByIndex testIndex search + parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet)) + case parsed of + Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e) + Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . 
nsrResponses <$> suggest sr) `shouldBe` expectedText diff --git a/tests/V1/Test/Templates.hs b/tests/V1/Test/Templates.hs new file mode 100644 index 0000000..bda85e0 --- /dev/null +++ b/tests/V1/Test/Templates.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Templates where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "template API" $ do + it "can create a template" $ withTestEnv $ do + let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping] + resp <- putTemplate idxTpl (TemplateName "tweet-tpl") + liftIO $ validateStatus resp 200 + + it "can detect if a template exists" $ withTestEnv $ do + exists <- templateExists (TemplateName "tweet-tpl") + liftIO $ exists `shouldBe` True + + it "can delete a template" $ withTestEnv $ do + resp <- deleteTemplate (TemplateName "tweet-tpl") + liftIO $ validateStatus resp 200 + + it "can detect if a template doesn't exist" $ withTestEnv $ do + exists <- templateExists (TemplateName "tweet-tpl") + liftIO $ exists `shouldBe` False diff --git a/tests/V1/tests.hs b/tests/V1/tests.hs index bac9360..d173605 100644 --- a/tests/V1/tests.hs +++ b/tests/V1/tests.hs @@ -1,9 +1,5 @@ {-# LANGUAGE CPP #-} -{-# LANGUAGE DefaultSignatures #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} @@ -17,969 +13,38 @@ #endif module Main where -import Control.Applicative -import Control.Error -import Control.Exception (evaluate) -import Control.Monad -import Control.Monad.Catch -import Control.Monad.Reader -import Data.Aeson -import Data.Aeson.Types (parseEither) -import qualified Data.ByteString.Lazy.Char8 as BL8 -import qualified Data.HashMap.Strict as HM -import Data.List (nub) -import qualified Data.List as L -import Data.List.NonEmpty (NonEmpty (..)) -import qualified Data.List.NonEmpty as NE -import qualified Data.Map.Strict as M -import Data.Monoid -import Data.Ord (comparing) -import Data.Proxy -import Data.Text (Text) -import qualified Data.Text as T -import Data.Time.Calendar (Day (..), fromGregorian) -import Data.Time.Clock (NominalDiffTime, UTCTime (..), - secondsToDiffTime) -import Data.Typeable -import qualified Data.Vector as V -import qualified Data.Version as Vers -import Database.V1.Bloodhound -import GHC.Generics as G -import Network.HTTP.Client hiding (Proxy) -import qualified Network.HTTP.Types.Method as NHTM -import qualified Network.HTTP.Types.Status as NHTS -import qualified Network.URI as URI -import Prelude hiding (filter) -import System.IO.Temp -import System.PosixCompat.Files -import Test.Hspec -import Test.QuickCheck.Property.Monoid (T (..), eq, prop_Monoid) - -import Test.Hspec.QuickCheck (prop) -import Test.QuickCheck - -import qualified Generics.SOP as SOP -import qualified Generics.SOP.GGP as SOP - -testServer :: Server -testServer = Server "http://localhost:9200" -testIndex :: IndexName -testIndex = IndexName "bloodhound-tests-twitter-1" -testMapping :: MappingName -testMapping = MappingName "tweet" - -withTestEnv :: BH IO a -> IO a -withTestEnv = withBH defaultManagerSettings testServer - -validateStatus :: Show body => Response body -> Int -> Expectation -validateStatus resp expected = - if actual == expected - then return () - else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": 
" <> show body) - where - actual = NHTS.statusCode (responseStatus resp) - body = responseBody resp - -createExampleIndex :: (MonadBH m) => m Reply -createExampleIndex = createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex -deleteExampleIndex :: (MonadBH m) => m Reply -deleteExampleIndex = deleteIndex testIndex - -es13 :: Vers.Version -es13 = Vers.Version [1, 3, 0] [] - -es12 :: Vers.Version -es12 = Vers.Version [1, 2, 0] [] - -es11 :: Vers.Version -es11 = Vers.Version [1, 1, 0] [] - -es14 :: Vers.Version -es14 = Vers.Version [1, 4, 0] [] - -es15 :: Vers.Version -es15 = Vers.Version [1, 5, 0] [] - -es16 :: Vers.Version -es16 = Vers.Version [1, 6, 0] [] - -es20 :: Vers.Version -es20 = Vers.Version [2, 0, 0] [] - -getServerVersion :: IO (Maybe Vers.Version) -getServerVersion = fmap extractVersion <$> withTestEnv getStatus - where - extractVersion = versionNumber . number . version - --- | Get configured repo paths for snapshotting. Note that by default --- this is not enabled and if we are over es 1.5, we won't be able to --- test snapshotting. Note that this can and should be part of the --- client functionality in a much less ad-hoc incarnation. -getRepoPaths :: IO [FilePath] -getRepoPaths = withTestEnv $ do - bhe <- getBHEnv - let Server s = bhServer bhe - let tUrl = s <> "/" <> "_nodes" - initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl)) - let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet } - Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe)) - return $ fromMaybe mempty $ do - Object nodes <- HM.lookup "nodes" o - Object firstNode <- snd <$> headMay (HM.toList nodes) - Object settings <- HM.lookup "settings" firstNode - Object path <- HM.lookup "path" settings - Array repo <- HM.lookup "repo" path - return [ T.unpack t | String t <- V.toList repo] - --- | 1.5 and earlier don't care about repo paths -canSnapshot :: IO Bool -canSnapshot = do - caresAboutRepos <- atleast es16 - repoPaths <- getRepoPaths - return (not caresAboutRepos || not (null (repoPaths))) - -atleast :: Vers.Version -> IO Bool -atleast v = getServerVersion >>= \x -> return $ x >= Just v - -atmost :: Vers.Version -> IO Bool -atmost v = getServerVersion >>= \x -> return $ x <= Just v - -is :: Vers.Version -> IO Bool -is v = getServerVersion >>= \x -> return $ x == Just v - -when' :: Monad m => m Bool -> m () -> m () -when' b f = b >>= \x -> when x f - -(==~) :: (ApproxEq a) => a -> a -> Property -a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b) - -propJSON :: forall a. 
(Arbitrary a, ToJSON a, FromJSON a, Show a, ApproxEq a, Typeable a) => Proxy a -> Spec -propJSON _ = prop testName $ \(a :: a) -> - let jsonStr = "via " <> BL8.unpack (encode a) - in counterexample jsonStr (parseEither parseJSON (toJSON a) ==~ Right a) - where testName = show ty <> " FromJSON/ToJSON roundtrips" - ty = typeOf (undefined :: a) - -data Location = Location { lat :: Double - , lon :: Double } deriving (Eq, Generic, Show) - -data Tweet = Tweet { user :: Text - , postDate :: UTCTime - , message :: Text - , age :: Int - , location :: Location - , extra :: Maybe Text } - deriving (Eq, Generic, Show) - -instance ToJSON Tweet where - toJSON = genericToJSON defaultOptions -instance FromJSON Tweet where - parseJSON = genericParseJSON defaultOptions -instance ToJSON Location where - toJSON = genericToJSON defaultOptions -instance FromJSON Location where - parseJSON = genericParseJSON defaultOptions - -data ParentMapping = ParentMapping deriving (Eq, Show) - -instance ToJSON ParentMapping where - toJSON ParentMapping = - object ["properties" .= - object [ "user" .= object ["type" .= ("string" :: Text)] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - ]] - -data ChildMapping = ChildMapping deriving (Eq, Show) - -instance ToJSON ChildMapping where - toJSON ChildMapping = - object ["_parent" .= object ["type" .= ("parent" :: Text)] - , "properties" .= - object [ "user" .= object ["type" .= ("string" :: Text)] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - ]] - -data TweetMapping = TweetMapping deriving (Eq, Show) - -instance ToJSON TweetMapping where - toJSON TweetMapping = - object ["properties" .= - object [ "user" .= object ["type" .= ("string" :: Text)] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - ]] - -exampleTweet :: Tweet -exampleTweet = Tweet { user = "bitemyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 10) - , message = "Use haskell!" - , age = 10000 - , location = Location 40.12 (-71.34) - , extra = Nothing } - -tweetWithExtra :: Tweet -tweetWithExtra = Tweet { user = "bitemyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 10) - , message = "Use haskell!" 
- , age = 10000 - , location = Location 40.12 (-71.34) - , extra = Just "blah blah" } - -newAge :: Int -newAge = 31337 - -newUser :: Text -newUser = "someotherapp" - -tweetPatch :: Value -tweetPatch = - object [ "age" .= newAge - , "user" .= newUser - ] - -patchedTweet :: Tweet -patchedTweet = exampleTweet{age = newAge, user = newUser} - -otherTweet :: Tweet -otherTweet = Tweet { user = "notmyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 11) - , message = "Use haskell!" - , age = 1000 - , location = Location 40.12 (-71.34) - , extra = Nothing } - -resetIndex :: BH IO () -resetIndex = do - _ <- deleteExampleIndex - _ <- createExampleIndex - _ <- putMapping testIndex testMapping TweetMapping - return () - -insertData :: BH IO Reply -insertData = do - resetIndex - insertData' defaultIndexDocumentSettings - -insertData' :: IndexDocumentSettings -> BH IO Reply -insertData' ids = do - r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1") - _ <- refreshIndex testIndex - return r - -updateData :: BH IO Reply -updateData = do - r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1") - _ <- refreshIndex testIndex - return r - -insertOther :: BH IO () -insertOther = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2") - _ <- refreshIndex testIndex - return () - -insertExtra :: BH IO () -insertExtra = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4") - _ <- refreshIndex testIndex - return () - -insertWithSpaceInId :: BH IO () -insertWithSpaceInId = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World") - _ <- refreshIndex testIndex - return () - -searchTweet :: Search -> BH IO (Either EsError Tweet) -searchTweet search = do - result <- searchTweets search - let myTweet :: Either EsError Tweet - myTweet = grabFirst result - return myTweet - -searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet)) -searchTweets search = parseEsResponse =<< searchByIndex testIndex search - -searchExpectNoResults :: Search -> BH IO () -searchExpectNoResults search = do - result <- searchTweets search - let emptyHits = fmap (hits . searchHits) result - liftIO $ - emptyHits `shouldBe` Right [] - -searchExpectAggs :: Search -> BH IO () -searchExpectAggs search = do - reply <- searchByIndex testIndex search - let isEmpty x = return (M.null x) - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - liftIO $ - (result >>= aggregations >>= isEmpty) `shouldBe` Just False - -searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) => - Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO () -searchValidBucketAgg search aggKey extractor = do - reply <- searchByIndex testIndex search - let bucketDocs = docCount . head . buckets - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x) - liftIO $ - count `shouldBe` Just 1 - -searchTermsAggHint :: [ExecutionHint] -> BH IO () -searchTermsAggHint hints = do - let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint } - let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint - forM_ hints $ searchExpectAggs . 
search - forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms) - -searchTweetHighlight :: Search -> BH IO (Either EsError (Maybe HitHighlight)) -searchTweetHighlight search = do - result <- searchTweets search - let myHighlight = fmap (hitHighlight . head . hits . searchHits) result - return myHighlight - -searchExpectSource :: Source -> Either EsError Value -> BH IO () -searchExpectSource src expected = do - _ <- insertData - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let search = (mkSearch (Just query) Nothing) { source = Just src } - reply <- searchByIndex testIndex search - result <- parseEsResponse reply - let value = grabFirst result - liftIO $ - value `shouldBe` expected - -withSnapshotRepo - :: ( MonadMask m - , MonadBH m - ) - => SnapshotRepoName - -> (GenericSnapshotRepo -> m a) - -> m a -withSnapshotRepo srn@(SnapshotRepoName n) f = do - repoPaths <- liftIO getRepoPaths - -- we'll use the first repo path if available, otherwise system temp - -- dir. Note that this will fail on ES > 1.6, so be sure you use - -- @when' canSnapshot@. - case repoPaths of - (firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f - [] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f - where - alloc dir = do - liftIO (setFileMode dir mode) - let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing - resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo - liftIO (validateStatus resp 200) - return (toGSnapshotRepo repo) - mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes - free GenericSnapshotRepo {..} = do - resp <- deleteSnapshotRepo gSnapshotRepoName - liftIO (validateStatus resp 200) - - -withSnapshot - :: ( MonadMask m - , MonadBH m - ) - => SnapshotRepoName - -> SnapshotName - -> m a - -> m a -withSnapshot srn sn = bracket_ alloc free - where - alloc = do - resp <- createSnapshot srn sn createSettings - liftIO (validateStatus resp 200) - -- We'll make this synchronous for testing purposes - createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True - , snapIndices = Just (IndexList (testIndex :| [])) - -- We don't actually need to back up any data - } - free = do - deleteSnapshot srn sn - - - -data BulkTest = BulkTest { name :: Text } deriving (Eq, Generic, Show) -instance FromJSON BulkTest where - parseJSON = genericParseJSON defaultOptions -instance ToJSON BulkTest where - toJSON = genericToJSON defaultOptions - -class GApproxEq f where - gApproxEq :: f a -> f a -> Bool - --- | Unit type -instance GApproxEq U1 where - gApproxEq U1 U1 = True - --- | Sum type, ensure same constructors, recurse -instance (GApproxEq a, GApproxEq b) => GApproxEq (a :+: b) where - gApproxEq (L1 a) (L1 b) = gApproxEq a b - gApproxEq (R1 a) (R1 b) = gApproxEq a b - gApproxEq _ _ = False - --- | Product type, ensure each field is approx eq -instance (GApproxEq a, GApproxEq b) => GApproxEq (a :*: b) where - gApproxEq (a1 :*: b1) (a2 :*: b2) = gApproxEq a1 a2 && gApproxEq b1 b2 - --- | Value type, actually check the values for approx equality -instance (ApproxEq a) => GApproxEq (K1 i a) where - gApproxEq (K1 a) (K1 b) = a =~ b - -instance (GApproxEq f) => GApproxEq (M1 i t f) where - gApproxEq (M1 a) (M1 b) = gApproxEq a b - --- | Typeclass for "equal where it matters". 
Use this to specify --- less-strict equivalence for things such as lists that can wind up --- in an unpredictable order -class ApproxEq a where - (=~) :: a -> a -> Bool - default (=~) :: (Generic a, GApproxEq (Rep a)) => a -> a -> Bool - a =~ b = gApproxEq (G.from a) (G.from b) - - showApproxEq :: a -> String - default showApproxEq :: (Show a) => a -> String - showApproxEq = show - -instance ApproxEq NominalDiffTime where (=~) = (==) -instance ApproxEq UTCTime where (=~) = (==) -instance ApproxEq Text where (=~) = (==) -instance ApproxEq Bool where (=~) = (==) -instance ApproxEq Int where (=~) = (==) -instance ApproxEq Double where (=~) = (==) -instance (ApproxEq a, Show a) => ApproxEq (NonEmpty a) -instance (ApproxEq a, Show a) => ApproxEq (Maybe a) -instance ApproxEq GeoPoint -instance ApproxEq Regexp -instance ApproxEq RangeValue -instance ApproxEq LessThan -instance ApproxEq LessThanEq -instance ApproxEq LessThanD -instance ApproxEq LessThanEqD -instance ApproxEq GreaterThan -instance ApproxEq GreaterThanEq -instance ApproxEq GreaterThanD -instance ApproxEq GreaterThanEqD -instance ApproxEq MinimumMatchHighLow -instance ApproxEq RegexpFlag -instance ApproxEq RegexpFlags -instance ApproxEq NullValue -instance ApproxEq Version -instance ApproxEq VersionNumber -instance ApproxEq DistanceRange -instance ApproxEq IndexName -instance ApproxEq MappingName -instance ApproxEq DocId -instance ApproxEq IndexAliasRouting -instance ApproxEq RoutingValue -instance ApproxEq ShardCount -instance ApproxEq ReplicaCount -instance ApproxEq TemplateName -instance ApproxEq TemplatePattern -instance ApproxEq QueryString -instance ApproxEq FieldName -instance ApproxEq CacheName -instance ApproxEq CacheKey -instance ApproxEq Existence -instance ApproxEq CutoffFrequency -instance ApproxEq Analyzer -instance ApproxEq Lenient -instance ApproxEq Tiebreaker -instance ApproxEq Boost -instance ApproxEq BoostTerms -instance ApproxEq MaxExpansions -instance ApproxEq MinimumMatch -instance ApproxEq DisableCoord -instance ApproxEq IgnoreTermFrequency -instance ApproxEq MinimumTermFrequency -instance ApproxEq MaxQueryTerms -instance ApproxEq Fuzziness -instance ApproxEq PrefixLength -instance ApproxEq TypeName -instance ApproxEq PercentMatch -instance ApproxEq StopWord -instance ApproxEq QueryPath -instance ApproxEq AllowLeadingWildcard -instance ApproxEq LowercaseExpanded -instance ApproxEq EnablePositionIncrements -instance ApproxEq AnalyzeWildcard -instance ApproxEq GeneratePhraseQueries -instance ApproxEq Locale -instance ApproxEq MaxWordLength -instance ApproxEq MinWordLength -instance ApproxEq PhraseSlop -instance ApproxEq MinDocFrequency -instance ApproxEq MaxDocFrequency -instance ApproxEq Filter -instance ApproxEq Query -instance ApproxEq SimpleQueryStringQuery -instance ApproxEq FieldOrFields -instance ApproxEq SimpleQueryFlag -instance ApproxEq RegexpQuery -instance ApproxEq QueryStringQuery -instance ApproxEq RangeQuery -instance ApproxEq PrefixQuery -instance ApproxEq NestedQuery -instance ApproxEq MoreLikeThisFieldQuery -instance ApproxEq MoreLikeThisQuery -instance ApproxEq IndicesQuery -instance ApproxEq HasParentQuery -instance ApproxEq HasChildQuery -instance ApproxEq FuzzyQuery -instance ApproxEq FuzzyLikeFieldQuery -instance ApproxEq FuzzyLikeThisQuery -instance ApproxEq FilteredQuery -instance ApproxEq DisMaxQuery -instance ApproxEq CommonTermsQuery -instance ApproxEq CommonMinimumMatch -instance ApproxEq BoostingQuery -instance ApproxEq BoolQuery -instance ApproxEq MatchQuery -instance ApproxEq 
MultiMatchQueryType -instance ApproxEq BooleanOperator -instance ApproxEq ZeroTermsQuery -instance ApproxEq MatchQueryType -instance ApproxEq AliasRouting -instance ApproxEq IndexAliasCreate -instance ApproxEq SearchAliasRouting -instance ApproxEq ScoreType -instance ApproxEq Distance -instance ApproxEq DistanceUnit -instance ApproxEq DistanceType -instance ApproxEq OptimizeBbox -instance ApproxEq GeoBoundingBoxConstraint -instance ApproxEq GeoFilterType -instance ApproxEq GeoBoundingBox -instance ApproxEq LatLon -instance ApproxEq RangeExecution -instance ApproxEq FSType -instance ApproxEq CompoundFormat -instance ApproxEq InitialShardCount -instance ApproxEq Bytes -instance ApproxEq ReplicaBounds -instance ApproxEq Term -instance ApproxEq BoolMatch -instance ApproxEq MultiMatchQuery -instance ApproxEq IndexSettings -instance ApproxEq AllocationPolicy -instance ApproxEq Char where - (=~) = (==) -instance ApproxEq Vers.Version where - (=~) = (==) -instance (ApproxEq a, Show a) => ApproxEq [a] where - as =~ bs = and (zipWith (=~) as bs) -instance (ApproxEq l, ApproxEq r) => ApproxEq (Either l r) where - Left a =~ Left b = a =~ b - Right a =~ Right b = a =~ b - _ =~ _ = False - showApproxEq (Left x) = "Left " <> showApproxEq x - showApproxEq (Right x) = "Right " <> showApproxEq x -instance ApproxEq NodeAttrFilter -instance ApproxEq NodeAttrName -instance ApproxEq BuildHash -instance ApproxEq TemplateQueryKeyValuePairs where - (=~) = (==) -instance ApproxEq TemplateQueryInline -instance ApproxEq Size -instance ApproxEq PhraseSuggesterHighlighter -instance ApproxEq PhraseSuggesterCollate -instance ApproxEq PhraseSuggester -instance ApproxEq SuggestType -instance ApproxEq Suggest -instance ApproxEq DirectGenerators -instance ApproxEq DirectGeneratorSuggestModeTypes - --- | Due to the way nodeattrfilters get serialized here, they may come --- out in a different order, but they are morally equivalent -instance ApproxEq UpdatableIndexSetting where - RoutingAllocationInclude a =~ RoutingAllocationInclude b = - NE.sort a =~ NE.sort b - RoutingAllocationExclude a =~ RoutingAllocationExclude b = - NE.sort a =~ NE.sort b - RoutingAllocationRequire a =~ RoutingAllocationRequire b = - NE.sort a =~ NE.sort b - a =~ b = a == b - showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs)) - showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs)) - showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs)) - showApproxEq x = show x - - -noDuplicates :: Eq a => [a] -> Bool -noDuplicates xs = nub xs == xs - -instance Arbitrary NominalDiffTime where - arbitrary = fromInteger <$> arbitrary - -#if !MIN_VERSION_QuickCheck(2,8,0) -instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where - arbitrary = M.fromList <$> arbitrary -#endif - -instance Arbitrary Text where - arbitrary = T.pack <$> arbitrary - -instance Arbitrary UTCTime where - arbitrary = UTCTime - <$> arbitrary - <*> (fromRational . toRational <$> choose (0::Double, 86400)) - -instance Arbitrary Day where - arbitrary = ModifiedJulianDay <$> (2000 +) <$> arbitrary - shrink = (ModifiedJulianDay <$>) . shrink . 
toModifiedJulianDay - -#if !MIN_VERSION_QuickCheck(2,9,0) -instance Arbitrary a => Arbitrary (NonEmpty a) where - arbitrary = liftA2 (:|) arbitrary arbitrary -#endif - -arbitraryScore :: Gen Score -arbitraryScore = fmap getPositive <$> arbitrary - -instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where - arbitrary = Hit <$> arbitrary - <*> arbitrary - <*> arbitrary - <*> arbitraryScore - <*> arbitrary - <*> arbitrary - shrink = genericShrink - - -instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where - arbitrary = reduceSize $ do - tot <- getPositive <$> arbitrary - score <- arbitraryScore - hs <- arbitrary - return $ SearchHits tot score hs - shrink = genericShrink - -reduceSize :: Gen a -> Gen a -reduceSize f = sized $ \n -> resize (n `div` 2) f - -getSource :: EsResult a -> Maybe a -getSource = fmap _source . foundResult - -grabFirst :: Either EsError (SearchResult a) -> Either EsError a -grabFirst r = - case fmap (hitSource . head . hits . searchHits) r of - (Left e) -> Left e - (Right Nothing) -> Left (EsError 500 "Source was missing") - (Right (Just x)) -> Right x - -------------------------------------------------------------------------------- -arbitraryAlphaNum :: Gen Char -arbitraryAlphaNum = oneof [choose ('a', 'z') - ,choose ('A','Z') - , choose ('0', '9')] - -instance Arbitrary RoutingValue where - arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum - -instance Arbitrary AliasRouting where - arbitrary = oneof [allAlias - ,one - ,theOther - ,both] - where one = GranularAliasRouting - <$> (Just <$> arbitrary) - <*> pure Nothing - theOther = GranularAliasRouting Nothing - <$> (Just <$> arbitrary) - both = GranularAliasRouting - <$> (Just <$> arbitrary) - <*> (Just <$> arbitrary) - allAlias = AllAliasRouting <$> arbitrary - shrink = genericShrink - - -instance Arbitrary FieldName where - arbitrary = FieldName . T.pack <$> listOf1 arbitraryAlphaNum - shrink = genericShrink - - -#if MIN_VERSION_base(4,10,0) --- Test.QuickCheck.Modifiers - -qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a -qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs) -qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!" - -instance Arbitrary a => Arbitrary (NonEmpty a) where - arbitrary = qcNonEmptyToNonEmpty <$> arbitrary -#endif - -instance Arbitrary RegexpFlags where - arbitrary = oneof [ pure AllRegexpFlags - , pure NoRegexpFlags - , SomeRegexpFlags <$> genUniqueFlags - ] - where genUniqueFlags = NE.fromList . 
nub <$> listOf1 arbitrary - shrink = genericShrink - - -instance Arbitrary IndexAliasCreate where - arbitrary = IndexAliasCreate <$> arbitrary <*> reduceSize arbitrary - shrink = genericShrink - -instance Arbitrary Query where - arbitrary = reduceSize $ oneof [ TermQuery <$> arbitrary <*> arbitrary - , TermsQuery <$> arbitrary <*> arbitrary - , QueryMatchQuery <$> arbitrary - , QueryMultiMatchQuery <$> arbitrary - , QueryBoolQuery <$> arbitrary - , QueryBoostingQuery <$> arbitrary - , QueryCommonTermsQuery <$> arbitrary - , ConstantScoreFilter <$> arbitrary <*> arbitrary - , ConstantScoreQuery <$> arbitrary <*> arbitrary - , QueryDisMaxQuery <$> arbitrary - , QueryFilteredQuery <$> arbitrary - , QueryFuzzyLikeThisQuery <$> arbitrary - , QueryFuzzyLikeFieldQuery <$> arbitrary - , QueryFuzzyQuery <$> arbitrary - , QueryHasChildQuery <$> arbitrary - , QueryHasParentQuery <$> arbitrary - , IdsQuery <$> arbitrary <*> arbitrary - , QueryIndicesQuery <$> arbitrary - , MatchAllQuery <$> arbitrary - , QueryMoreLikeThisQuery <$> arbitrary - , QueryMoreLikeThisFieldQuery <$> arbitrary - , QueryNestedQuery <$> arbitrary - , QueryPrefixQuery <$> arbitrary - , QueryQueryStringQuery <$> arbitrary - , QuerySimpleQueryStringQuery <$> arbitrary - , QueryRangeQuery <$> arbitrary - , QueryRegexpQuery <$> arbitrary - , QueryTemplateQueryInline <$> arbitrary - ] - shrink = genericShrink - -instance Arbitrary Filter where - arbitrary = reduceSize $ oneof [ AndFilter <$> arbitrary <*> arbitrary - , OrFilter <$> arbitrary <*> arbitrary - , NotFilter <$> arbitrary <*> arbitrary - , pure IdentityFilter - , BoolFilter <$> arbitrary - , ExistsFilter <$> arbitrary - , GeoBoundingBoxFilter <$> arbitrary - , GeoDistanceFilter <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary - , GeoDistanceRangeFilter <$> arbitrary <*> arbitrary - , GeoPolygonFilter <$> arbitrary <*> arbitrary - , IdsFilter <$> arbitrary <*> arbitrary - , LimitFilter <$> arbitrary - , MissingFilter <$> arbitrary <*> arbitrary <*> arbitrary - , PrefixFilter <$> arbitrary <*> arbitrary <*> arbitrary - , QueryFilter <$> arbitrary <*> arbitrary - , RangeFilter <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary - , RegexpFilter <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary - , TermFilter <$> arbitrary <*> arbitrary] - shrink = genericShrink - -instance Arbitrary ReplicaBounds where - arbitrary = oneof [ replicasBounded - , replicasLowerBounded - , pure ReplicasUnbounded - ] - where replicasBounded = do Positive a <- arbitrary - Positive b <- arbitrary - return (ReplicasBounded a b) - replicasLowerBounded = do Positive a <- arbitrary - return (ReplicasLowerBounded a) - -instance Arbitrary NodeAttrName where - arbitrary = NodeAttrName . T.pack <$> listOf1 arbitraryAlphaNum - - -instance Arbitrary NodeAttrFilter where - arbitrary = do - n <- arbitrary - s:ss <- listOf1 (listOf1 arbitraryAlphaNum) - let ts = T.pack <$> s :| ss - return (NodeAttrFilter n ts) - shrink = genericShrink - -instance Arbitrary VersionNumber where - arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary - where - mk versions = VersionNumber (Vers.Version versions []) - -instance Arbitrary TemplateQueryKeyValuePairs where - arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary - shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . 
shrink $ HM.toList x - -instance Arbitrary IndexName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MappingName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DocId where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Version where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BuildHash where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndexAliasRouting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ShardCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ReplicaCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplateName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplatePattern where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryString where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CacheName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CacheKey where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Existence where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CutoffFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Analyzer where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxExpansions where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Lenient where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Tiebreaker where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Boost where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoostTerms where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DisableCoord where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IgnoreTermFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumTermFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxQueryTerms where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Fuzziness where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PrefixLength where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TypeName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PercentMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary StopWord where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryPath where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AllowLeadingWildcard where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LowercaseExpanded where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary EnablePositionIncrements where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AnalyzeWildcard where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeneratePhraseQueries where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Locale where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxWordLength where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinWordLength where arbitrary = sopArbitrary; shrink = genericShrink -instance 
Arbitrary PhraseSlop where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinDocFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxDocFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Regexp where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SimpleQueryStringQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FieldOrFields where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SimpleQueryFlag where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RegexpQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryStringQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RangeQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RangeValue where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PrefixQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary NestedQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MoreLikeThisFieldQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MoreLikeThisQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndicesQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary HasParentQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary HasChildQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FuzzyQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FuzzyLikeFieldQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FuzzyLikeThisQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FilteredQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DisMaxQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CommonTermsQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DistanceRange where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MultiMatchQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThanD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThanEqD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThanD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThanEqD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThan where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThanEq where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThan where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThanEq where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoPoint where - arbitrary = GeoPoint <$> (arbitrary `suchThat` reasonableFieldName) <*> arbitrary - where - -- These are problematic for geopoint for obvious reasons - reasonableFieldName (FieldName "from") = False - reasonableFieldName (FieldName "to") = False - reasonableFieldName _ = True - shrink = genericShrink -instance Arbitrary NullValue where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumMatchHighLow where arbitrary = sopArbitrary; shrink = genericShrink -instance 
Arbitrary CommonMinimumMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoostingQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoolQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MatchQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MultiMatchQueryType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BooleanOperator where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ZeroTermsQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MatchQueryType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SearchAliasRouting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ScoreType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Distance where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DistanceUnit where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DistanceType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary OptimizeBbox where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoBoundingBoxConstraint where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoFilterType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoBoundingBox where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LatLon where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RangeExecution where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RegexpFlag where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoolMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Term where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndexSettings where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary UpdatableIndexSetting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Bytes where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AllocationPolicy where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary InitialShardCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FSType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CompoundFormat where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FsSnapshotRepo where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SnapshotRepoName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplateQueryInline where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PhraseSuggesterCollate where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PhraseSuggesterHighlighter where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Size where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PhraseSuggester where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SuggestType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Suggest where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DirectGenerators where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = 
sopArbitrary; shrink = genericShrink - -newtype UpdatableIndexSetting' = UpdatableIndexSetting' UpdatableIndexSetting - deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable) - -instance Arbitrary UpdatableIndexSetting' where - arbitrary = do - settings <- arbitrary - return $ UpdatableIndexSetting' $ case settings of - RoutingAllocationInclude xs -> RoutingAllocationInclude (dropDuplicateAttrNames xs) - RoutingAllocationExclude xs -> RoutingAllocationExclude (dropDuplicateAttrNames xs) - RoutingAllocationRequire xs -> RoutingAllocationRequire (dropDuplicateAttrNames xs) - x -> x - where - dropDuplicateAttrNames = NE.fromList . L.nubBy sameAttrName . NE.toList - sameAttrName a b = nodeAttrFilterName a == nodeAttrFilterName b +import Test.Common +import Test.Import + +import Prelude + +import qualified Test.Aggregation as Aggregation +import qualified Test.BulkAPI as Bulk +import qualified Test.Documents as Documents +import qualified Test.Highlights as Highlights +import qualified Test.Indices as Indices +import qualified Test.JSON as JSON +import qualified Test.Query as Query +import qualified Test.Snapshots as Snapshots +import qualified Test.Sorting as Sorting +import qualified Test.SourceFiltering as SourceFiltering +import qualified Test.Suggest as Suggest +import qualified Test.Templates as Templates main :: IO () main = hspec $ do - - describe "index create/delete API" $ do - it "creates and then deletes the requested index" $ withTestEnv $ do - -- priming state. - _ <- deleteExampleIndex - resp <- createExampleIndex - deleteResp <- deleteExampleIndex - liftIO $ do - validateStatus resp 200 - validateStatus deleteResp 200 + Aggregation.spec + Bulk.spec + Documents.spec + Highlights.spec + Indices.spec + JSON.spec + Query.spec + Snapshots.spec + Sorting.spec + SourceFiltering.spec + Suggest.spec + Templates.spec describe "error parsing" $ do it "can parse EsErrors for < 2.0" $ when' (atmost es16) $ withTestEnv $ do @@ -992,619 +57,11 @@ main = hspec $ do let errorResp = eitherDecode (responseBody res) liftIO (errorResp `shouldBe` Right (EsError 404 "no such index")) - describe "document API" $ do - it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do - _ <- insertData - _ <- updateData - docInserted <- getDocument testIndex testMapping (DocId "1") - let newTweet = eitherDecode - (responseBody docInserted) :: Either String (EsResult Tweet) - liftIO $ (fmap getSource newTweet `shouldBe` Right (Just patchedTweet)) - - it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do - _ <- insertWithSpaceInId - docInserted <- getDocument testIndex testMapping (DocId "Hello World") - let newTweet = eitherDecode - (responseBody docInserted) :: Either String (EsResult Tweet) - liftIO $ (fmap getSource newTweet `shouldBe` Right (Just exampleTweet)) - - it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do - doc <- getDocument testIndex testMapping (DocId "bogus") - let noTweet = eitherDecode - (responseBody doc) :: Either String (EsResult Tweet) - liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing - - it "can use optimistic concurrency control" $ withTestEnv $ do - let ev = ExternalDocVersion minBound - let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev } - resetIndex - res <- insertData' cfg - liftIO $ isCreated res `shouldBe` True - res' <- insertData' cfg - liftIO $ isVersionConflict res' `shouldBe` True - - it "indexes two documents in a 
parent/child relationship and checks that the child exists" $ withTestEnv $ do - resetIndex - _ <- putMapping testIndex (MappingName "child") ChildMapping - _ <- putMapping testIndex (MappingName "parent") ParentMapping - _ <- indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1") - let parent = (Just . DocumentParent . DocId) "1" - ids = IndexDocumentSettings NoVersionControl parent - _ <- indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2") - _ <- refreshIndex testIndex - exists <- documentExists testIndex (MappingName "child") parent (DocId "2") - liftIO $ exists `shouldBe` True - - describe "template API" $ do - it "can create a template" $ withTestEnv $ do - let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping] - resp <- putTemplate idxTpl (TemplateName "tweet-tpl") - liftIO $ validateStatus resp 200 - - it "can detect if a template exists" $ withTestEnv $ do - exists <- templateExists (TemplateName "tweet-tpl") - liftIO $ exists `shouldBe` True - - it "can delete a template" $ withTestEnv $ do - resp <- deleteTemplate (TemplateName "tweet-tpl") - liftIO $ validateStatus resp 200 - - it "can detect if a template doesn't exist" $ withTestEnv $ do - exists <- templateExists (TemplateName "tweet-tpl") - liftIO $ exists `shouldBe` False - - describe "bulk API" $ do - it "inserts all documents we request" $ withTestEnv $ do - _ <- insertData - let firstTest = BulkTest "blah" - let secondTest = BulkTest "bloo" - let firstDoc = BulkIndex testIndex - testMapping (DocId "2") (toJSON firstTest) - let secondDoc = BulkCreate testIndex - testMapping (DocId "3") (toJSON secondTest) - let stream = V.fromList [firstDoc, secondDoc] - _ <- bulk stream - _ <- refreshIndex testIndex - fDoc <- getDocument testIndex testMapping (DocId "2") - sDoc <- getDocument testIndex testMapping (DocId "3") - let maybeFirst = eitherDecode $ responseBody fDoc :: Either String (EsResult BulkTest) - let maybeSecond = eitherDecode $ responseBody sDoc :: Either String (EsResult BulkTest) - liftIO $ do - fmap getSource maybeFirst `shouldBe` Right (Just firstTest) - fmap getSource maybeSecond `shouldBe` Right (Just secondTest) - - - describe "query API" $ do - it "returns document for term query and identity filter" $ withTestEnv $ do - _ <- insertData - let query = TermQuery (Term "user" "bitemyapp") Nothing - let filter = IdentityFilter <&&> IdentityFilter - let search = mkSearch (Just query) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "handles constant score queries" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let cfQuery = ConstantScoreQuery query (Boost 1.0) - let filter = IdentityFilter - let search = mkSearch (Just cfQuery) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - it "handles constant score filters" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let cfFilter = ConstantScoreFilter IdentityFilter (Boost 1.0) - let boolQuery = mkBoolQuery [query, cfFilter] [] [] - let search = mkSearch (Just (QueryBoolQuery boolQuery)) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - - it "returns document for terms query and identity filter" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let filter = 
IdentityFilter <&&> IdentityFilter - let search = mkSearch (Just query) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for match query" $ withTestEnv $ do - _ <- insertData - let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for multi-match query" $ withTestEnv $ do - _ <- insertData - let flds = [FieldName "user", FieldName "message"] - let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp") - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do - _ <- insertData - let tiebreaker = Just $ Tiebreaker 0.3 - flds = [FieldName "user", FieldName "message"] - multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp") - query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker } - search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for bool query" $ withTestEnv $ do - _ <- insertData - let innerQuery = QueryMatchQuery $ - mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let query = QueryBoolQuery $ - mkBoolQuery [innerQuery] [] [] - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for boosting query" $ withTestEnv $ do - _ <- insertData - let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp") - let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2) - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for common terms query" $ withTestEnv $ do - _ <- insertData - let query = QueryCommonTermsQuery $ - CommonTermsQuery (FieldName "user") - (QueryString "bitemyapp") - (CutoffFrequency 0.0001) - Or Or Nothing Nothing Nothing Nothing - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for inline template query" $ withTestEnv $ do - _ <- insertData - let innerQuery = QueryMatchQuery $ - mkMatchQuery (FieldName "{{userKey}}") - (QueryString "{{bitemyappKey}}") - templateParams = TemplateQueryKeyValuePairs $ HM.fromList - [ ("userKey", "user") - , ("bitemyappKey", "bitemyapp") - ] - templateQuery = QueryTemplateQueryInline $ - TemplateQueryInline innerQuery templateParams - search = mkSearch (Just templateQuery) Nothing - myTweet <- searchTweet search - liftIO $ myTweet `shouldBe` Right exampleTweet - - - describe "sorting" $ do - it "returns documents in the right order" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending - let search = Search Nothing - (Just IdentityFilter) (Just [sortSpec]) Nothing Nothing - False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing - Nothing - result <- searchTweets search - let myTweet = grabFirst result - liftIO $ - myTweet `shouldBe` Right otherTweet -
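Every query and sorting test above follows the same round trip: insert the fixtures, build a `Query` (or `Filter`), wrap it with `mkSearch`, and assert on the decoded hit. A minimal sketch of that shared pattern, assuming the suite's `insertData` and `searchTweet` helpers visible elsewhere in this patch (`searchTweet` appears to have type `Search -> BH IO (Either EsError Tweet)`):

```haskell
-- A sketch of the round trip the tests above share. It assumes the
-- suite's insertData and searchTweet helpers and the example fixtures.
queryRoundTrip :: BH IO (Either EsError Tweet)
queryRoundTrip = do
  _ <- insertData                 -- index the example tweet fixture
  let query  = QueryMatchQuery $
        mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
      search = mkSearch (Just query) Nothing
  searchTweet search              -- the tests expect Right exampleTweet
```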
- - describe "filtering API" $ do - it "returns document for composed boolmatch and identity" $ withTestEnv $ do - _ <- insertData - let queryFilter = BoolFilter (MustMatch (Term "user" "bitemyapp") False) - <&&> IdentityFilter - let search = mkSearch Nothing (Just queryFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for term filter" $ withTestEnv $ do - _ <- insertData - let termFilter = TermFilter (Term "user" "bitemyapp") False - let search = mkSearch Nothing (Just termFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for existential filter" $ withTestEnv $ do - _ <- insertData - let search = mkSearch Nothing (Just (ExistsFilter (FieldName "user"))) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for geo boundingbox filter" $ withTestEnv $ do - _ <- insertData - let box = GeoBoundingBox (LatLon 40.73 (-74.1)) (LatLon 40.10 (-71.12)) - let bbConstraint = GeoBoundingBoxConstraint (FieldName "tweet.location") box False GeoFilterMemory - let geoFilter = GeoBoundingBoxFilter bbConstraint - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for nonsensical boundingbox filter" $ withTestEnv $ do - _ <- insertData - let box = GeoBoundingBox (LatLon 0.73 (-4.1)) (LatLon 0.10 (-1.12)) - let bbConstraint = GeoBoundingBoxConstraint (FieldName "tweet.location") box False GeoFilterMemory - let geoFilter = GeoBoundingBoxFilter bbConstraint - let search = mkSearch Nothing (Just geoFilter) - searchExpectNoResults search - - it "returns document for geo distance filter" $ withTestEnv $ do - _ <- insertData - let geoPoint = GeoPoint (FieldName "tweet.location") (LatLon 40.12 (-71.34)) - let distance = Distance 10.0 Miles - let optimizeBbox = OptimizeGeoFilterType GeoFilterMemory - let geoFilter = GeoDistanceFilter geoPoint distance SloppyArc optimizeBbox False - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for geo distance range filter" $ withTestEnv $ do - _ <- insertData - let geoPoint = GeoPoint (FieldName "tweet.location") (LatLon 40.12 (-71.34)) - let distanceRange = DistanceRange (Distance 0.0 Miles) (Distance 10.0 Miles) - let geoFilter = GeoDistanceRangeFilter geoPoint distanceRange - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for wild geo distance range filter" $ withTestEnv $ do - _ <- insertData - let geoPoint = GeoPoint (FieldName "tweet.location") (LatLon 40.12 (-71.34)) - let distanceRange = DistanceRange (Distance 100.0 Miles) (Distance 1000.0 Miles) - let geoFilter = GeoDistanceRangeFilter geoPoint distanceRange - let search = mkSearch Nothing (Just geoFilter) - searchExpectNoResults search - - it "returns document for geo polygon filter" $ withTestEnv $ do - _ <- insertData - let points = [LatLon 40.0 (-70.00), - LatLon 40.0 (-72.00), - LatLon 41.0 (-70.00), - LatLon 41.0 (-72.00)] - let geoFilter = GeoPolygonFilter (FieldName "tweet.location") points - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for bad geo polygon filter" $ withTestEnv $ do - _ <- 
insertData - let points = [LatLon 40.0 (-70.00), - LatLon 40.0 (-71.00), - LatLon 41.0 (-70.00), - LatLon 41.0 (-71.00)] - let geoFilter = GeoPolygonFilter (FieldName "tweet.location") points - let search = mkSearch Nothing (Just geoFilter) - searchExpectNoResults search - - it "returns document for ids filter" $ withTestEnv $ do - _ <- insertData - let filter = IdsFilter (MappingName "tweet") [DocId "1"] - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for Double range filter" $ withTestEnv $ do - _ <- insertData - let filter = RangeFilter (FieldName "age") - (RangeDoubleGtLt (GreaterThan 1000.0) (LessThan 100000.0)) - RangeExecutionIndex False - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for UTCTime date filter" $ withTestEnv $ do - _ <- insertData - let filter = RangeFilter (FieldName "postDate") - (RangeDateGtLt - (GreaterThanD (UTCTime - (ModifiedJulianDay 54000) - (secondsToDiffTime 0))) - (LessThanD (UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 11)))) - RangeExecutionIndex False - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for regexp filter" $ withTestEnv $ do - _ <- insertData - let filter = RegexpFilter (FieldName "user") (Regexp "bite.*app") - AllRegexpFlags (CacheName "test") False (CacheKey "key") - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for non-matching regexp filter" $ withTestEnv $ do - _ <- insertData - let filter = RegexpFilter (FieldName "user") - (Regexp "boy") AllRegexpFlags - (CacheName "test") False (CacheKey "key") - let search = mkSearch Nothing (Just filter) - searchExpectNoResults search - - it "returns document for query filter, uncached" $ withTestEnv $ do - _ <- insertData - let filter = QueryFilter (TermQuery (Term "user" "bitemyapp") Nothing) True - search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ myTweet `shouldBe` Right exampleTweet - - it "returns document for query filter, cached" $ withTestEnv $ do - _ <- insertData - let filter = QueryFilter (TermQuery (Term "user" "bitemyapp") Nothing) False - search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ myTweet `shouldBe` Right exampleTweet - - describe "Aggregation API" $ do - it "returns term aggregation results" $ withTestEnv $ do - _ <- insertData - let terms = TermsAgg $ mkTermsAggregation "user" - let search = mkAggregateSearch Nothing $ mkAggregations "users" terms - searchExpectAggs search - searchValidBucketAgg search "users" toTerms - - it "returns sub-aggregation results" $ withTestEnv $ do - _ <- insertData - let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age" - agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs} - search = mkAggregateSearch Nothing $ mkAggregations "users" agg - reply <- searchByIndex testIndex search - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - usersAggResults = result >>= aggregations >>= toTerms "users" - subAggResults = usersAggResults >>= (listToMaybe . 
buckets) >>= termsAggs >>= toTerms "age_agg" - subAggResultsExists = isJust subAggResults - liftIO $ (subAggResultsExists) `shouldBe` True - - it "returns cardinality aggregation results" $ withTestEnv $ do - _ <- insertData - let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user" - let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality - let search' = search { Database.V1.Bloodhound.from = From 0, size = Size 0 } - searchExpectAggs search' - let docCountPair k n = (k, object ["value" .= Number n]) - res <- searchTweets search' - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1])) - - it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do - _ <- insertData - let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst } - let search = mkAggregateSearch Nothing $ mkAggregations "users" terms - searchExpectAggs search - searchValidBucketAgg search "users" toTerms - - -- One of these fails with 1.7.3 - it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [Map, Ordinals] - - it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals] - - it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map] - -- One of the above variants fails with 1.7.3. - - it "can execute value_count aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <> - mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus"))) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["value" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2 - , docCountPair "bogus_count" 0 - ])) - - it "can execute filter aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let ags = mkAggregations "bitemyapps" (FilterAgg (FilterAggregation (TermFilter (Term "user" "bitemyapp") defaultCache) Nothing)) <> - mkAggregations "notmyapps" (FilterAgg (FilterAggregation (TermFilter (Term "user" "notmyapp") defaultCache) Nothing)) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["doc_count" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "bitemyapps" 1 - , docCountPair "notmyapps" 1 - ])) - it "can execute date_range aggregations" $ withTestEnv $ do - let now = fromGregorian 2015 3 14 - let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0 - let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0 - let oldDoc = exampleTweet { postDate = ltAMonthAgo } - let newDoc = exampleTweet { postDate = ltAWeekAgo } - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1") - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2") - _ <- refreshIndex testIndex - let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth]) - let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) 
[SubtractTime 1 DMWeek]) - let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek]) - let ags = mkAggregations "date_ranges" (DateRangeAgg agg) - let search = mkAggregateSearch Nothing ags - res <- searchTweets search - liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2 - let bucks = do magrs <- fmapL show (aggregations <$> res) - agrs <- note "no aggregations returned" magrs - rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs - parseEither parseJSON rawBucks - let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0 - let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0 - liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*" - (Just fromMonthT) - (Just "2015-02-14T00:00:00.000Z") - Nothing - Nothing - 2 - Nothing - , DateRangeResult "2015-03-07T00:00:00.000Z-*" - (Just fromWeekT) - (Just "2015-03-07T00:00:00.000Z") - Nothing - Nothing - 1 - Nothing - ] - - it "returns date histogram aggregation results" $ withTestEnv $ do - _ <- insertData - let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute - let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram) - searchExpectAggs search - searchValidBucketAgg search "byDate" toDateHistogram - - it "returns date histogram using fractional date" $ withTestEnv $ do - _ <- insertData - let periods = [Year, Quarter, Month, Week, Day, Hour, Minute, Second] - let fractionals = map (FractionalInterval 1.5) [Weeks, Days, Hours, Minutes, Seconds] - let intervals = periods ++ fractionals - let histogram = mkDateHistogram (FieldName "postDate") - let search interval = mkAggregateSearch Nothing $ mkAggregations "byDate" $ DateHistogramAgg (histogram interval) - let expect interval = searchExpectAggs (search interval) - let valid interval = searchValidBucketAgg (search interval) "byDate" toDateHistogram - forM_ intervals expect - forM_ intervals valid - - it "can execute missing aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertExtra - let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra")) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["doc_count" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1])) - - describe "Highlights API" $ do - - it "returns highlight from query when there should be one" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search - liftIO $ - myHighlight `shouldBe` Right (Just (M.fromList [("message",["Use haskell!"])])) - - it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "user") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search - liftIO $ - myHighlight `shouldBe` Right Nothing - - describe "Source filtering" $ do - - it "doesn't include source when sources are disabled" $ withTestEnv $ do - searchExpectSource - NoSource - (Left (EsError 500 "Source was missing")) - - it 
"includes a source" $ withTestEnv $ do - searchExpectSource - (SourcePatterns (PopPattern (Pattern "message"))) - (Right (Object (HM.fromList [("message", String "Use haskell!")]))) - - it "includes sources" $ withTestEnv $ do - searchExpectSource - (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"])) - (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")]))) - - it "includes source patterns" $ withTestEnv $ do - searchExpectSource - (SourcePatterns (PopPattern (Pattern "*ge"))) - (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")]))) - - it "excludes source patterns" $ withTestEnv $ do - searchExpectSource - (SourceIncludeExclude (Include []) - (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"])) - (Right (Object (HM.fromList [("user",String "bitemyapp")]))) - - describe "ToJSON RegexpFlags" $ do - it "generates the correct JSON for AllRegexpFlags" $ - toJSON AllRegexpFlags `shouldBe` String "ALL" - - it "generates the correct JSON for NoRegexpFlags" $ - toJSON NoRegexpFlags `shouldBe` String "NONE" - - it "generates the correct JSON for SomeRegexpFlags" $ - let flags = AnyString :| [ Automaton - , Complement - , Empty - , Intersection - , Interval ] - in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL" - - prop "removes duplicates from flags" $ \(flags :: RegexpFlags) -> - let String str = toJSON flags - flagStrs = T.splitOn "|" str - in noDuplicates flagStrs - - describe "omitNulls" $ do - it "checks that omitNulls drops list elements when it should" $ - let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int])) - , "test2" .= (toJSON ("some value" :: Text))] - in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) - - it "checks that omitNulls doesn't drop list elements when it shouldn't" $ - let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int])) - , "test2" .= (toJSON ("some value" :: Text))] - in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0])) - , ("test2", String "some value")]) - it "checks that omitNulls drops non list elements when it should" $ - let dropped = omitNulls $ [ "test1" .= (toJSON Null) - , "test2" .= (toJSON ("some value" :: Text))] - in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) - it "checks that omitNulls doesn't drop non list elements when it shouldn't" $ - let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int)) - , "test2" .= (toJSON ("some value" :: Text))] - in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0) - , ("test2", String "some value")]) - describe "Monoid (SearchHits a)" $ do + describe "Monoid (SearchHits a)" $ prop "abides the monoid laws" $ eq $ prop_Monoid (T :: T (SearchHits ())) - describe "mkDocVersion" $ do + describe "mkDocVersion" $ prop "can never construct an out of range docVersion" $ \i -> let res = mkDocVersion i in case res of @@ -1613,113 +70,7 @@ main = hspec $ do (dv <= maxBound) .&&. 
docVersionNumber dv === i - describe "FsSnapshotRepo" $ do - prop "SnapshotRepo laws" $ \fsr -> - fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo) - - describe "snapshot repos" $ do - it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do - res <- getSnapshotRepos AllSnapshotRepos - liftIO $ case res of - Left e -> expectationFailure ("Expected a right but got Left " <> show e) - Right _ -> return () - - it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - let r2n = SnapshotRepoName "bloodhound-repo2" - withSnapshotRepo r1n $ \r1 -> - withSnapshotRepo r2n $ \r2 -> do - repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n])) - liftIO $ case repos of - Right xs -> do - let srt = L.sortBy (comparing gSnapshotRepoName) - srt xs `shouldBe` srt [r1, r2] - Left e -> expectationFailure (show e) - - it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - withSnapshotRepo r1n $ \r1 -> do - let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1)) - let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing - resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression - liftIO (validateStatus resp 200) - Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [])) - liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression) - - -- snapshot repo verification seems to have arrived in ES 1.4 - it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - withSnapshotRepo r1n $ \_ -> do - res <- verifySnapshotRepo r1n - liftIO $ case res of - Right (SnapshotVerification vs) - | null vs -> expectationFailure "Expected nonempty set of verifying nodes" - | otherwise -> return () - Left e -> expectationFailure (show e) - - describe "snapshots" $ do - it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - withSnapshotRepo r1n $ \_ -> do - res <- getSnapshots r1n AllSnapshots - liftIO $ case res of - Left e -> expectationFailure ("Expected a right but got Left " <> show e) - Right _ -> return () - - it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - withSnapshotRepo r1n $ \_ -> do - let s1n = SnapshotName "example-snapshot" - withSnapshot r1n s1n $ do - res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| [])) - liftIO $ case res of - Right [snap] - | snapInfoState snap == SnapshotSuccess && - snapInfoName snap == s1n -> return () - | otherwise -> expectationFailure (show snap) - Right [] -> expectationFailure "There were no snapshots" - Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps)) - Left e -> expectationFailure (show e) - - describe "snapshot restore" $ do - it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - withSnapshotRepo r1n $ \_ -> do - let s1n = SnapshotName "example-snapshot" - withSnapshot r1n s1n $ do - let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True } - -- have to close an index to restore it - resp1 <- closeIndex testIndex - liftIO (validateStatus resp1 200) - resp2 <- 
restoreSnapshot r1n s1n settings - liftIO (validateStatus resp2 200) - - it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do - let r1n = SnapshotRepoName "bloodhound-repo1" - withSnapshotRepo r1n $ \_ -> do - let s1n = SnapshotName "example-snapshot" - withSnapshot r1n s1n $ do - let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)" - let replace = RRTLit "restored-" :| [RRSubWholeMatch] - let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1" - oldEnoughForOverrides <- liftIO (atleast es15) - let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) } - let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True - , snapRestoreRenamePattern = Just pat - , snapRestoreRenameReplacement = Just replace - , snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides - then Just overrides - else Nothing - } - -- have to close an index to restore it - let go = do - resp <- restoreSnapshot r1n s1n settings - liftIO (validateStatus resp 200) - exists <- indexExists expectedIndex - liftIO (exists `shouldBe` True) - go `finally` deleteIndex expectedIndex - - describe "getNodesInfo" $ do + describe "getNodesInfo" $ it "fetches the responding node when LocalNode is used" $ withTestEnv $ do res <- getNodesInfo LocalNode liftIO $ case res of @@ -1729,7 +80,7 @@ main = hspec $ do Right NodesInfo {..} -> length nodesInfo `shouldBe` 1 Left e -> expectationFailure ("Expected NodesInfo but got " <> show e) - describe "getNodesStats" $ do + describe "getNodesStats" $ it "fetches the responding node when LocalNode is used" $ withTestEnv $ do res <- getNodesStats LocalNode liftIO $ case res of @@ -1739,7 +90,7 @@ main = hspec $ do Right NodesStats {..} -> length nodesStats `shouldBe` 1 Left e -> expectationFailure ("Expected NodesStats but got " <> show e) - describe "Enum DocVersion" $ do + describe "Enum DocVersion" $ it "follows the laws of Enum, Bounded" $ do evaluate (succ maxBound :: DocVersion) `shouldThrow` anyErrorCall evaluate (pred minBound :: DocVersion) `shouldThrow` anyErrorCall @@ -1749,11 +100,14 @@ main = hspec $ do enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound] enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound] - describe "scan&scroll API" $ do + describe "Scan & Scroll API" $ it "returns documents using the scan&scroll API" $ withTestEnv $ do _ <- insertData _ <- insertOther - let search = (mkSearch (Just $ MatchAllQuery Nothing) Nothing) { size = (Size 1) } + let search = + (mkSearch + (Just $ MatchAllQuery Nothing) Nothing) + { size = Size 1 } regular_search <- searchTweet search scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet] let scan_search = map hitSource scan_search' @@ -1761,206 +115,3 @@ main = hspec $ do regular_search `shouldBe` Right exampleTweet -- Check that the size restriction is being honored liftIO $ scan_search `shouldMatchList` [Just exampleTweet, Just otherTweet] - - describe "index aliases" $ do - it "handles the simple case of aliasing an existing index" $ do - let alias = IndexAlias (testIndex) (IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")) - let create = IndexAliasCreate Nothing Nothing - let action = AddAlias alias create - - withTestEnv $ do - resetIndex - resp <- updateIndexAliases (action :| []) - liftIO $ validateStatus resp 200 - let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| [])) - (do aliases <- withTestEnv getIndexAliases - let expected 
= IndexAliasSummary alias create - case aliases of - Right (IndexAliasesSummary summs) -> - L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected - Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup - - it "handles an alias with routing and a filter" $ do - let alias = IndexAlias (testIndex) (IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")) - let sar = SearchAliasRouting (RoutingValue "search val" :| []) - let iar = IndexAliasRouting (RoutingValue "index val") - let routing = GranularAliasRouting (Just sar) (Just iar) - let filter = LimitFilter 42 - let create = IndexAliasCreate (Just routing) (Just filter) - let action = AddAlias alias create - - withTestEnv $ do - resetIndex - resp <- updateIndexAliases (action :| []) - liftIO $ validateStatus resp 200 - let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| [])) - (do aliases <- withTestEnv getIndexAliases - let expected = IndexAliasSummary alias create - case aliases of - Right (IndexAliasesSummary summs) -> - L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected - Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup - - describe "Index Listing" $ do - it "returns a list of index names" $ withTestEnv $ do - _ <- createExampleIndex - ixns <- listIndices - liftIO (ixns `shouldContain` [testIndex]) - - describe "Index Settings" $ do - it "persists settings" $ withTestEnv $ do - _ <- deleteExampleIndex - _ <- createExampleIndex - let updates = BlocksWrite False :| [] - updateResp <- updateIndexSettings updates testIndex - liftIO $ validateStatus updateResp 200 - getResp <- getIndexSettings testIndex - liftIO $ - getResp `shouldBe` Right (IndexSettingsSummary - testIndex - (IndexSettings (ShardCount 1) (ReplicaCount 0)) - (NE.toList updates)) - - describe "Index Optimization" $ do - it "returns a successful response upon completion" $ withTestEnv $ do - _ <- createExampleIndex - resp <- optimizeIndex (IndexList (testIndex :| [])) defaultIndexOptimizationSettings - liftIO $ validateStatus resp 200 - - describe "Suggest" $ do - it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do - _ <- insertData - let {- query = QueryMatchNoneQuery - query = TermQuery (Term "user" "bitemyapp") Nothing -} - let phraseSuggester = mkPhraseSuggester (FieldName "message") - namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester) - search' = mkSearch Nothing Nothing - search = search' { suggestBody = Just namedSuggester } - expectedText = Just "use haskell" - resp <- searchByIndex testIndex search - parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet)) - case parsed of - Left e -> liftIO $ expectationFailure ("Expected an search suggestion but got " <> show e) - Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . 
nsrResponses <$> suggest sr) `shouldBe` expectedText - - describe "JSON instances" $ do - propJSON (Proxy :: Proxy Version) - propJSON (Proxy :: Proxy IndexName) - propJSON (Proxy :: Proxy MappingName) - propJSON (Proxy :: Proxy DocId) - propJSON (Proxy :: Proxy IndexAliasRouting) - propJSON (Proxy :: Proxy RoutingValue) - propJSON (Proxy :: Proxy ShardCount) - propJSON (Proxy :: Proxy ReplicaCount) - propJSON (Proxy :: Proxy TemplateName) - propJSON (Proxy :: Proxy TemplatePattern) - propJSON (Proxy :: Proxy QueryString) - propJSON (Proxy :: Proxy FieldName) - propJSON (Proxy :: Proxy CacheName) - propJSON (Proxy :: Proxy CacheKey) - propJSON (Proxy :: Proxy Existence) - propJSON (Proxy :: Proxy CutoffFrequency) - propJSON (Proxy :: Proxy Analyzer) - propJSON (Proxy :: Proxy MaxExpansions) - propJSON (Proxy :: Proxy Lenient) - propJSON (Proxy :: Proxy Tiebreaker) - propJSON (Proxy :: Proxy Boost) - propJSON (Proxy :: Proxy BoostTerms) - propJSON (Proxy :: Proxy MinimumMatch) - propJSON (Proxy :: Proxy DisableCoord) - propJSON (Proxy :: Proxy IgnoreTermFrequency) - propJSON (Proxy :: Proxy MinimumTermFrequency) - propJSON (Proxy :: Proxy MaxQueryTerms) - propJSON (Proxy :: Proxy Fuzziness) - propJSON (Proxy :: Proxy PrefixLength) - propJSON (Proxy :: Proxy TypeName) - propJSON (Proxy :: Proxy PercentMatch) - propJSON (Proxy :: Proxy StopWord) - propJSON (Proxy :: Proxy QueryPath) - propJSON (Proxy :: Proxy AllowLeadingWildcard) - propJSON (Proxy :: Proxy LowercaseExpanded) - propJSON (Proxy :: Proxy EnablePositionIncrements) - propJSON (Proxy :: Proxy AnalyzeWildcard) - propJSON (Proxy :: Proxy GeneratePhraseQueries) - propJSON (Proxy :: Proxy Locale) - propJSON (Proxy :: Proxy MaxWordLength) - propJSON (Proxy :: Proxy MinWordLength) - propJSON (Proxy :: Proxy PhraseSlop) - propJSON (Proxy :: Proxy MinDocFrequency) - propJSON (Proxy :: Proxy MaxDocFrequency) - propJSON (Proxy :: Proxy Filter) - propJSON (Proxy :: Proxy Query) - propJSON (Proxy :: Proxy SimpleQueryStringQuery) - propJSON (Proxy :: Proxy FieldOrFields) - propJSON (Proxy :: Proxy SimpleQueryFlag) - propJSON (Proxy :: Proxy RegexpQuery) - propJSON (Proxy :: Proxy QueryStringQuery) - propJSON (Proxy :: Proxy RangeQuery) - propJSON (Proxy :: Proxy PrefixQuery) - propJSON (Proxy :: Proxy NestedQuery) - propJSON (Proxy :: Proxy MoreLikeThisFieldQuery) - propJSON (Proxy :: Proxy MoreLikeThisQuery) - propJSON (Proxy :: Proxy IndicesQuery) - propJSON (Proxy :: Proxy HasParentQuery) - propJSON (Proxy :: Proxy HasChildQuery) - propJSON (Proxy :: Proxy FuzzyQuery) - propJSON (Proxy :: Proxy FuzzyLikeFieldQuery) - propJSON (Proxy :: Proxy FuzzyLikeThisQuery) - propJSON (Proxy :: Proxy FilteredQuery) - propJSON (Proxy :: Proxy DisMaxQuery) - propJSON (Proxy :: Proxy CommonTermsQuery) - propJSON (Proxy :: Proxy CommonMinimumMatch) - propJSON (Proxy :: Proxy BoostingQuery) - propJSON (Proxy :: Proxy BoolQuery) - propJSON (Proxy :: Proxy MatchQuery) - propJSON (Proxy :: Proxy MultiMatchQueryType) - propJSON (Proxy :: Proxy BooleanOperator) - propJSON (Proxy :: Proxy ZeroTermsQuery) - propJSON (Proxy :: Proxy MatchQueryType) - propJSON (Proxy :: Proxy AliasRouting) - propJSON (Proxy :: Proxy IndexAliasCreate) - propJSON (Proxy :: Proxy SearchAliasRouting) - propJSON (Proxy :: Proxy ScoreType) - propJSON (Proxy :: Proxy Distance) - propJSON (Proxy :: Proxy DistanceUnit) - propJSON (Proxy :: Proxy DistanceType) - propJSON (Proxy :: Proxy OptimizeBbox) - propJSON (Proxy :: Proxy GeoBoundingBoxConstraint) - propJSON (Proxy :: Proxy GeoFilterType) 
- propJSON (Proxy :: Proxy GeoBoundingBox)
- propJSON (Proxy :: Proxy LatLon)
- propJSON (Proxy :: Proxy RangeExecution)
- prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs ->
- let expected = case rfs of
- SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (nub (NE.toList fs)))
- x -> x
- in parseEither parseJSON (toJSON rfs) === Right expected
- propJSON (Proxy :: Proxy BoolMatch)
- propJSON (Proxy :: Proxy Term)
- propJSON (Proxy :: Proxy MultiMatchQuery)
- propJSON (Proxy :: Proxy IndexSettings)
- propJSON (Proxy :: Proxy UpdatableIndexSetting')
- propJSON (Proxy :: Proxy ReplicaBounds)
- propJSON (Proxy :: Proxy Bytes)
- propJSON (Proxy :: Proxy AllocationPolicy)
- propJSON (Proxy :: Proxy InitialShardCount)
- propJSON (Proxy :: Proxy FSType)
- propJSON (Proxy :: Proxy CompoundFormat)
- propJSON (Proxy :: Proxy TemplateQueryInline)
- propJSON (Proxy :: Proxy Suggest)
- propJSON (Proxy :: Proxy DirectGenerators)
- propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes)
-
--- Temporary solution for lacking of generic derivation of Arbitrary
--- We use generics-sop, as it's much more concise than directly using GHC.Generics
---
--- This will be unneeded after https://github.com/nick8325/quickcheck/pull/112
--- is merged and released
-sopArbitrary :: forall a. (Generic a, SOP.GTo a, SOP.All SOP.SListI (SOP.GCode a), SOP.All2 Arbitrary (SOP.GCode a)) => Gen a
-sopArbitrary = fmap SOP.gto sopArbitrary'
-
-sopArbitrary' :: forall xss. (SOP.All SOP.SListI xss, SOP.All2 Arbitrary xss) => Gen (SOP.SOP SOP.I xss)
-sopArbitrary' = SOP.hsequence =<< elements (SOP.apInjs_POP $ SOP.hcpure p arbitrary)
- where
- p :: Proxy Arbitrary
- p = Proxy
diff --git a/tests/V5/Test/Aggregation.hs b/tests/V5/Test/Aggregation.hs
new file mode 100644
index 0000000..394aa22
--- /dev/null
+++ b/tests/V5/Test/Aggregation.hs
@@ -0,0 +1,147 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Test.Aggregation (spec) where
+
+import Test.Common
+import Test.Import
+
+import Control.Error (fmapL, note)
+import qualified Data.Map as M
+import qualified Database.V5.Bloodhound
+
+spec :: Spec
+spec =
+ describe "Aggregation API" $ do
+ it "returns term aggregation results" $ withTestEnv $ do
+ _ <- insertData
+ let terms = TermsAgg $ mkTermsAggregation "user"
+ let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
+ searchExpectAggs search
+ searchValidBucketAgg search "users" toTerms
+
+ it "returns sub-aggregation results" $ withTestEnv $ do
+ _ <- insertData
+ let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age"
+ agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs}
+ search = mkAggregateSearch Nothing $ mkAggregations "users" agg
+ reply <- searchByIndex testIndex search
+ let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
+ usersAggResults = result >>= aggregations >>= toTerms "users"
+ subAggResults = usersAggResults >>= (listToMaybe .
buckets) >>= termsAggs >>= toTerms "age_agg"
+ subAggResultsExists = isJust subAggResults
+ liftIO $ subAggResultsExists `shouldBe` True
+
+ it "returns cardinality aggregation results" $ withTestEnv $ do
+ _ <- insertData
+ let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user"
+ let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality
+ let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 }
+ searchExpectAggs search'
+ let docCountPair k n = (k, object ["value" .= Number n])
+ res <- searchTweets search'
+ liftIO $
+ fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1]))
+
+ it "returns stats aggregation results" $ withTestEnv $ do
+ _ <- insertData
+ let stats = StatsAgg $ mkStatsAggregation $ FieldName "age"
+ let search = mkAggregateSearch Nothing $ mkAggregations "users" stats
+ let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 }
+ searchExpectAggs search'
+ let statsAggRes k n = (k, object [ "max" .= Number n
+ , "avg" .= Number n
+ , "count" .= Number 1
+ , "min" .= Number n
+ , "sum" .= Number n])
+ res <- searchTweets search'
+ liftIO $
+ fmap aggregations res `shouldBe` Right (Just (M.fromList [ statsAggRes "users" 10000]))
+
+ it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do
+ _ <- insertData
+ let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst }
+ let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
+ searchExpectAggs search
+ searchValidBucketAgg search "users" toTerms
+
+ -- One of these fails with 1.7.3
+ it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do
+ _ <- insertData
+ searchTermsAggHint [Map, Ordinals]
+
+ it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do
+ _ <- insertData
+ searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals]
+
+ it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do
+ _ <- insertData
+ searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map]
+ -- One of the three execution-hint variants above is the one that fails
+ -- on ES 1.7.3; see the sketch below for how these searches are built.
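+ -- For reference, a minimal sketch of building one of these hinted
+ -- searches by hand (illustrative values; the helpers are the same
+ -- ones used by searchTermsAggHint in Test.Common):
+ --
+ -- > let terms = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just Map }
+ -- > let search = mkAggregateSearch Nothing (mkAggregations "users" terms)
+ --
+ -- which is expected to serialize to an ES request body along the
+ -- lines of {"users": {"terms": {"field": "user", "execution_hint": "map"}}}.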
+ + it "can execute value_count aggregations" $ withTestEnv $ do + _ <- insertData + _ <- insertOther + let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <> + mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus"))) + let search = mkAggregateSearch Nothing ags + let docCountPair k n = (k, object ["value" .= Number n]) + res <- searchTweets search + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2 + , docCountPair "bogus_count" 0 + ])) + + it "can execute date_range aggregations" $ withTestEnv $ do + let now = fromGregorian 2015 3 14 + let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0 + let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0 + let oldDoc = exampleTweet { postDate = ltAMonthAgo } + let newDoc = exampleTweet { postDate = ltAWeekAgo } + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1") + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2") + _ <- refreshIndex testIndex + let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth]) + let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek]) + let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek]) + let ags = mkAggregations "date_ranges" (DateRangeAgg agg) + let search = mkAggregateSearch Nothing ags + res <- searchTweets search + liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2 + let bucks = do magrs <- fmapL show (aggregations <$> res) + agrs <- note "no aggregations returned" magrs + rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs + parseEither parseJSON rawBucks + let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0 + let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0 + liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*" + (Just fromMonthT) + (Just "2015-02-14T00:00:00.000Z") + Nothing + Nothing + 2 + Nothing + , DateRangeResult "2015-03-07T00:00:00.000Z-*" + (Just fromWeekT) + (Just "2015-03-07T00:00:00.000Z") + Nothing + Nothing + 1 + Nothing + ] + + it "returns date histogram aggregation results" $ withTestEnv $ do + _ <- insertData + let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute + let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram) + searchExpectAggs search + searchValidBucketAgg search "byDate" toDateHistogram + + it "can execute missing aggregations" $ withTestEnv $ do + _ <- insertData + _ <- insertExtra + let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra")) + let search = mkAggregateSearch Nothing ags + let docCountPair k n = (k, object ["doc_count" .= Number n]) + res <- searchTweets search + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1])) diff --git a/tests/V5/Test/ApproxEq.hs b/tests/V5/Test/ApproxEq.hs new file mode 100644 index 0000000..c5489f9 --- /dev/null +++ b/tests/V5/Test/ApproxEq.hs @@ -0,0 +1,63 @@ +{-# LANGUAGE DefaultSignatures #-} +{-# LANGUAGE OverloadedStrings #-} + +module Test.ApproxEq where + +import Database.V5.Bloodhound + +import Test.Import + +import qualified Data.List.NonEmpty as NE + +-- | Typeclass for "equal where it matters". 
Use this to specify
+-- less-strict equivalence for things such as lists that can wind up
+-- in an unpredictable order
+class ApproxEq a where
+ (=~) :: a -> a -> Bool
+
+ showApproxEq :: a -> String
+ default showApproxEq :: (Show a) => a -> String
+ showApproxEq = show
+
+(==~) :: (ApproxEq a) => a -> a -> Property
+a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b)
+
+instance ApproxEq NominalDiffTime where (=~) = (==)
+instance ApproxEq Bool where (=~) = (==)
+instance ApproxEq Int where (=~) = (==)
+instance (Eq a, Show a) => ApproxEq (Maybe a) where (=~) = (==)
+instance ApproxEq Char where
+ (=~) = (==)
+
+instance ApproxEq NodeAttrFilter where (=~) = (==)
+instance ApproxEq NodeAttrName where (=~) = (==)
+instance (Eq a, Show a) => ApproxEq (NonEmpty a) where (=~) = (==)
+instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where
+ Left a =~ Left b = a =~ b
+ Right a =~ Right b = a =~ b
+ _ =~ _ = False
+ showApproxEq (Left x) = "Left " <> showApproxEq x
+ showApproxEq (Right x) = "Right " <> showApproxEq x
+instance (ApproxEq a, Show a) => ApproxEq [a] where
+ -- Lists must agree in length; zipWith alone would silently let a
+ -- shorter list pass as a prefix match.
+ as =~ bs = length as == length bs && and (zipWith (=~) as bs)
+instance ApproxEq ReplicaCount where (=~) = (==)
+instance ApproxEq ReplicaBounds where (=~) = (==)
+instance ApproxEq Bytes where (=~) = (==)
+instance ApproxEq AllocationPolicy where (=~) = (==)
+instance ApproxEq InitialShardCount where (=~) = (==)
+instance ApproxEq FSType where (=~) = (==)
+
+-- | Due to the way nodeattrfilters get serialized here, they may come
+-- out in a different order, but they are morally equivalent
+instance ApproxEq UpdatableIndexSetting where
+ RoutingAllocationInclude a =~ RoutingAllocationInclude b =
+ NE.sort a =~ NE.sort b
+ RoutingAllocationExclude a =~ RoutingAllocationExclude b =
+ NE.sort a =~ NE.sort b
+ RoutingAllocationRequire a =~ RoutingAllocationRequire b =
+ NE.sort a =~ NE.sort b
+ a =~ b = a == b
+ showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs))
+ showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs))
+ showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs))
+ showApproxEq x = show x
diff --git a/tests/V5/Test/BulkAPI.hs b/tests/V5/Test/BulkAPI.hs
new file mode 100644
index 0000000..ae77e17
--- /dev/null
+++ b/tests/V5/Test/BulkAPI.hs
@@ -0,0 +1,95 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Test.BulkAPI (spec) where
+
+import Test.Common
+import Test.Import
+
+import qualified Data.Vector as V
+import qualified Lens.Micro.Aeson as LMA
+
+newtype BulkTest =
+ BulkTest Text
+ deriving (Eq, Show)
+
+instance ToJSON BulkTest where
+ toJSON (BulkTest name') =
+ object ["name" .= name']
+
+instance FromJSON BulkTest where
+ parseJSON = withObject "BulkTest" parse
+ where
+ parse o = do
+ t <- o .: "name"
+ BulkTest <$> parseJSON t
+
+spec :: Spec
+spec =
+ describe "Bulk API" $
+ it "inserts all documents we request" $ withTestEnv $ do
+ _ <- insertData
+ let firstTest = BulkTest "blah"
+ let secondTest = BulkTest "bloo"
+ let thirdTest = BulkTest "graffle"
+ let fourthTest = BulkTest "garabadoo"
+ let fifthTest = BulkTest "serenity"
+ let firstDoc = BulkIndex testIndex
+ testMapping (DocId "2") (toJSON firstTest)
+ let secondDoc = BulkCreate testIndex
+ testMapping (DocId "3") (toJSON secondTest)
+ let thirdDoc = BulkCreateEncoding testIndex
+ testMapping (DocId "4") (toEncoding thirdTest)
+ let fourthDoc = BulkIndexAuto testIndex
+ testMapping (toJSON fourthTest)
+ let fifthDoc
= BulkIndexEncodingAuto testIndex
+ testMapping (toEncoding fifthTest)
+ let stream = V.fromList [firstDoc, secondDoc, thirdDoc, fourthDoc, fifthDoc]
+ _ <- bulk stream
+ -- liftIO $ pPrint bulkResp
+ _ <- refreshIndex testIndex
+ -- liftIO $ pPrint refreshResp
+ fDoc <- getDocument testIndex testMapping (DocId "2")
+ sDoc <- getDocument testIndex testMapping (DocId "3")
+ tDoc <- getDocument testIndex testMapping (DocId "4")
+ -- note that we cannot query for fourthDoc and fifthDoc since we
+ -- do not know their autogenerated ids.
+ let maybeFirst =
+ eitherDecode
+ $ responseBody fDoc
+ :: Either String (EsResult BulkTest)
+ let maybeSecond =
+ eitherDecode
+ $ responseBody sDoc
+ :: Either String (EsResult BulkTest)
+ let maybeThird =
+ eitherDecode
+ $ responseBody tDoc
+ :: Either String (EsResult BulkTest)
+ -- liftIO $ pPrint [maybeFirst, maybeSecond, maybeThird]
+ liftIO $ do
+ fmap getSource maybeFirst `shouldBe` Right (Just firstTest)
+ fmap getSource maybeSecond `shouldBe` Right (Just secondTest)
+ fmap getSource maybeThird `shouldBe` Right (Just thirdTest)
+ -- Since we can't get the docs by doc id, we check for their existence in
+ -- a match all query.
+ let query = MatchAllQuery Nothing
+ let search = mkSearch (Just query) Nothing
+ resp <- searchByIndex testIndex search
+ parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value))
+ case parsed of
+ Left e ->
+ liftIO $ expectationFailure ("Expected a successful search result but got: " <> show e)
+ (Right sr) -> do
+ liftIO $
+ hitsTotal (searchHits sr) `shouldBe` 6
+ let nameList :: [Text]
+ nameList =
+ hits (searchHits sr)
+ ^.. traverse
+ . to hitSource
+ . _Just
+ . LMA.key "name"
+ . _String
+ liftIO $
+ nameList
+ `shouldBe` ["blah","bloo","graffle","garabadoo","serenity"]
diff --git a/tests/V5/Test/Common.hs b/tests/V5/Test/Common.hs
new file mode 100644
index 0000000..53bd8b2
--- /dev/null
+++ b/tests/V5/Test/Common.hs
@@ -0,0 +1,292 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Test.Common where
+
+import Test.Import
+
+import qualified Data.Map as M
+import qualified Data.Version as Vers
+import qualified Network.HTTP.Types.Status as NHTS
+
+testServer :: Server
+testServer = Server "http://localhost:9200"
+testIndex :: IndexName
+testIndex = IndexName "bloodhound-tests-twitter-1"
+testMapping :: MappingName
+testMapping = MappingName "tweet"
+
+withTestEnv :: BH IO a -> IO a
+withTestEnv = withBH defaultManagerSettings testServer
+
+data Location = Location { lat :: Double
+ , lon :: Double } deriving (Eq, Show)
+
+data Tweet = Tweet { user :: Text
+ , postDate :: UTCTime
+ , message :: Text
+ , age :: Int
+ , location :: Location
+ , extra :: Maybe Text }
+ deriving (Eq, Show)
+
+$(deriveJSON defaultOptions ''Location)
+$(deriveJSON defaultOptions ''Tweet)
+
+data ParentMapping = ParentMapping deriving (Eq, Show)
+
+instance ToJSON ParentMapping where
+ toJSON ParentMapping =
+ object ["properties" .=
+ object [ "user" .= object ["type" .= ("string" :: Text)
+ , "fielddata" .= True
+ ]
+ -- Serializing the date as a date is breaking other tests, mysteriously.
+ -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("keyword" :: Text)] + ]] + +es13 :: Vers.Version +es13 = Vers.Version [1, 3, 0] [] + +es12 :: Vers.Version +es12 = Vers.Version [1, 2, 0] [] + +es11 :: Vers.Version +es11 = Vers.Version [1, 1, 0] [] + +es14 :: Vers.Version +es14 = Vers.Version [1, 4, 0] [] + +es15 :: Vers.Version +es15 = Vers.Version [1, 5, 0] [] + +es16 :: Vers.Version +es16 = Vers.Version [1, 6, 0] [] + +es20 :: Vers.Version +es20 = Vers.Version [2, 0, 0] [] + +es50 :: Vers.Version +es50 = Vers.Version [5, 0, 0] [] + +getServerVersion :: IO (Maybe Vers.Version) +getServerVersion = fmap extractVersion <$> withTestEnv getStatus + where + extractVersion = versionNumber . number . version + +createExampleIndex :: (MonadBH m) => m Reply +createExampleIndex = + createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex + +deleteExampleIndex :: (MonadBH m) => m Reply +deleteExampleIndex = + deleteIndex testIndex + +validateStatus :: Show body => Response body -> Int -> Expectation +validateStatus resp expected = + if actual == expected + then return () + else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body) + where + actual = NHTS.statusCode (responseStatus resp) + body = responseBody resp + +data ChildMapping = ChildMapping deriving (Eq, Show) + +instance ToJSON ChildMapping where + toJSON ChildMapping = + object ["_parent" .= object ["type" .= ("parent" :: Text)] + , "properties" .= + object [ "user" .= object ["type" .= ("string" :: Text) + , "fielddata" .= True + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("keyword" :: Text)] + ]] + +data TweetMapping = TweetMapping deriving (Eq, Show) + +instance ToJSON TweetMapping where + toJSON TweetMapping = + object ["tweet" .= + object ["properties" .= + object [ "user" .= object [ "type" .= ("string" :: Text) + , "fielddata" .= True + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("keyword" :: Text)] + ]]] + +exampleTweet :: Tweet +exampleTweet = Tweet { user = "bitemyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 10) + , message = "Use haskell!" + , age = 10000 + , location = Location 40.12 (-71.34) + , extra = Nothing } + +tweetWithExtra :: Tweet +tweetWithExtra = Tweet { user = "bitemyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 10) + , message = "Use haskell!" 
+ , age = 10000 + , location = Location 40.12 (-71.34) + , extra = Just "blah blah" } + +newAge :: Int +newAge = 31337 + +newUser :: Text +newUser = "someotherapp" + +tweetPatch :: Value +tweetPatch = + object [ "age" .= newAge + , "user" .= newUser + ] + +patchedTweet :: Tweet +patchedTweet = exampleTweet{age = newAge, user = newUser} + +otherTweet :: Tweet +otherTweet = Tweet { user = "notmyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 11) + , message = "Use haskell!" + , age = 1000 + , location = Location 40.12 (-71.34) + , extra = Nothing } + +resetIndex :: BH IO () +resetIndex = do + _ <- deleteExampleIndex + _ <- createExampleIndex + _ <- putMapping testIndex testMapping TweetMapping + return () + +insertData :: BH IO Reply +insertData = do + resetIndex + insertData' defaultIndexDocumentSettings + +insertData' :: IndexDocumentSettings -> BH IO Reply +insertData' ids = do + r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1") + _ <- refreshIndex testIndex + return r + +updateData :: BH IO Reply +updateData = do + r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1") + _ <- refreshIndex testIndex + return r + +insertOther :: BH IO () +insertOther = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2") + _ <- refreshIndex testIndex + return () + +insertExtra :: BH IO () +insertExtra = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4") + _ <- refreshIndex testIndex + return () + +insertWithSpaceInId :: BH IO () +insertWithSpaceInId = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World") + _ <- refreshIndex testIndex + return () + +searchTweet :: Search -> BH IO (Either EsError Tweet) +searchTweet search = do + result <- searchTweets search + let myTweet :: Either EsError Tweet + myTweet = grabFirst result + return myTweet + +searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet)) +searchTweets search = parseEsResponse =<< searchByIndex testIndex search + +searchExpectNoResults :: Search -> BH IO () +searchExpectNoResults search = do + result <- searchTweets search + let emptyHits = fmap (hits . searchHits) result + liftIO $ + emptyHits `shouldBe` Right [] + +searchExpectAggs :: Search -> BH IO () +searchExpectAggs search = do + reply <- searchByIndex testIndex search + let isEmpty x = return (M.null x) + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + liftIO $ + (result >>= aggregations >>= isEmpty) `shouldBe` Just False + +searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) => + Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO () +searchValidBucketAgg search aggKey extractor = do + reply <- searchByIndex testIndex search + let bucketDocs = docCount . head . buckets + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x) + liftIO $ + count `shouldBe` Just 1 + +searchTermsAggHint :: [ExecutionHint] -> BH IO () +searchTermsAggHint hints = do + let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint } + let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint + forM_ hints $ searchExpectAggs . 
search
+ forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms)
+
+searchTweetHighlight :: Search
+ -> BH IO (Either EsError (Maybe HitHighlight))
+searchTweetHighlight search = do
+ result <- searchTweets search
+ let tweetHit :: Either EsError (Maybe (Hit Tweet))
+ tweetHit = fmap (headMay . hits . searchHits) result
+ myHighlight :: Either EsError (Maybe HitHighlight)
+ myHighlight = (join . fmap hitHighlight) <$> tweetHit
+ return myHighlight
+
+searchExpectSource :: Source -> Either EsError Value -> BH IO ()
+searchExpectSource src expected = do
+ _ <- insertData
+ let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell")
+ let search = (mkSearch (Just query) Nothing) { source = Just src }
+ reply <- searchByIndex testIndex search
+ result <- parseEsResponse reply
+ let value = grabFirst result
+ liftIO $
+ value `shouldBe` expected
+
+atleast :: Vers.Version -> IO Bool
+atleast v = getServerVersion >>= \x -> return $ x >= Just v
+
+atmost :: Vers.Version -> IO Bool
+atmost v = getServerVersion >>= \x -> return $ x <= Just v
+
+is :: Vers.Version -> IO Bool
+is v = getServerVersion >>= \x -> return $ x == Just v
diff --git a/tests/V5/Test/Documents.hs b/tests/V5/Test/Documents.hs
new file mode 100644
index 0000000..d9052df
--- /dev/null
+++ b/tests/V5/Test/Documents.hs
@@ -0,0 +1,51 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Test.Documents where
+
+import Test.Common
+import Test.Import
+
+spec :: Spec
+spec =
+ describe "document API" $ do
+ it "indexes, updates, and then gets the generated document" $ withTestEnv $ do
+ _ <- insertData
+ _ <- updateData
+ docInserted <- getDocument testIndex testMapping (DocId "1")
+ let newTweet = eitherDecode
+ (responseBody docInserted) :: Either String (EsResult Tweet)
+ liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet)
+
+ it "indexes and then gets a document with a DocId containing a space" $ withTestEnv $ do
+ _ <- insertWithSpaceInId
+ docInserted <- getDocument testIndex testMapping (DocId "Hello World")
+ let newTweet = eitherDecode
+ (responseBody docInserted) :: Either String (EsResult Tweet)
+ liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet)
+
+ it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do
+ doc <- getDocument testIndex testMapping (DocId "bogus")
+ let noTweet = eitherDecode
+ (responseBody doc) :: Either String (EsResult Tweet)
+ liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing
+
+ it "can use optimistic concurrency control" $ withTestEnv $ do
+ let ev = ExternalDocVersion minBound
+ let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev }
+ resetIndex
+ res <- insertData' cfg
+ liftIO $ isCreated res `shouldBe` True
+ res' <- insertData' cfg
+ liftIO $ isVersionConflict res' `shouldBe` True
+
+ it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do
+ resetIndex
+ _ <- putMapping testIndex (MappingName "child") ChildMapping
+ _ <- putMapping testIndex (MappingName "parent") ParentMapping
+ _ <- indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1")
+ let parent = (Just . DocumentParent .
DocId) "1" + ids = IndexDocumentSettings NoVersionControl parent + _ <- indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2") + _ <- refreshIndex testIndex + exists <- documentExists testIndex (MappingName "child") parent (DocId "2") + liftIO $ exists `shouldBe` True diff --git a/tests/V5/Test/Generators.hs b/tests/V5/Test/Generators.hs new file mode 100644 index 0000000..ea758af --- /dev/null +++ b/tests/V5/Test/Generators.hs @@ -0,0 +1,536 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} + +module Test.Generators where + +import Database.V5.Bloodhound + +import Test.Import + +import qualified Data.HashMap.Strict as HM +import qualified Data.List as L +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as M +import qualified Data.Text as T +import qualified Data.Version as Vers +import Test.QuickCheck.TH.Generators + +import Test.ApproxEq + +instance Arbitrary NominalDiffTime where + arbitrary = fromInteger <$> arbitrary + +#if !MIN_VERSION_QuickCheck(2,8,0) +instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where + arbitrary = M.fromList <$> arbitrary +#endif + +instance Arbitrary Text where + arbitrary = T.pack <$> arbitrary + +instance Arbitrary UTCTime where + arbitrary = UTCTime + <$> arbitrary + <*> (fromRational . toRational <$> choose (0::Double, 86400)) + +instance Arbitrary Day where + arbitrary = + ModifiedJulianDay . (2000 +) <$> arbitrary + shrink = + (ModifiedJulianDay <$>) . shrink . toModifiedJulianDay + +#if !MIN_VERSION_QuickCheck(2,9,0) +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = liftA2 (:|) arbitrary arbitrary +#endif + +arbitraryScore :: Gen Score +arbitraryScore = fmap getPositive <$> arbitrary + +instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where + arbitrary = Hit <$> arbitrary + <*> arbitrary + <*> arbitrary + <*> arbitraryScore + <*> arbitrary + <*> arbitrary + <*> arbitrary + +instance Arbitrary HitFields where + arbitrary = pure (HitFields M.empty) + shrink = const [] + +instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where + arbitrary = reduceSize $ do + tot <- getPositive <$> arbitrary + score <- arbitraryScore + hs <- arbitrary + return $ SearchHits tot score hs + + +reduceSize :: Gen a -> Gen a +reduceSize f = sized $ \n -> resize (n `div` 2) f + +arbitraryAlphaNum :: Gen Char +arbitraryAlphaNum = oneof [choose ('a', 'z') + ,choose ('A','Z') + , choose ('0', '9')] + +instance Arbitrary RoutingValue where + arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum + +instance Arbitrary AliasRouting where + arbitrary = oneof [allAlias + ,one + ,theOther + ,both'] + where one = GranularAliasRouting + <$> (Just <$> arbitrary) + <*> pure Nothing + theOther = GranularAliasRouting Nothing + <$> (Just <$> arbitrary) + both' = GranularAliasRouting + <$> (Just <$> arbitrary) + <*> (Just <$> arbitrary) + allAlias = AllAliasRouting <$> arbitrary + + + +instance Arbitrary FieldName where + arbitrary = + FieldName + . T.pack + <$> listOf1 arbitraryAlphaNum + + +#if MIN_VERSION_base(4,10,0) +-- Test.QuickCheck.Modifiers + +qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a +qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs) +qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!" 
+ +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = + qcNonEmptyToNonEmpty + <$> arbitrary +#endif +instance Arbitrary ScriptFields where + arbitrary = + pure $ ScriptFields $ + HM.fromList [] + + shrink = const [] + +instance Arbitrary ScriptParams where + arbitrary = + pure $ ScriptParams $ + HM.fromList [ ("a", Number 42) + , ("b", String "forty two") + ] + + shrink = const [] + +instance Arbitrary RegexpFlags where + arbitrary = oneof [ pure AllRegexpFlags + , pure NoRegexpFlags + , SomeRegexpFlags <$> genUniqueFlags + ] + where genUniqueFlags = + NE.fromList . L.nub + <$> listOf1 arbitrary + +instance Arbitrary IndexAliasCreate where + arbitrary = + IndexAliasCreate + <$> arbitrary + <*> reduceSize arbitrary + +instance Arbitrary Query where + arbitrary = + reduceSize + $ oneof [ TermQuery <$> arbitrary <*> arbitrary + , TermsQuery <$> arbitrary <*> arbitrary + , QueryMatchQuery <$> arbitrary + , QueryMultiMatchQuery <$> arbitrary + , QueryBoolQuery <$> arbitrary + , QueryBoostingQuery <$> arbitrary + , QueryCommonTermsQuery <$> arbitrary + , ConstantScoreQuery <$> arbitrary <*> arbitrary + , QueryDisMaxQuery <$> arbitrary + , QueryFuzzyLikeThisQuery <$> arbitrary + , QueryFuzzyLikeFieldQuery <$> arbitrary + , QueryFuzzyQuery <$> arbitrary + , QueryHasChildQuery <$> arbitrary + , QueryHasParentQuery <$> arbitrary + , IdsQuery <$> arbitrary <*> arbitrary + , QueryIndicesQuery <$> arbitrary + , MatchAllQuery <$> arbitrary + , QueryMoreLikeThisQuery <$> arbitrary + , QueryMoreLikeThisFieldQuery <$> arbitrary + , QueryNestedQuery <$> arbitrary + , QueryPrefixQuery <$> arbitrary + , QueryQueryStringQuery <$> arbitrary + , QuerySimpleQueryStringQuery <$> arbitrary + , QueryRangeQuery <$> arbitrary + , QueryRegexpQuery <$> arbitrary + , QueryTemplateQueryInline <$> arbitrary + ] + -- TODO: Implement shrink + -- shrink = genericShrink + +instance Arbitrary Filter where + arbitrary = + Filter <$> arbitrary + shrink (Filter q) = + Filter <$> shrink q + +instance Arbitrary ReplicaBounds where + arbitrary = oneof [ replicasBounded + , replicasLowerBounded + , pure ReplicasUnbounded + ] + where replicasBounded = do + Positive a <- arbitrary + Positive b <- arbitrary + return (ReplicasBounded a b) + replicasLowerBounded = do + Positive a <- arbitrary + return (ReplicasLowerBounded a) + +instance Arbitrary NodeAttrName where + arbitrary = + NodeAttrName + . T.pack + <$> listOf1 arbitraryAlphaNum + + +instance Arbitrary NodeAttrFilter where + arbitrary = do + n <- arbitrary + s:ss <- listOf1 (listOf1 arbitraryAlphaNum) + let ts = T.pack <$> s :| ss + return (NodeAttrFilter n ts) + +instance Arbitrary VersionNumber where + arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary + where + mk versions = VersionNumber (Vers.Version versions []) + +instance Arbitrary TemplateQueryKeyValuePairs where + arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary + shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . 
shrink $ HM.toList x + +makeArbitrary ''IndexName +instance Arbitrary IndexName where arbitrary = arbitraryIndexName +makeArbitrary ''MappingName +instance Arbitrary MappingName where arbitrary = arbitraryMappingName +makeArbitrary ''DocId +instance Arbitrary DocId where arbitrary = arbitraryDocId +makeArbitrary ''Version +instance Arbitrary Version where arbitrary = arbitraryVersion +makeArbitrary ''BuildHash +instance Arbitrary BuildHash where arbitrary = arbitraryBuildHash +makeArbitrary ''IndexAliasRouting +instance Arbitrary IndexAliasRouting where arbitrary = arbitraryIndexAliasRouting +makeArbitrary ''ShardCount +instance Arbitrary ShardCount where arbitrary = arbitraryShardCount +makeArbitrary ''ReplicaCount +instance Arbitrary ReplicaCount where arbitrary = arbitraryReplicaCount +makeArbitrary ''TemplateName +instance Arbitrary TemplateName where arbitrary = arbitraryTemplateName +makeArbitrary ''TemplatePattern +instance Arbitrary TemplatePattern where arbitrary = arbitraryTemplatePattern +makeArbitrary ''QueryString +instance Arbitrary QueryString where arbitrary = arbitraryQueryString +makeArbitrary ''CacheName +instance Arbitrary CacheName where arbitrary = arbitraryCacheName +makeArbitrary ''CacheKey +instance Arbitrary CacheKey where arbitrary = arbitraryCacheKey +makeArbitrary ''Existence +instance Arbitrary Existence where arbitrary = arbitraryExistence +makeArbitrary ''CutoffFrequency +instance Arbitrary CutoffFrequency where arbitrary = arbitraryCutoffFrequency +makeArbitrary ''Analyzer +instance Arbitrary Analyzer where arbitrary = arbitraryAnalyzer +makeArbitrary ''MaxExpansions +instance Arbitrary MaxExpansions where arbitrary = arbitraryMaxExpansions +makeArbitrary ''Lenient +instance Arbitrary Lenient where arbitrary = arbitraryLenient +makeArbitrary ''Tiebreaker +instance Arbitrary Tiebreaker where arbitrary = arbitraryTiebreaker +makeArbitrary ''Boost +instance Arbitrary Boost where arbitrary = arbitraryBoost +makeArbitrary ''BoostTerms +instance Arbitrary BoostTerms where arbitrary = arbitraryBoostTerms +makeArbitrary ''MinimumMatch +instance Arbitrary MinimumMatch where arbitrary = arbitraryMinimumMatch +makeArbitrary ''DisableCoord +instance Arbitrary DisableCoord where arbitrary = arbitraryDisableCoord +makeArbitrary ''IgnoreTermFrequency +instance Arbitrary IgnoreTermFrequency where arbitrary = arbitraryIgnoreTermFrequency +makeArbitrary ''MinimumTermFrequency +instance Arbitrary MinimumTermFrequency where arbitrary = arbitraryMinimumTermFrequency +makeArbitrary ''MaxQueryTerms +instance Arbitrary MaxQueryTerms where arbitrary = arbitraryMaxQueryTerms +makeArbitrary ''Fuzziness +instance Arbitrary Fuzziness where arbitrary = arbitraryFuzziness +makeArbitrary ''PrefixLength +instance Arbitrary PrefixLength where arbitrary = arbitraryPrefixLength +makeArbitrary ''TypeName +instance Arbitrary TypeName where arbitrary = arbitraryTypeName +makeArbitrary ''PercentMatch +instance Arbitrary PercentMatch where arbitrary = arbitraryPercentMatch +makeArbitrary ''StopWord +instance Arbitrary StopWord where arbitrary = arbitraryStopWord +makeArbitrary ''QueryPath +instance Arbitrary QueryPath where arbitrary = arbitraryQueryPath +makeArbitrary ''AllowLeadingWildcard +instance Arbitrary AllowLeadingWildcard where arbitrary = arbitraryAllowLeadingWildcard +makeArbitrary ''LowercaseExpanded +instance Arbitrary LowercaseExpanded where arbitrary = arbitraryLowercaseExpanded +makeArbitrary ''EnablePositionIncrements +instance Arbitrary EnablePositionIncrements where arbitrary = 
arbitraryEnablePositionIncrements +makeArbitrary ''AnalyzeWildcard +instance Arbitrary AnalyzeWildcard where arbitrary = arbitraryAnalyzeWildcard +makeArbitrary ''GeneratePhraseQueries +instance Arbitrary GeneratePhraseQueries where arbitrary = arbitraryGeneratePhraseQueries +makeArbitrary ''Locale +instance Arbitrary Locale where arbitrary = arbitraryLocale +makeArbitrary ''MaxWordLength +instance Arbitrary MaxWordLength where arbitrary = arbitraryMaxWordLength +makeArbitrary ''MinWordLength +instance Arbitrary MinWordLength where arbitrary = arbitraryMinWordLength +makeArbitrary ''PhraseSlop +instance Arbitrary PhraseSlop where arbitrary = arbitraryPhraseSlop +makeArbitrary ''MinDocFrequency +instance Arbitrary MinDocFrequency where arbitrary = arbitraryMinDocFrequency +makeArbitrary ''MaxDocFrequency +instance Arbitrary MaxDocFrequency where arbitrary = arbitraryMaxDocFrequency +makeArbitrary ''Regexp +instance Arbitrary Regexp where arbitrary = arbitraryRegexp +makeArbitrary ''SimpleQueryStringQuery +instance Arbitrary SimpleQueryStringQuery where arbitrary = arbitrarySimpleQueryStringQuery +makeArbitrary ''FieldOrFields +instance Arbitrary FieldOrFields where arbitrary = arbitraryFieldOrFields +makeArbitrary ''SimpleQueryFlag +instance Arbitrary SimpleQueryFlag where arbitrary = arbitrarySimpleQueryFlag +makeArbitrary ''RegexpQuery +instance Arbitrary RegexpQuery where arbitrary = arbitraryRegexpQuery +makeArbitrary ''QueryStringQuery +instance Arbitrary QueryStringQuery where arbitrary = arbitraryQueryStringQuery +makeArbitrary ''RangeQuery +instance Arbitrary RangeQuery where arbitrary = arbitraryRangeQuery +makeArbitrary ''RangeValue +instance Arbitrary RangeValue where arbitrary = arbitraryRangeValue +makeArbitrary ''PrefixQuery +instance Arbitrary PrefixQuery where arbitrary = arbitraryPrefixQuery +makeArbitrary ''NestedQuery +instance Arbitrary NestedQuery where arbitrary = arbitraryNestedQuery +makeArbitrary ''MoreLikeThisFieldQuery +instance Arbitrary MoreLikeThisFieldQuery where arbitrary = arbitraryMoreLikeThisFieldQuery +makeArbitrary ''MoreLikeThisQuery +instance Arbitrary MoreLikeThisQuery where arbitrary = arbitraryMoreLikeThisQuery +makeArbitrary ''IndicesQuery +instance Arbitrary IndicesQuery where arbitrary = arbitraryIndicesQuery +makeArbitrary ''HasParentQuery +instance Arbitrary HasParentQuery where arbitrary = arbitraryHasParentQuery +makeArbitrary ''HasChildQuery +instance Arbitrary HasChildQuery where arbitrary = arbitraryHasChildQuery +makeArbitrary ''FuzzyQuery +instance Arbitrary FuzzyQuery where arbitrary = arbitraryFuzzyQuery +makeArbitrary ''FuzzyLikeFieldQuery +instance Arbitrary FuzzyLikeFieldQuery where arbitrary = arbitraryFuzzyLikeFieldQuery +makeArbitrary ''FuzzyLikeThisQuery +instance Arbitrary FuzzyLikeThisQuery where arbitrary = arbitraryFuzzyLikeThisQuery +makeArbitrary ''DisMaxQuery +instance Arbitrary DisMaxQuery where arbitrary = arbitraryDisMaxQuery +makeArbitrary ''CommonTermsQuery +instance Arbitrary CommonTermsQuery where arbitrary = arbitraryCommonTermsQuery +makeArbitrary ''DistanceRange +instance Arbitrary DistanceRange where arbitrary = arbitraryDistanceRange +makeArbitrary ''MultiMatchQuery +instance Arbitrary MultiMatchQuery where arbitrary = arbitraryMultiMatchQuery +makeArbitrary ''LessThanD +instance Arbitrary LessThanD where arbitrary = arbitraryLessThanD +makeArbitrary ''LessThanEqD +instance Arbitrary LessThanEqD where arbitrary = arbitraryLessThanEqD +makeArbitrary ''GreaterThanD +instance Arbitrary GreaterThanD where 
arbitrary = arbitraryGreaterThanD +makeArbitrary ''GreaterThanEqD +instance Arbitrary GreaterThanEqD where arbitrary = arbitraryGreaterThanEqD +makeArbitrary ''LessThan +instance Arbitrary LessThan where arbitrary = arbitraryLessThan +makeArbitrary ''LessThanEq +instance Arbitrary LessThanEq where arbitrary = arbitraryLessThanEq +makeArbitrary ''GreaterThan +instance Arbitrary GreaterThan where arbitrary = arbitraryGreaterThan +makeArbitrary ''GreaterThanEq +instance Arbitrary GreaterThanEq where arbitrary = arbitraryGreaterThanEq +makeArbitrary ''GeoPoint +instance Arbitrary GeoPoint where arbitrary = arbitraryGeoPoint +makeArbitrary ''NullValue +instance Arbitrary NullValue where arbitrary = arbitraryNullValue +makeArbitrary ''MinimumMatchHighLow +instance Arbitrary MinimumMatchHighLow where arbitrary = arbitraryMinimumMatchHighLow +makeArbitrary ''CommonMinimumMatch +instance Arbitrary CommonMinimumMatch where arbitrary = arbitraryCommonMinimumMatch +makeArbitrary ''BoostingQuery +instance Arbitrary BoostingQuery where arbitrary = arbitraryBoostingQuery +makeArbitrary ''BoolQuery +instance Arbitrary BoolQuery where arbitrary = arbitraryBoolQuery +makeArbitrary ''MatchQuery +instance Arbitrary MatchQuery where arbitrary = arbitraryMatchQuery +makeArbitrary ''MultiMatchQueryType +instance Arbitrary MultiMatchQueryType where arbitrary = arbitraryMultiMatchQueryType +makeArbitrary ''BooleanOperator +instance Arbitrary BooleanOperator where arbitrary = arbitraryBooleanOperator +makeArbitrary ''ZeroTermsQuery +instance Arbitrary ZeroTermsQuery where arbitrary = arbitraryZeroTermsQuery +makeArbitrary ''MatchQueryType +instance Arbitrary MatchQueryType where arbitrary = arbitraryMatchQueryType +makeArbitrary ''SearchAliasRouting +instance Arbitrary SearchAliasRouting where arbitrary = arbitrarySearchAliasRouting +makeArbitrary ''ScoreType +instance Arbitrary ScoreType where arbitrary = arbitraryScoreType +makeArbitrary ''Distance +instance Arbitrary Distance where arbitrary = arbitraryDistance +makeArbitrary ''DistanceUnit +instance Arbitrary DistanceUnit where arbitrary = arbitraryDistanceUnit +makeArbitrary ''DistanceType +instance Arbitrary DistanceType where arbitrary = arbitraryDistanceType +makeArbitrary ''OptimizeBbox +instance Arbitrary OptimizeBbox where arbitrary = arbitraryOptimizeBbox +makeArbitrary ''GeoBoundingBoxConstraint +instance Arbitrary GeoBoundingBoxConstraint where arbitrary = arbitraryGeoBoundingBoxConstraint +makeArbitrary ''GeoFilterType +instance Arbitrary GeoFilterType where arbitrary = arbitraryGeoFilterType +makeArbitrary ''GeoBoundingBox +instance Arbitrary GeoBoundingBox where arbitrary = arbitraryGeoBoundingBox +makeArbitrary ''LatLon +instance Arbitrary LatLon where arbitrary = arbitraryLatLon +makeArbitrary ''RangeExecution +instance Arbitrary RangeExecution where arbitrary = arbitraryRangeExecution +makeArbitrary ''RegexpFlag +instance Arbitrary RegexpFlag where arbitrary = arbitraryRegexpFlag +makeArbitrary ''BoolMatch +instance Arbitrary BoolMatch where arbitrary = arbitraryBoolMatch +makeArbitrary ''Term +instance Arbitrary Term where arbitrary = arbitraryTerm +makeArbitrary ''IndexSettings +instance Arbitrary IndexSettings where arbitrary = arbitraryIndexSettings +makeArbitrary ''TokenChar +instance Arbitrary TokenChar where arbitrary = arbitraryTokenChar +makeArbitrary ''Ngram +instance Arbitrary Ngram where arbitrary = arbitraryNgram +makeArbitrary ''TokenizerDefinition +instance Arbitrary TokenizerDefinition where arbitrary = 
arbitraryTokenizerDefinition +makeArbitrary ''TokenFilter +instance Arbitrary TokenFilter where arbitrary = arbitraryTokenFilter +makeArbitrary ''TokenFilterDefinition +instance Arbitrary TokenFilterDefinition where arbitrary = arbitraryTokenFilterDefinition +makeArbitrary ''Language +instance Arbitrary Language where arbitrary = arbitraryLanguage +makeArbitrary ''Shingle +instance Arbitrary Shingle where arbitrary = arbitraryShingle +makeArbitrary ''AnalyzerDefinition +instance Arbitrary AnalyzerDefinition where arbitrary = arbitraryAnalyzerDefinition +makeArbitrary ''Analysis +instance Arbitrary Analysis where arbitrary = arbitraryAnalysis +makeArbitrary ''Tokenizer +instance Arbitrary Tokenizer where arbitrary = arbitraryTokenizer +makeArbitrary ''UpdatableIndexSetting +instance Arbitrary UpdatableIndexSetting where + arbitrary = arbitraryUpdatableIndexSetting +makeArbitrary ''Compression +instance Arbitrary Compression where arbitrary = arbitraryCompression +makeArbitrary ''Bytes +instance Arbitrary Bytes where arbitrary = arbitraryBytes +makeArbitrary ''AllocationPolicy +instance Arbitrary AllocationPolicy where arbitrary = arbitraryAllocationPolicy +makeArbitrary ''InitialShardCount +instance Arbitrary InitialShardCount where arbitrary = arbitraryInitialShardCount +makeArbitrary ''FSType +instance Arbitrary FSType where arbitrary = arbitraryFSType +makeArbitrary ''CompoundFormat +instance Arbitrary CompoundFormat where arbitrary = arbitraryCompoundFormat +makeArbitrary ''FsSnapshotRepo +instance Arbitrary FsSnapshotRepo where arbitrary = arbitraryFsSnapshotRepo +makeArbitrary ''SnapshotRepoName +instance Arbitrary SnapshotRepoName where arbitrary = arbitrarySnapshotRepoName +makeArbitrary ''TemplateQueryInline +instance Arbitrary TemplateQueryInline where arbitrary = arbitraryTemplateQueryInline +makeArbitrary ''DirectGeneratorSuggestModeTypes +instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = arbitraryDirectGeneratorSuggestModeTypes +makeArbitrary ''DirectGenerators +instance Arbitrary DirectGenerators where arbitrary = arbitraryDirectGenerators +makeArbitrary ''PhraseSuggesterCollate +instance Arbitrary PhraseSuggesterCollate where arbitrary = arbitraryPhraseSuggesterCollate +makeArbitrary ''PhraseSuggesterHighlighter +instance Arbitrary PhraseSuggesterHighlighter where arbitrary = arbitraryPhraseSuggesterHighlighter +makeArbitrary ''Size +instance Arbitrary Size where arbitrary = arbitrarySize +makeArbitrary ''PhraseSuggester +instance Arbitrary PhraseSuggester where arbitrary = arbitraryPhraseSuggester +makeArbitrary ''SuggestType +instance Arbitrary SuggestType where arbitrary = arbitrarySuggestType +makeArbitrary ''Suggest +instance Arbitrary Suggest where arbitrary = arbitrarySuggest + +makeArbitrary ''FunctionScoreQuery +instance Arbitrary FunctionScoreQuery where arbitrary = arbitraryFunctionScoreQuery + +makeArbitrary ''FunctionScoreFunction +instance Arbitrary FunctionScoreFunction where arbitrary = arbitraryFunctionScoreFunction +makeArbitrary ''FunctionScoreFunctions +instance Arbitrary FunctionScoreFunctions where arbitrary = arbitraryFunctionScoreFunctions +makeArbitrary ''ComponentFunctionScoreFunction +instance Arbitrary ComponentFunctionScoreFunction where arbitrary = arbitraryComponentFunctionScoreFunction +makeArbitrary ''Script +instance Arbitrary Script where arbitrary = arbitraryScript +makeArbitrary ''ScriptLanguage +instance Arbitrary ScriptLanguage where arbitrary = arbitraryScriptLanguage +makeArbitrary ''ScriptInline +instance Arbitrary 
ScriptInline where arbitrary = arbitraryScriptInline +makeArbitrary ''ScriptId +instance Arbitrary ScriptId where arbitrary = arbitraryScriptId +makeArbitrary ''ScoreMode +instance Arbitrary ScoreMode where arbitrary = arbitraryScoreMode +makeArbitrary ''BoostMode +instance Arbitrary BoostMode where arbitrary = arbitraryBoostMode +makeArbitrary ''Seed +instance Arbitrary Seed where arbitrary = arbitrarySeed +makeArbitrary ''FieldValueFactor +instance Arbitrary FieldValueFactor where arbitrary = arbitraryFieldValueFactor +makeArbitrary ''Weight +instance Arbitrary Weight where arbitrary = arbitraryWeight +makeArbitrary ''Factor +instance Arbitrary Factor where arbitrary = arbitraryFactor +makeArbitrary ''FactorMissingFieldValue +instance Arbitrary FactorMissingFieldValue where arbitrary = arbitraryFactorMissingFieldValue +makeArbitrary ''FactorModifier +instance Arbitrary FactorModifier where arbitrary = arbitraryFactorModifier + +newtype UpdatableIndexSetting' = + UpdatableIndexSetting' UpdatableIndexSetting + deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable) + +instance Arbitrary UpdatableIndexSetting' where + arbitrary = do + settings <- arbitrary + return $ UpdatableIndexSetting' $ case settings of + RoutingAllocationInclude xs -> + RoutingAllocationInclude (dropDuplicateAttrNames xs) + RoutingAllocationExclude xs -> + RoutingAllocationExclude (dropDuplicateAttrNames xs) + RoutingAllocationRequire xs -> + RoutingAllocationRequire (dropDuplicateAttrNames xs) + x -> x + where + dropDuplicateAttrNames = + NE.fromList . L.nubBy sameAttrName . NE.toList + sameAttrName a b = + nodeAttrFilterName a == nodeAttrFilterName b + -- shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (shrink x) diff --git a/tests/V5/Test/Highlights.hs b/tests/V5/Test/Highlights.hs new file mode 100644 index 0000000..baa2234 --- /dev/null +++ b/tests/V5/Test/Highlights.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Highlights where + +import Test.Common +import Test.Import + +import qualified Data.Map as M + +initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight)) +initHighlights fieldName = do + _ <- insertData + _ <- insertOther + let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell") + let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing] + let search = mkHighlightSearch (Just query) testHighlight + searchTweetHighlight search + +spec :: Spec +spec = + describe "Highlights API" $ do + it "returns highlight from query when there should be one" $ withTestEnv $ do + myHighlight <- initHighlights "message" + liftIO $ + myHighlight `shouldBe` + Right (Just (M.fromList [("message", ["Use haskell!"])])) + + it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do + myHighlight <- initHighlights "user" + liftIO $ + myHighlight `shouldBe` + Right Nothing diff --git a/tests/V5/Test/Import.hs b/tests/V5/Test/Import.hs new file mode 100644 index 0000000..2d5f8a1 --- /dev/null +++ b/tests/V5/Test/Import.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Import + ( module X + , module Test.Import + ) where + + +import Control.Applicative as X +import Control.Exception as X (evaluate) +import Control.Monad as X +import Control.Monad.Catch as X +import Control.Monad.Reader as X +import Data.Aeson as X +import Data.Aeson.TH as X +import Data.Aeson.Types as X (parseEither) +import Data.Maybe as X +import Data.List.NonEmpty as X (NonEmpty(..)) +import Data.Monoid as X 
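+-- Note: a few of the re-exports below hide names that would otherwise
+-- clash for test modules importing this module wholesale, e.g.
+-- Bloodhound's `key` (shadowed by Lens.Micro.Aeson's `key`) and
+-- http-client's `Proxy` and `fileSize` (shadowed by Data.Proxy and
+-- System.PosixCompat.Files).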
+import Data.Ord as X (comparing) +import Data.Proxy as X +import Data.Text as X (Text) +import Data.Time.Calendar as X (Day(..), fromGregorian) +import Data.Time.Clock as X +import Data.Typeable as X +import Database.V5.Bloodhound as X hiding (key) +import Lens.Micro as X +import Lens.Micro.Aeson as X +import Network.HTTP.Client as X hiding (Proxy, fileSize) +import System.IO.Temp as X +import System.PosixCompat.Files as X +import Test.Hspec as X +import Test.Hspec.QuickCheck as X (prop) +import Test.QuickCheck as X hiding (Result, Success) +import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid) +import Text.Pretty.Simple as X (pPrint) + +import qualified Data.List as L + +noDuplicates :: Eq a => [a] -> Bool +noDuplicates xs = L.nub xs == xs + +getSource :: EsResult a -> Maybe a +getSource = fmap _source . foundResult + +grabFirst :: Either EsError (SearchResult a) -> Either EsError a +grabFirst r = + case fmap (hitSource . head . hits . searchHits) r of + (Left e) -> Left e + (Right Nothing) -> Left (EsError 500 "Source was missing") + (Right (Just x)) -> Right x + +when' :: Monad m => m Bool -> m () -> m () +when' b f = b >>= \x -> when x f + +headMay :: [a] -> Maybe a +headMay (x : _) = Just x +headMay _ = Nothing diff --git a/tests/V5/Test/Indices.hs b/tests/V5/Test/Indices.hs new file mode 100644 index 0000000..4422a8d --- /dev/null +++ b/tests/V5/Test/Indices.hs @@ -0,0 +1,152 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Indices where + +import Test.Common +import Test.Import + +import qualified Data.List as L +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as M +spec :: Spec +spec = do + describe "Index create/delete API" $ do + it "creates and then deletes the requested index" $ withTestEnv $ do + -- priming state. + _ <- deleteExampleIndex + resp <- createExampleIndex + deleteResp <- deleteExampleIndex + liftIO $ do + validateStatus resp 200 + validateStatus deleteResp 200 + + describe "Index aliases" $ do + let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias") + let alias = IndexAlias (testIndex) aname + let create = IndexAliasCreate Nothing Nothing + let action = AddAlias alias create + it "handles the simple case of aliasing an existing index" $ do + withTestEnv $ do + resetIndex + resp <- updateIndexAliases (action :| []) + liftIO $ validateStatus resp 200 + let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| [])) + (do aliases <- withTestEnv getIndexAliases + let expected = IndexAliasSummary alias create + case aliases of + Right (IndexAliasesSummary summs) -> + L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected + Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup + it "allows alias deletion" $ do + aliases <- withTestEnv $ do + resetIndex + resp <- updateIndexAliases (action :| []) + liftIO $ validateStatus resp 200 + _ <- deleteIndexAlias aname + getIndexAliases + -- let expected = IndexAliasSummary alias create + case aliases of + Right (IndexAliasesSummary summs) -> + L.find ( (== aname) + . indexAlias + . 
indexAliasSummaryAlias
+ ) summs
+ `shouldBe` Nothing
+ Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)
+
+ describe "Index Listing" $ do
+ it "returns a list of index names" $ withTestEnv $ do
+ _ <- createExampleIndex
+ ixns <- listIndices
+ liftIO (ixns `shouldContain` [testIndex])
+
+ describe "Index Settings" $ do
+ it "persists settings" $ withTestEnv $ do
+ _ <- deleteExampleIndex
+ _ <- createExampleIndex
+ let updates = BlocksWrite False :| []
+ updateResp <- updateIndexSettings updates testIndex
+ liftIO $ validateStatus updateResp 200
+ getResp <- getIndexSettings testIndex
+ liftIO $
+ getResp `shouldBe` Right (IndexSettingsSummary
+ testIndex
+ (IndexSettings (ShardCount 1) (ReplicaCount 0))
+ (NE.toList updates))
+
+ it "allows total fields to be set" $ when' (atleast es50) $ withTestEnv $ do
+ _ <- deleteExampleIndex
+ _ <- createExampleIndex
+ let updates = MappingTotalFieldsLimit 2500 :| []
+ updateResp <- updateIndexSettings updates testIndex
+ liftIO $ validateStatus updateResp 200
+ getResp <- getIndexSettings testIndex
+ liftIO $
+ getResp `shouldBe` Right (IndexSettingsSummary
+ testIndex
+ (IndexSettings (ShardCount 1) (ReplicaCount 0))
+ (NE.toList updates))
+
+ it "accepts custom analyzers" $ when' (atleast es50) $ withTestEnv $ do
+ _ <- deleteExampleIndex
+ let analysis = Analysis
+ (M.singleton "ex_analyzer"
+ ( AnalyzerDefinition
+ (Just (Tokenizer "ex_tokenizer"))
+ (map TokenFilter
+ [ "ex_filter_lowercase","ex_filter_uppercase","ex_filter_apostrophe"
+ , "ex_filter_reverse","ex_filter_snowball"
+ , "ex_filter_shingle"
+ ]
+ )
+ )
+ )
+ (M.singleton "ex_tokenizer"
+ ( TokenizerDefinitionNgram
+ ( Ngram 3 4 [TokenLetter,TokenDigit])
+ )
+ )
+ (M.fromList
+ [ ("ex_filter_lowercase",TokenFilterDefinitionLowercase (Just Greek))
+ , ("ex_filter_uppercase",TokenFilterDefinitionUppercase Nothing)
+ , ("ex_filter_apostrophe",TokenFilterDefinitionApostrophe)
+ , ("ex_filter_reverse",TokenFilterDefinitionReverse)
+ , ("ex_filter_snowball",TokenFilterDefinitionSnowball English)
+ , ("ex_filter_shingle",TokenFilterDefinitionShingle (Shingle 3 3 True False " " "_"))
+ ]
+ )
+ updates = [AnalysisSetting analysis]
+ createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
+ liftIO $ validateStatus createResp 200
+ getResp <- getIndexSettings testIndex
+ liftIO $
+ getResp `shouldBe` Right (IndexSettingsSummary
+ testIndex
+ (IndexSettings (ShardCount 1) (ReplicaCount 0))
+ updates
+ )
+
+ it "accepts default compression codec" $ when' (atleast es50) $ withTestEnv $ do
+ _ <- deleteExampleIndex
+ let updates = [CompressionSetting CompressionDefault]
+ createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
+ liftIO $ validateStatus createResp 200
+ getResp <- getIndexSettings testIndex
+ liftIO $ getResp `shouldBe` Right
+ (IndexSettingsSummary testIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) updates)
+
+ it "accepts best compression codec" $ when' (atleast es50) $ withTestEnv $ do
+ _ <- deleteExampleIndex
+ let updates = [CompressionSetting CompressionBest]
+ createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
+ liftIO $ validateStatus createResp 200
+ getResp <- getIndexSettings testIndex
+ liftIO $ getResp `shouldBe` Right
+ (IndexSettingsSummary testIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) updates)
+
+
+ describe "Index Optimization" $ do
+ it "returns a successful response upon completion" $ withTestEnv
$ do + _ <- createExampleIndex + resp <- forceMergeIndex (IndexList (testIndex :| [])) defaultForceMergeIndexSettings + liftIO $ validateStatus resp 200 diff --git a/tests/V5/Test/JSON.hs b/tests/V5/Test/JSON.hs new file mode 100644 index 0000000..bec04ec --- /dev/null +++ b/tests/V5/Test/JSON.hs @@ -0,0 +1,213 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module Test.JSON (spec) where + +import Test.Import + +import qualified Data.ByteString.Lazy.Char8 as BL8 +import qualified Data.List as L +import Data.List.NonEmpty (NonEmpty(..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.HashMap.Strict as HM +import qualified Data.Text as T +import qualified Data.Vector as V + +import Test.ApproxEq +import Test.Generators + +propJSON :: forall a + . ( Arbitrary a + , ToJSON a + , FromJSON a + , Show a + , Eq a + , Typeable a + ) + => Proxy a -> Spec +propJSON _ = prop testName $ \(a :: a) -> + let jsonStr = "via " <> BL8.unpack (encode a) + in counterexample jsonStr (parseEither parseJSON (toJSON a) + === Right a) + where testName = show ty <> " FromJSON/ToJSON roundtrips" + ty = typeOf (undefined :: a) + +propApproxJSON :: forall a + . ( Arbitrary a + , ToJSON a + , FromJSON a + , Show a + , ApproxEq a + , Typeable a + ) + => Proxy a -> Spec +propApproxJSON _ = prop testName $ \(a :: a) -> + let jsonStr = "via " <> BL8.unpack (encode a) + in counterexample jsonStr (parseEither parseJSON (toJSON a) + ==~ Right a) + where testName = show ty <> " FromJSON/ToJSON roundtrips" + ty = typeOf (undefined :: a) + +spec :: Spec +spec = do + describe "ToJSON RegexpFlags" $ do + it "generates the correct JSON for AllRegexpFlags" $ + toJSON AllRegexpFlags `shouldBe` String "ALL" + + it "generates the correct JSON for NoRegexpFlags" $ + toJSON NoRegexpFlags `shouldBe` String "NONE" + + it "generates the correct JSON for SomeRegexpFlags" $ + let flags = AnyString :| [ Automaton + , Complement + , Empty + , Intersection + , Interval ] + in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL" + + prop "removes duplicates from flags" $ \(flags :: RegexpFlags) -> + let String str = toJSON flags + flagStrs = T.splitOn "|" str + in noDuplicates flagStrs + + describe "omitNulls" $ do + it "checks that omitNulls drops list elements when it should" $ + let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int])) + , "test2" .= (toJSON ("some value" :: Text))] + in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) + + it "checks that omitNulls doesn't drop list elements when it shouldn't" $ + let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int])) + , "test2" .= (toJSON ("some value" :: Text))] + in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0])) + , ("test2", String "some value")]) + it "checks that omitNulls drops non list elements when it should" $ + let dropped = omitNulls $ [ "test1" .= (toJSON Null) + , "test2" .= (toJSON ("some value" :: Text))] + in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) + it "checks that omitNulls doesn't drop non list elements when it shouldn't" $ + let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int)) + , "test2" .= (toJSON ("some value" :: Text))] + in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0) + , ("test2", String "some value")]) + + describe "Exact isomorphism JSON instances" $ do + propJSON (Proxy :: Proxy Version) + propJSON (Proxy :: Proxy 
IndexName) + propJSON (Proxy :: Proxy MappingName) + propJSON (Proxy :: Proxy DocId) + propJSON (Proxy :: Proxy IndexAliasRouting) + propJSON (Proxy :: Proxy RoutingValue) + propJSON (Proxy :: Proxy ShardCount) + propJSON (Proxy :: Proxy ReplicaCount) + propJSON (Proxy :: Proxy TemplateName) + propJSON (Proxy :: Proxy TemplatePattern) + propJSON (Proxy :: Proxy QueryString) + propJSON (Proxy :: Proxy FieldName) + propJSON (Proxy :: Proxy Script) + propJSON (Proxy :: Proxy ScriptLanguage) + propJSON (Proxy :: Proxy ScriptInline) + propJSON (Proxy :: Proxy ScriptId) + propJSON (Proxy :: Proxy ScriptParams) + propJSON (Proxy :: Proxy CacheName) + propJSON (Proxy :: Proxy CacheKey) + propJSON (Proxy :: Proxy Existence) + propJSON (Proxy :: Proxy CutoffFrequency) + propJSON (Proxy :: Proxy Analyzer) + propJSON (Proxy :: Proxy MaxExpansions) + propJSON (Proxy :: Proxy Lenient) + propJSON (Proxy :: Proxy Tiebreaker) + propJSON (Proxy :: Proxy Boost) + propJSON (Proxy :: Proxy BoostTerms) + propJSON (Proxy :: Proxy MinimumMatch) + propJSON (Proxy :: Proxy DisableCoord) + propJSON (Proxy :: Proxy IgnoreTermFrequency) + propJSON (Proxy :: Proxy MinimumTermFrequency) + propJSON (Proxy :: Proxy MaxQueryTerms) + propJSON (Proxy :: Proxy Fuzziness) + propJSON (Proxy :: Proxy PrefixLength) + propJSON (Proxy :: Proxy TypeName) + propJSON (Proxy :: Proxy PercentMatch) + propJSON (Proxy :: Proxy StopWord) + propJSON (Proxy :: Proxy QueryPath) + propJSON (Proxy :: Proxy AllowLeadingWildcard) + propJSON (Proxy :: Proxy LowercaseExpanded) + propJSON (Proxy :: Proxy EnablePositionIncrements) + propJSON (Proxy :: Proxy AnalyzeWildcard) + propJSON (Proxy :: Proxy GeneratePhraseQueries) + propJSON (Proxy :: Proxy Locale) + propJSON (Proxy :: Proxy MaxWordLength) + propJSON (Proxy :: Proxy MinWordLength) + propJSON (Proxy :: Proxy PhraseSlop) + propJSON (Proxy :: Proxy MinDocFrequency) + propJSON (Proxy :: Proxy MaxDocFrequency) + propJSON (Proxy :: Proxy Filter) + propJSON (Proxy :: Proxy Query) + propJSON (Proxy :: Proxy SimpleQueryStringQuery) + propJSON (Proxy :: Proxy FieldOrFields) + propJSON (Proxy :: Proxy SimpleQueryFlag) + propJSON (Proxy :: Proxy RegexpQuery) + propJSON (Proxy :: Proxy QueryStringQuery) + propJSON (Proxy :: Proxy RangeQuery) + propJSON (Proxy :: Proxy PrefixQuery) + propJSON (Proxy :: Proxy NestedQuery) + propJSON (Proxy :: Proxy MoreLikeThisFieldQuery) + propJSON (Proxy :: Proxy MoreLikeThisQuery) + propJSON (Proxy :: Proxy IndicesQuery) + propJSON (Proxy :: Proxy HasParentQuery) + propJSON (Proxy :: Proxy HasChildQuery) + propJSON (Proxy :: Proxy FuzzyQuery) + propJSON (Proxy :: Proxy FuzzyLikeFieldQuery) + propJSON (Proxy :: Proxy FuzzyLikeThisQuery) + propJSON (Proxy :: Proxy FunctionScoreQuery) + propJSON (Proxy :: Proxy BoostMode) + propJSON (Proxy :: Proxy ScoreMode) + propJSON (Proxy :: Proxy ComponentFunctionScoreFunction) + propJSON (Proxy :: Proxy FieldValueFactor) + propJSON (Proxy :: Proxy FactorModifier) + propJSON (Proxy :: Proxy DisMaxQuery) + propJSON (Proxy :: Proxy CommonTermsQuery) + propJSON (Proxy :: Proxy CommonMinimumMatch) + propJSON (Proxy :: Proxy BoostingQuery) + propJSON (Proxy :: Proxy BoolQuery) + propJSON (Proxy :: Proxy MatchQuery) + propJSON (Proxy :: Proxy MultiMatchQueryType) + propJSON (Proxy :: Proxy BooleanOperator) + propJSON (Proxy :: Proxy ZeroTermsQuery) + propJSON (Proxy :: Proxy MatchQueryType) + propJSON (Proxy :: Proxy AliasRouting) + propJSON (Proxy :: Proxy IndexAliasCreate) + propJSON (Proxy :: Proxy SearchAliasRouting) + propJSON (Proxy 
:: Proxy ScoreType) + propJSON (Proxy :: Proxy Distance) + propJSON (Proxy :: Proxy DistanceUnit) + propJSON (Proxy :: Proxy DistanceType) + propJSON (Proxy :: Proxy OptimizeBbox) + propJSON (Proxy :: Proxy GeoBoundingBoxConstraint) + propJSON (Proxy :: Proxy GeoFilterType) + propJSON (Proxy :: Proxy GeoBoundingBox) + propJSON (Proxy :: Proxy LatLon) + propJSON (Proxy :: Proxy RangeExecution) + prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs -> + let expected = case rfs of + SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (L.nub (NE.toList fs))) + x -> x + in parseEither parseJSON (toJSON rfs) === Right expected + propJSON (Proxy :: Proxy BoolMatch) + propJSON (Proxy :: Proxy Term) + propJSON (Proxy :: Proxy MultiMatchQuery) + propJSON (Proxy :: Proxy IndexSettings) + propJSON (Proxy :: Proxy CompoundFormat) + propJSON (Proxy :: Proxy TemplateQueryInline) + propJSON (Proxy :: Proxy Suggest) + propJSON (Proxy :: Proxy DirectGenerators) + propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes) + + describe "Approximate isomorphism JSON instances" $ do + propApproxJSON (Proxy :: Proxy UpdatableIndexSetting') + propApproxJSON (Proxy :: Proxy ReplicaCount) + propApproxJSON (Proxy :: Proxy ReplicaBounds) + propApproxJSON (Proxy :: Proxy Bytes) + propApproxJSON (Proxy :: Proxy AllocationPolicy) + propApproxJSON (Proxy :: Proxy InitialShardCount) + propApproxJSON (Proxy :: Proxy FSType) diff --git a/tests/V5/Test/Query.hs b/tests/V5/Test/Query.hs new file mode 100644 index 0000000..364a836 --- /dev/null +++ b/tests/V5/Test/Query.hs @@ -0,0 +1,115 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Query where + +import Test.Common +import Test.Import + +import qualified Data.HashMap.Strict as HM + +spec :: Spec +spec = + describe "query API" $ do + it "returns document for term query and identity filter" $ withTestEnv $ do + _ <- insertData + let query = TermQuery (Term "user" "bitemyapp") Nothing + let filter' = Filter $ MatchAllQuery Nothing + let search = mkSearch (Just query) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "handles constant score queries" $ withTestEnv $ do + _ <- insertData + let query = TermsQuery "user" ("bitemyapp" :| []) + let cfQuery = ConstantScoreQuery query (Boost 1.0) + let filter' = Filter $ MatchAllQuery Nothing + let search = mkSearch (Just cfQuery) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for terms query and identity filter" $ withTestEnv $ do + _ <- insertData + let query = TermsQuery "user" ("bitemyapp" :| []) + let filter' = Filter $ MatchAllQuery Nothing + let search = mkSearch (Just query) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for match query" $ withTestEnv $ do + _ <- insertData + let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for multi-match query" $ withTestEnv $ do + _ <- insertData + let flds = [FieldName "user", FieldName "message"] + let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp") + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for multi-match query with a custom tiebreaker" 
$ withTestEnv $ do + _ <- insertData + let tiebreaker = Just $ Tiebreaker 0.3 + flds = [FieldName "user", FieldName "message"] + multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp") + query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker } + search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for bool query" $ withTestEnv $ do + _ <- insertData + let innerQuery = QueryMatchQuery $ + mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let query = QueryBoolQuery $ + mkBoolQuery [innerQuery] [] [] [] + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for boosting query" $ withTestEnv $ do + _ <- insertData + let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp") + let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2) + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for common terms query" $ withTestEnv $ do + _ <- insertData + let query = QueryCommonTermsQuery $ + CommonTermsQuery (FieldName "user") + (QueryString "bitemyapp") + (CutoffFrequency 0.0001) + Or Or Nothing Nothing Nothing Nothing + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for inline template query" $ withTestEnv $ do + _ <- insertData + let innerQuery = QueryMatchQuery $ + mkMatchQuery (FieldName "{{userKey}}") + (QueryString "{{bitemyappKey}}") + templateParams = TemplateQueryKeyValuePairs $ HM.fromList + [ ("userKey", "user") + , ("bitemyappKey", "bitemyapp") + ] + templateQuery = QueryTemplateQueryInline $ + TemplateQueryInline innerQuery templateParams + search = mkSearch (Just templateQuery) Nothing + myTweet <- searchTweet search + liftIO $ myTweet `shouldBe` Right exampleTweet diff --git a/tests/V5/Test/Script.hs new file mode 100644 index 0000000..e076574 --- /dev/null +++ b/tests/V5/Test/Script.hs @@ -0,0 +1,36 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Script where + +import Test.Common +import Test.Import + +import qualified Data.HashMap.Strict as HM +import qualified Data.Map as M + +spec :: Spec +spec = + describe "Script" $ + it "returns a transformed document based on the script field" $ withTestEnv $ do + _ <- insertData + let query = MatchAllQuery Nothing + sfv = toJSON $ + Script + (Just (ScriptLanguage "painless")) + (Just (ScriptInline "doc['age'].value * 2")) + Nothing + Nothing + sf = ScriptFields $ + HM.fromList [("test1", sfv)] + search' = mkSearch (Just query) Nothing + search = search' { scriptFields = Just sf } + resp <- searchByIndex testIndex search + parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value)) + case parsed of + Left e -> + liftIO $ expectationFailure ("Expected a script-transformed result but got: " <> show e) + Right sr -> do + let Just results = + hitFields (head (hits (searchHits sr))) + liftIO $ + results `shouldBe` HitFields (M.fromList [("test1", [Number 20000.0])])
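+ +-- A minimal helper sketch (hypothetical; nothing else in the suite calls it): +-- it factors the construction above into a single function that projects one +-- script-computed field over a match-all query. The constructors and record +-- field it uses (mkSearch, MatchAllQuery, ScriptFields, scriptFields, toJSON) +-- all appear in the spec above; the helper's name is new, and the signature +-- assumes Test.Import re-exports Text, Script, and Search as the surrounding +-- modules suggest. +mkScriptFieldSearch :: Text -> Script -> Search +mkScriptFieldSearch fieldLabel script = + -- attach the script under the given field label, leaving the rest of the + -- match-all search at its defaults + (mkSearch (Just (MatchAllQuery Nothing)) Nothing) + { scriptFields = Just (ScriptFields (HM.fromList [(fieldLabel, toJSON script)])) }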
diff --git a/tests/V5/Test/Snapshots.hs new file mode 100644 index 0000000..cead610 --- /dev/null +++ b/tests/V5/Test/Snapshots.hs @@ -0,0 +1,201 @@ +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE OverloadedStrings #-} + +module Test.Snapshots (spec) where + +import Test.Common +import Test.Import + +import Data.Maybe (fromMaybe) +import qualified Data.List as L +import qualified Data.HashMap.Strict as HM +import qualified Data.Text as T +import qualified Data.Vector as V +import qualified Network.HTTP.Types.Method as NHTM +import qualified Network.URI as URI + +import Test.Generators () + +spec :: Spec +spec = do + describe "FsSnapshotRepo" $ + prop "SnapshotRepo laws" $ \fsr -> + fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo) + + describe "Snapshot repos" $ do + it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do + res <- getSnapshotRepos AllSnapshotRepos + liftIO $ case res of + Left e -> expectationFailure ("Expected a right but got Left " <> show e) + Right _ -> return () + + it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + let r2n = SnapshotRepoName "bloodhound-repo2" + withSnapshotRepo r1n $ \r1 -> + withSnapshotRepo r2n $ \r2 -> do + repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n])) + liftIO $ case repos of + Right xs -> do + let srt = L.sortBy (comparing gSnapshotRepoName) + srt xs `shouldBe` srt [r1, r2] + Left e -> expectationFailure (show e) + + it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \r1 -> do + let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1)) + let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing + resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression + liftIO (validateStatus resp 200) + Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [])) + liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression) + + -- repository verification appears to have been added in ES 1.4 + it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + res <- verifySnapshotRepo r1n + liftIO $ case res of + Right (SnapshotVerification vs) + | null vs -> expectationFailure "Expected nonempty set of verifying nodes" + | otherwise -> return () + Left e -> expectationFailure (show e) + + describe "Snapshots" $ do + it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + res <- getSnapshots r1n AllSnapshots + liftIO $ case res of + Left e -> expectationFailure ("Expected a right but got Left " <> show e) + Right _ -> return () + + it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + let s1n = SnapshotName "example-snapshot" + withSnapshot r1n s1n $ do + res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| [])) + liftIO $ case res of + Right [snap] + | snapInfoState snap == SnapshotSuccess && + snapInfoName snap == s1n -> return () + | otherwise -> expectationFailure (show snap) + Right [] -> expectationFailure "There were no snapshots" + Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps)) + Left e -> expectationFailure (show e)
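+ + -- A possible shared helper, sketched as a comment so the spec's do-block is + -- left exactly as written; every name below already appears in this module, + -- but the helper itself is hypothetical, not part of the suite or the + -- Bloodhound API: + -- + -- expectSnapshotSuccess :: SnapshotRepoName -> SnapshotName -> BH IO () + -- expectSnapshotSuccess repo name = do + --   res <- getSnapshots repo (SnapshotList (ExactSnap name :| [])) + --   liftIO $ case res of + --     Right [snap] | snapInfoState snap == SnapshotSuccess -> return () + --     Right snaps -> expectationFailure ("Unexpected snapshots: " <> show snaps) + --     Left e -> expectationFailure (show e)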
+ + describe "Snapshot restore" $ do + it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + let s1n = SnapshotName "example-snapshot" + withSnapshot r1n s1n $ do + let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True } + -- have to close an index to restore it + resp1 <- closeIndex testIndex + liftIO (validateStatus resp1 200) + resp2 <- restoreSnapshot r1n s1n settings + liftIO (validateStatus resp2 200) + + it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do + let r1n = SnapshotRepoName "bloodhound-repo1" + withSnapshotRepo r1n $ \_ -> do + let s1n = SnapshotName "example-snapshot" + withSnapshot r1n s1n $ do + let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)" + let replace = RRTLit "restored-" :| [RRSubWholeMatch] + let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1" + oldEnoughForOverrides <- liftIO (atleast es15) + let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) } + let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True + , snapRestoreRenamePattern = Just pat + , snapRestoreRenameReplacement = Just replace + , snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides + then Just overrides + else Nothing + } + -- have to close an index to restore it + let go = do + resp <- restoreSnapshot r1n s1n settings + liftIO (validateStatus resp 200) + exists <- indexExists expectedIndex + liftIO (exists `shouldBe` True) + go `finally` deleteIndex expectedIndex + +-- | Get configured repo paths for snapshotting. Note that by default +-- this is not enabled and if we are over es 1.5, we won't be able to +-- test snapshotting. Note that this can and should be part of the +-- client functionality in a much less ad-hoc incarnation. +getRepoPaths :: IO [FilePath] +getRepoPaths = withTestEnv $ do + bhe <- getBHEnv + let Server s = bhServer bhe + let tUrl = s <> "/" <> "_nodes" + initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl)) + let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet } + Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe)) + return $ fromMaybe mempty $ do + Object nodes <- HM.lookup "nodes" o + Object firstNode <- snd <$> headMay (HM.toList nodes) + Object settings <- HM.lookup "settings" firstNode + Object path <- HM.lookup "path" settings + Array repo <- HM.lookup "repo" path + return [ T.unpack t | String t <- V.toList repo] + +-- | 1.5 and earlier don't care about repo paths +canSnapshot :: IO Bool +canSnapshot = do + caresAboutRepos <- atleast es16 + repoPaths <- getRepoPaths + return (not caresAboutRepos || not (null repoPaths)) + +withSnapshotRepo + :: ( MonadMask m + , MonadBH m + ) + => SnapshotRepoName + -> (GenericSnapshotRepo -> m a) + -> m a +withSnapshotRepo srn@(SnapshotRepoName n) f = do + repoPaths <- liftIO getRepoPaths + -- we'll use the first repo path if available, otherwise system temp + -- dir. Note that this will fail on ES > 1.6, so be sure you use + -- @when' canSnapshot@. 
+ case repoPaths of + (firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f + [] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f + where + alloc dir = do + liftIO (setFileMode dir mode) + let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing + resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo + liftIO (validateStatus resp 200) + return (toGSnapshotRepo repo) + mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes + free GenericSnapshotRepo {..} = do + resp <- deleteSnapshotRepo gSnapshotRepoName + liftIO (validateStatus resp 200) + + +withSnapshot + :: ( MonadMask m + , MonadBH m + ) + => SnapshotRepoName + -> SnapshotName + -> m a + -> m a +withSnapshot srn sn = bracket_ alloc free + where + alloc = do + resp <- createSnapshot srn sn createSettings + liftIO (validateStatus resp 200) + -- We'll make this synchronous for testing purposes + createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True + , snapIndices = Just (IndexList (testIndex :| [])) + -- We don't actually need to back up any data + } + free = + deleteSnapshot srn sn diff --git a/tests/V5/Test/Sorting.hs b/tests/V5/Test/Sorting.hs new file mode 100644 index 0000000..8665368 --- /dev/null +++ b/tests/V5/Test/Sorting.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Sorting where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "sorting" $ + it "returns documents in the right order" $ withTestEnv $ do + _ <- insertData + _ <- insertOther + let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending + let search = Search Nothing + Nothing (Just [sortSpec]) Nothing Nothing + False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing + Nothing Nothing + result <- searchTweets search + let myTweet = grabFirst result + liftIO $ + myTweet `shouldBe` Right otherTweet diff --git a/tests/V5/Test/SourceFiltering.hs b/tests/V5/Test/SourceFiltering.hs new file mode 100644 index 0000000..447980c --- /dev/null +++ b/tests/V5/Test/SourceFiltering.hs @@ -0,0 +1,38 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.SourceFiltering where + +import Test.Common +import Test.Import + +import qualified Data.HashMap.Strict as HM + +spec :: Spec +spec = + describe "Source filtering" $ do + + it "doesn't include source when sources are disabled" $ withTestEnv $ + searchExpectSource + NoSource + (Left (EsError 500 "Source was missing")) + + it "includes a source" $ withTestEnv $ + searchExpectSource + (SourcePatterns (PopPattern (Pattern "message"))) + (Right (Object (HM.fromList [("message", String "Use haskell!")]))) + + it "includes sources" $ withTestEnv $ + searchExpectSource + (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"])) + (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")]))) + + it "includes source patterns" $ withTestEnv $ + searchExpectSource + (SourcePatterns (PopPattern (Pattern "*ge"))) + (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")]))) + + it "excludes source patterns" $ withTestEnv $ + searchExpectSource + (SourceIncludeExclude (Include []) + (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"])) + (Right (Object (HM.fromList [("user",String "bitemyapp")]))) diff --git a/tests/V5/Test/Suggest.hs b/tests/V5/Test/Suggest.hs new file mode 100644 index 0000000..b1a2c2b --- /dev/null +++ 
b/tests/V5/Test/Suggest.hs @@ -0,0 +1,23 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Suggest where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "Suggest" $ + it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do + _ <- insertData + let query = QueryMatchNoneQuery + phraseSuggester = mkPhraseSuggester (FieldName "message") + namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester) + search' = mkSearch (Just query) Nothing + search = search' { suggestBody = Just namedSuggester } + expectedText = Just "use haskell" + resp <- searchByIndex testIndex search + parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet)) + case parsed of + Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e) + Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText diff --git a/tests/V5/Test/Templates.hs new file mode 100644 index 0000000..bda85e0 --- /dev/null +++ b/tests/V5/Test/Templates.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Templates where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "template API" $ do + it "can create a template" $ withTestEnv $ do + let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping] + resp <- putTemplate idxTpl (TemplateName "tweet-tpl") + liftIO $ validateStatus resp 200 + + it "can detect if a template exists" $ withTestEnv $ do + exists <- templateExists (TemplateName "tweet-tpl") + liftIO $ exists `shouldBe` True + + it "can delete a template" $ withTestEnv $ do + resp <- deleteTemplate (TemplateName "tweet-tpl") + liftIO $ validateStatus resp 200 + + it "can detect if a template doesn't exist" $ withTestEnv $ do + exists <- templateExists (TemplateName "tweet-tpl") + liftIO $ exists `shouldBe` False diff --git a/tests/V5/tests.hs index 1ddc449..600bca5 100644 --- a/tests/V5/tests.hs +++ b/tests/V5/tests.hs @@ -1,9 +1,5 @@ {-# LANGUAGE CPP #-} -{-# LANGUAGE DefaultSignatures #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} @@ -17,967 +13,40 @@ #endif module Main where -import Control.Applicative -import Control.Error -import Control.Exception (evaluate) -import Control.Monad -import Control.Monad.Catch -import Control.Monad.Reader -import Data.Aeson -import Data.Aeson.Types (parseEither) -import qualified Data.ByteString.Lazy.Char8 as BL8 -import qualified Data.HashMap.Strict as HM -import Data.List (nub) -import qualified Data.List as L -import Data.List.NonEmpty (NonEmpty (..)) -import qualified Data.List.NonEmpty as NE -import qualified Data.Map.Strict as M -import Data.Monoid -import Data.Ord (comparing) -import Data.Proxy -import Data.Text (Text) -import qualified Data.Text as T -import Data.Time.Calendar (Day (..), fromGregorian) -import Data.Time.Clock (NominalDiffTime, UTCTime (..), - secondsToDiffTime) -import Data.Typeable -import qualified Data.Vector as V -import qualified Data.Version as Vers -import Database.V5.Bloodhound -import GHC.Generics as G -import Network.HTTP.Client hiding (Proxy) -import qualified Network.HTTP.Types.Method as
NHTM -import qualified Network.HTTP.Types.Status as NHTS -import qualified Network.URI as URI -import Prelude hiding (filter) -import System.IO.Temp -import System.PosixCompat.Files -import Test.Hspec -import Test.QuickCheck.Property.Monoid (T (..), eq, prop_Monoid) - -import Test.Hspec.QuickCheck (prop) -import Test.QuickCheck - -import qualified Generics.SOP as SOP -import qualified Generics.SOP.GGP as SOP - -testServer :: Server -testServer = Server "http://localhost:9200" -testIndex :: IndexName -testIndex = IndexName "bloodhound-tests-twitter-1" -testMapping :: MappingName -testMapping = MappingName "tweet" - -withTestEnv :: BH IO a -> IO a -withTestEnv = withBH defaultManagerSettings testServer - -validateStatus :: Show body => Response body -> Int -> Expectation -validateStatus resp expected = - if actual == expected - then return () - else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body) - where - actual = NHTS.statusCode (responseStatus resp) - body = responseBody resp - -createExampleIndex :: (MonadBH m) => m Reply -createExampleIndex = createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex -deleteExampleIndex :: (MonadBH m) => m Reply -deleteExampleIndex = deleteIndex testIndex - -es13 :: Vers.Version -es13 = Vers.Version [1, 3, 0] [] - -es12 :: Vers.Version -es12 = Vers.Version [1, 2, 0] [] - -es11 :: Vers.Version -es11 = Vers.Version [1, 1, 0] [] - -es14 :: Vers.Version -es14 = Vers.Version [1, 4, 0] [] - -es15 :: Vers.Version -es15 = Vers.Version [1, 5, 0] [] - -es16 :: Vers.Version -es16 = Vers.Version [1, 6, 0] [] - -es20 :: Vers.Version -es20 = Vers.Version [2, 0, 0] [] - -es50 :: Vers.Version -es50 = Vers.Version [5, 0, 0] [] - -getServerVersion :: IO (Maybe Vers.Version) -getServerVersion = fmap extractVersion <$> withTestEnv getStatus - where - extractVersion = versionNumber . number . version - --- | Get configured repo paths for snapshotting. Note that by default --- this is not enabled and if we are over es 1.5, we won't be able to --- test snapshotting. Note that this can and should be part of the --- client functionality in a much less ad-hoc incarnation. 
-getRepoPaths :: IO [FilePath] -getRepoPaths = withTestEnv $ do - bhe <- getBHEnv - let Server s = bhServer bhe - let tUrl = s <> "/" <> "_nodes" - initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl)) - let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet } - Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe)) - return $ fromMaybe mempty $ do - Object nodes <- HM.lookup "nodes" o - Object firstNode <- snd <$> headMay (HM.toList nodes) - Object settings <- HM.lookup "settings" firstNode - Object path <- HM.lookup "path" settings - Array repo <- HM.lookup "repo" path - return [ T.unpack t | String t <- V.toList repo] - --- | 1.5 and earlier don't care about repo paths -canSnapshot :: IO Bool -canSnapshot = do - caresAboutRepos <- atleast es16 - repoPaths <- getRepoPaths - return (not caresAboutRepos || not (null (repoPaths))) - -atleast :: Vers.Version -> IO Bool -atleast v = getServerVersion >>= \x -> return $ x >= Just v - -atmost :: Vers.Version -> IO Bool -atmost v = getServerVersion >>= \x -> return $ x <= Just v - -is :: Vers.Version -> IO Bool -is v = getServerVersion >>= \x -> return $ x == Just v - -when' :: Monad m => m Bool -> m () -> m () -when' b f = b >>= \x -> when x f - -(==~) :: (ApproxEq a) => a -> a -> Property -a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b) - -propJSON :: forall a. (Arbitrary a, ToJSON a, FromJSON a, Show a, ApproxEq a, Typeable a) => Proxy a -> Spec -propJSON _ = prop testName $ \(a :: a) -> - let jsonStr = "via " <> BL8.unpack (encode a) - in counterexample jsonStr (parseEither parseJSON (toJSON a) ==~ Right a) - where testName = show ty <> " FromJSON/ToJSON roundtrips" - ty = typeOf (undefined :: a) - -data Location = Location { lat :: Double - , lon :: Double } deriving (Eq, Generic, Show) - -data Tweet = Tweet { user :: Text - , postDate :: UTCTime - , message :: Text - , age :: Int - , location :: Location - , extra :: Maybe Text } - deriving (Eq, Generic, Show) - -instance ToJSON Tweet where - toJSON = genericToJSON defaultOptions -instance FromJSON Tweet where - parseJSON = genericParseJSON defaultOptions -instance ToJSON Location where - toJSON = genericToJSON defaultOptions -instance FromJSON Location where - parseJSON = genericParseJSON defaultOptions - -data ParentMapping = ParentMapping deriving (Eq, Show) - -instance ToJSON ParentMapping where - toJSON ParentMapping = - object ["properties" .= - object [ "user" .= object ["type" .= ("string" :: Text) - , "fielddata" .= True - ] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - , "extra" .= object ["type" .= ("keyword" :: Text)] - ]] - -data ChildMapping = ChildMapping deriving (Eq, Show) - -instance ToJSON ChildMapping where - toJSON ChildMapping = - object ["_parent" .= object ["type" .= ("parent" :: Text)] - , "properties" .= - object [ "user" .= object ["type" .= ("string" :: Text) - , "fielddata" .= True - ] - -- Serializing the date as a date is breaking other tests, mysteriously. 
- -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - , "extra" .= object ["type" .= ("keyword" :: Text)] - ]] - -data TweetMapping = TweetMapping deriving (Eq, Show) - -instance ToJSON TweetMapping where - toJSON TweetMapping = - object ["tweet" .= - object ["properties" .= - object [ "user" .= object [ "type" .= ("string" :: Text) - , "fielddata" .= True - ] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - , "extra" .= object ["type" .= ("keyword" :: Text)] - ]]] - -exampleTweet :: Tweet -exampleTweet = Tweet { user = "bitemyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 10) - , message = "Use haskell!" - , age = 10000 - , location = Location 40.12 (-71.34) - , extra = Nothing } - -tweetWithExtra :: Tweet -tweetWithExtra = Tweet { user = "bitemyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 10) - , message = "Use haskell!" - , age = 10000 - , location = Location 40.12 (-71.34) - , extra = Just "blah blah" } - -newAge :: Int -newAge = 31337 - -newUser :: Text -newUser = "someotherapp" - -tweetPatch :: Value -tweetPatch = - object [ "age" .= newAge - , "user" .= newUser - ] - -patchedTweet :: Tweet -patchedTweet = exampleTweet{age = newAge, user = newUser} - -otherTweet :: Tweet -otherTweet = Tweet { user = "notmyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 11) - , message = "Use haskell!" 
- , age = 1000 - , location = Location 40.12 (-71.34) - , extra = Nothing } - -resetIndex :: BH IO () -resetIndex = do - _ <- deleteExampleIndex - _ <- createExampleIndex - _ <- putMapping testIndex testMapping TweetMapping - return () - -insertData :: BH IO Reply -insertData = do - resetIndex - insertData' defaultIndexDocumentSettings - -insertData' :: IndexDocumentSettings -> BH IO Reply -insertData' ids = do - r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1") - _ <- refreshIndex testIndex - return r - -updateData :: BH IO Reply -updateData = do - r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1") - _ <- refreshIndex testIndex - return r - -insertOther :: BH IO () -insertOther = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2") - _ <- refreshIndex testIndex - return () - -insertExtra :: BH IO () -insertExtra = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4") - _ <- refreshIndex testIndex - return () - -insertWithSpaceInId :: BH IO () -insertWithSpaceInId = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World") - _ <- refreshIndex testIndex - return () - -searchTweet :: Search -> BH IO (Either EsError Tweet) -searchTweet search = do - result <- searchTweets search - let myTweet :: Either EsError Tweet - myTweet = grabFirst result - return myTweet - -searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet)) -searchTweets search = parseEsResponse =<< searchByIndex testIndex search - -searchExpectNoResults :: Search -> BH IO () -searchExpectNoResults search = do - result <- searchTweets search - let emptyHits = fmap (hits . searchHits) result - liftIO $ - emptyHits `shouldBe` Right [] - -searchExpectAggs :: Search -> BH IO () -searchExpectAggs search = do - reply <- searchByIndex testIndex search - let isEmpty x = return (M.null x) - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - liftIO $ - (result >>= aggregations >>= isEmpty) `shouldBe` Just False - -searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) => - Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO () -searchValidBucketAgg search aggKey extractor = do - reply <- searchByIndex testIndex search - let bucketDocs = docCount . head . buckets - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x) - liftIO $ - count `shouldBe` Just 1 - -searchTermsAggHint :: [ExecutionHint] -> BH IO () -searchTermsAggHint hints = do - let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint } - let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint - forM_ hints $ searchExpectAggs . search - forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms) - -searchTweetHighlight :: Search -> BH IO (Either EsError (Maybe HitHighlight)) -searchTweetHighlight search = do - result <- searchTweets search - let myHighlight = fmap (hitHighlight . head . hits . 
searchHits) result - return myHighlight - -searchExpectSource :: Source -> Either EsError Value -> BH IO () -searchExpectSource src expected = do - _ <- insertData - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let search = (mkSearch (Just query) Nothing) { source = Just src } - reply <- searchByIndex testIndex search - result <- parseEsResponse reply - let value = grabFirst result - liftIO $ - value `shouldBe` expected - -withSnapshotRepo - :: ( MonadMask m - , MonadBH m - ) - => SnapshotRepoName - -> (GenericSnapshotRepo -> m a) - -> m a -withSnapshotRepo srn@(SnapshotRepoName n) f = do - repoPaths <- liftIO getRepoPaths - -- we'll use the first repo path if available, otherwise system temp - -- dir. Note that this will fail on ES > 1.6, so be sure you use - -- @when' canSnapshot@. - case repoPaths of - (firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f - [] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f - where - alloc dir = do - liftIO (setFileMode dir mode) - let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing - resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo - liftIO (validateStatus resp 200) - return (toGSnapshotRepo repo) - mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes - free GenericSnapshotRepo {..} = do - resp <- deleteSnapshotRepo gSnapshotRepoName - liftIO (validateStatus resp 200) - - -withSnapshot - :: ( MonadMask m - , MonadBH m - ) - => SnapshotRepoName - -> SnapshotName - -> m a - -> m a -withSnapshot srn sn = bracket_ alloc free - where - alloc = do - resp <- createSnapshot srn sn createSettings - liftIO (validateStatus resp 200) - -- We'll make this synchronous for testing purposes - createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True - , snapIndices = Just (IndexList (testIndex :| [])) - -- We don't actually need to back up any data - } - free = do - deleteSnapshot srn sn - - - -data BulkTest = BulkTest { name :: Text } deriving (Eq, Generic, Show) -instance FromJSON BulkTest where - parseJSON = genericParseJSON defaultOptions -instance ToJSON BulkTest where - toJSON = genericToJSON defaultOptions - -class GApproxEq f where - gApproxEq :: f a -> f a -> Bool - --- | Unit type -instance GApproxEq U1 where - gApproxEq U1 U1 = True - --- | Sum type, ensure same constructors, recurse -instance (GApproxEq a, GApproxEq b) => GApproxEq (a :+: b) where - gApproxEq (L1 a) (L1 b) = gApproxEq a b - gApproxEq (R1 a) (R1 b) = gApproxEq a b - gApproxEq _ _ = False - --- | Product type, ensure each field is approx eq -instance (GApproxEq a, GApproxEq b) => GApproxEq (a :*: b) where - gApproxEq (a1 :*: b1) (a2 :*: b2) = gApproxEq a1 a2 && gApproxEq b1 b2 - --- | Value type, actually check the values for approx equality -instance (ApproxEq a) => GApproxEq (K1 i a) where - gApproxEq (K1 a) (K1 b) = a =~ b - -instance (GApproxEq f) => GApproxEq (M1 i t f) where - gApproxEq (M1 a) (M1 b) = gApproxEq a b - --- | Typeclass for "equal where it matters". 
Use this to specify --- less-strict equivalence for things such as lists that can wind up --- in an unpredictable order -class ApproxEq a where - (=~) :: a -> a -> Bool - default (=~) :: (Generic a, GApproxEq (Rep a)) => a -> a -> Bool - a =~ b = gApproxEq (G.from a) (G.from b) - - showApproxEq :: a -> String - default showApproxEq :: (Show a) => a -> String - showApproxEq = show - -instance ApproxEq NominalDiffTime where (=~) = (==) -instance ApproxEq UTCTime where (=~) = (==) -instance ApproxEq Text where (=~) = (==) -instance ApproxEq Bool where (=~) = (==) -instance ApproxEq Int where (=~) = (==) -instance ApproxEq Double where (=~) = (==) -instance (ApproxEq a, Show a) => ApproxEq (NonEmpty a) -instance (ApproxEq a, Show a) => ApproxEq (Maybe a) -instance ApproxEq GeoPoint -instance ApproxEq Regexp -instance ApproxEq RangeValue -instance ApproxEq LessThan -instance ApproxEq LessThanEq -instance ApproxEq LessThanD -instance ApproxEq LessThanEqD -instance ApproxEq GreaterThan -instance ApproxEq GreaterThanEq -instance ApproxEq GreaterThanD -instance ApproxEq GreaterThanEqD -instance ApproxEq MinimumMatchHighLow -instance ApproxEq RegexpFlag -instance ApproxEq RegexpFlags -instance ApproxEq NullValue -instance ApproxEq Version -instance ApproxEq VersionNumber -instance ApproxEq DistanceRange -instance ApproxEq IndexName -instance ApproxEq MappingName -instance ApproxEq DocId -instance ApproxEq IndexAliasRouting -instance ApproxEq RoutingValue -instance ApproxEq ShardCount -instance ApproxEq ReplicaCount -instance ApproxEq TemplateName -instance ApproxEq TemplatePattern -instance ApproxEq QueryString -instance ApproxEq FieldName -instance ApproxEq CacheName -instance ApproxEq CacheKey -instance ApproxEq Existence -instance ApproxEq CutoffFrequency -instance ApproxEq Analyzer -instance ApproxEq Lenient -instance ApproxEq Tiebreaker -instance ApproxEq Boost -instance ApproxEq BoostTerms -instance ApproxEq MaxExpansions -instance ApproxEq MinimumMatch -instance ApproxEq DisableCoord -instance ApproxEq IgnoreTermFrequency -instance ApproxEq MinimumTermFrequency -instance ApproxEq MaxQueryTerms -instance ApproxEq Fuzziness -instance ApproxEq PrefixLength -instance ApproxEq TypeName -instance ApproxEq PercentMatch -instance ApproxEq StopWord -instance ApproxEq QueryPath -instance ApproxEq AllowLeadingWildcard -instance ApproxEq LowercaseExpanded -instance ApproxEq EnablePositionIncrements -instance ApproxEq AnalyzeWildcard -instance ApproxEq GeneratePhraseQueries -instance ApproxEq Locale -instance ApproxEq MaxWordLength -instance ApproxEq MinWordLength -instance ApproxEq PhraseSlop -instance ApproxEq MinDocFrequency -instance ApproxEq MaxDocFrequency -instance ApproxEq Filter -instance ApproxEq Query -instance ApproxEq SimpleQueryStringQuery -instance ApproxEq FieldOrFields -instance ApproxEq SimpleQueryFlag -instance ApproxEq RegexpQuery -instance ApproxEq QueryStringQuery -instance ApproxEq RangeQuery -instance ApproxEq PrefixQuery -instance ApproxEq NestedQuery -instance ApproxEq MoreLikeThisFieldQuery -instance ApproxEq MoreLikeThisQuery -instance ApproxEq IndicesQuery -instance ApproxEq HasParentQuery -instance ApproxEq HasChildQuery -instance ApproxEq FuzzyQuery -instance ApproxEq FuzzyLikeFieldQuery -instance ApproxEq FuzzyLikeThisQuery -instance ApproxEq DisMaxQuery -instance ApproxEq CommonTermsQuery -instance ApproxEq CommonMinimumMatch -instance ApproxEq BoostingQuery -instance ApproxEq BoolQuery -instance ApproxEq MatchQuery -instance ApproxEq MultiMatchQueryType -instance ApproxEq 
BooleanOperator -instance ApproxEq ZeroTermsQuery -instance ApproxEq MatchQueryType -instance ApproxEq AliasRouting -instance ApproxEq IndexAliasCreate -instance ApproxEq SearchAliasRouting -instance ApproxEq ScoreType -instance ApproxEq Distance -instance ApproxEq DistanceUnit -instance ApproxEq DistanceType -instance ApproxEq OptimizeBbox -instance ApproxEq GeoBoundingBoxConstraint -instance ApproxEq GeoFilterType -instance ApproxEq GeoBoundingBox -instance ApproxEq LatLon -instance ApproxEq RangeExecution -instance ApproxEq FSType -instance ApproxEq CompoundFormat -instance ApproxEq InitialShardCount -instance ApproxEq Bytes -instance ApproxEq ReplicaBounds -instance ApproxEq Term -instance ApproxEq BoolMatch -instance ApproxEq MultiMatchQuery -instance ApproxEq IndexSettings -instance ApproxEq AllocationPolicy -instance ApproxEq Char where - (=~) = (==) -instance ApproxEq Vers.Version where - (=~) = (==) -instance (ApproxEq a, Show a) => ApproxEq [a] where - as =~ bs = and (zipWith (=~) as bs) -instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where - Left a =~ Left b = a =~ b - Right a =~ Right b = a =~ b - _ =~ _ = False - showApproxEq (Left x) = "Left " <> showApproxEq x - showApproxEq (Right x) = "Right " <> showApproxEq x -instance ApproxEq NodeAttrFilter -instance ApproxEq NodeAttrName -instance ApproxEq BuildHash -instance ApproxEq TemplateQueryKeyValuePairs where - (=~) = (==) -instance ApproxEq TemplateQueryInline -instance ApproxEq Size -instance ApproxEq PhraseSuggesterHighlighter -instance ApproxEq PhraseSuggesterCollate -instance ApproxEq PhraseSuggester -instance ApproxEq SuggestType -instance ApproxEq Suggest -instance ApproxEq DirectGenerators -instance ApproxEq DirectGeneratorSuggestModeTypes - --- | Due to the way nodeattrfilters get serialized here, they may come --- out in a different order, but they are morally equivalent -instance ApproxEq UpdatableIndexSetting where - RoutingAllocationInclude a =~ RoutingAllocationInclude b = - NE.sort a =~ NE.sort b - RoutingAllocationExclude a =~ RoutingAllocationExclude b = - NE.sort a =~ NE.sort b - RoutingAllocationRequire a =~ RoutingAllocationRequire b = - NE.sort a =~ NE.sort b - a =~ b = a == b - showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs)) - showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs)) - showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs)) - showApproxEq x = show x - - -noDuplicates :: Eq a => [a] -> Bool -noDuplicates xs = nub xs == xs - -instance Arbitrary NominalDiffTime where - arbitrary = fromInteger <$> arbitrary - -#if !MIN_VERSION_QuickCheck(2,8,0) -instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where - arbitrary = M.fromList <$> arbitrary -#endif - -instance Arbitrary Text where - arbitrary = T.pack <$> arbitrary - -instance Arbitrary UTCTime where - arbitrary = UTCTime - <$> arbitrary - <*> (fromRational . toRational <$> choose (0::Double, 86400)) - -instance Arbitrary Day where - arbitrary = ModifiedJulianDay <$> (2000 +) <$> arbitrary - shrink = (ModifiedJulianDay <$>) . shrink . 
toModifiedJulianDay - -#if !MIN_VERSION_QuickCheck(2,9,0) -instance Arbitrary a => Arbitrary (NonEmpty a) where - arbitrary = liftA2 (:|) arbitrary arbitrary -#endif - -arbitraryScore :: Gen Score -arbitraryScore = fmap getPositive <$> arbitrary - -instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where - arbitrary = Hit <$> arbitrary - <*> arbitrary - <*> arbitrary - <*> arbitraryScore - <*> arbitrary - <*> arbitrary - shrink = genericShrink - - -instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where - arbitrary = reduceSize $ do - tot <- getPositive <$> arbitrary - score <- arbitraryScore - hs <- arbitrary - return $ SearchHits tot score hs - shrink = genericShrink - -reduceSize :: Gen a -> Gen a -reduceSize f = sized $ \n -> resize (n `div` 2) f - -getSource :: EsResult a -> Maybe a -getSource = fmap _source . foundResult - -grabFirst :: Either EsError (SearchResult a) -> Either EsError a -grabFirst r = - case fmap (hitSource . head . hits . searchHits) r of - (Left e) -> Left e - (Right Nothing) -> Left (EsError 500 "Source was missing") - (Right (Just x)) -> Right x - -------------------------------------------------------------------------------- -arbitraryAlphaNum :: Gen Char -arbitraryAlphaNum = oneof [choose ('a', 'z') - ,choose ('A','Z') - , choose ('0', '9')] - -instance Arbitrary RoutingValue where - arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum - -instance Arbitrary AliasRouting where - arbitrary = oneof [allAlias - ,one - ,theOther - ,both] - where one = GranularAliasRouting - <$> (Just <$> arbitrary) - <*> pure Nothing - theOther = GranularAliasRouting Nothing - <$> (Just <$> arbitrary) - both = GranularAliasRouting - <$> (Just <$> arbitrary) - <*> (Just <$> arbitrary) - allAlias = AllAliasRouting <$> arbitrary - shrink = genericShrink - - -instance Arbitrary FieldName where - arbitrary = FieldName . T.pack <$> listOf1 arbitraryAlphaNum - shrink = genericShrink - - -#if MIN_VERSION_base(4,10,0) --- Test.QuickCheck.Modifiers - -qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a -qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs) -qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!" - -instance Arbitrary a => Arbitrary (NonEmpty a) where - arbitrary = qcNonEmptyToNonEmpty <$> arbitrary -#endif - -instance Arbitrary RegexpFlags where - arbitrary = oneof [ pure AllRegexpFlags - , pure NoRegexpFlags - , SomeRegexpFlags <$> genUniqueFlags - ] - where genUniqueFlags = NE.fromList . 
nub <$> listOf1 arbitrary - - shrink = genericShrink - - -instance Arbitrary IndexAliasCreate where - arbitrary = IndexAliasCreate <$> arbitrary <*> reduceSize arbitrary - shrink = genericShrink - -instance Arbitrary Query where - arbitrary = reduceSize $ oneof [ TermQuery <$> arbitrary <*> arbitrary - , TermsQuery <$> arbitrary <*> arbitrary - , QueryMatchQuery <$> arbitrary - , QueryMultiMatchQuery <$> arbitrary - , QueryBoolQuery <$> arbitrary - , QueryBoostingQuery <$> arbitrary - , QueryCommonTermsQuery <$> arbitrary - , ConstantScoreQuery <$> arbitrary <*> arbitrary - , QueryDisMaxQuery <$> arbitrary - , QueryFuzzyLikeThisQuery <$> arbitrary - , QueryFuzzyLikeFieldQuery <$> arbitrary - , QueryFuzzyQuery <$> arbitrary - , QueryHasChildQuery <$> arbitrary - , QueryHasParentQuery <$> arbitrary - , IdsQuery <$> arbitrary <*> arbitrary - , QueryIndicesQuery <$> arbitrary - , MatchAllQuery <$> arbitrary - , QueryMoreLikeThisQuery <$> arbitrary - , QueryMoreLikeThisFieldQuery <$> arbitrary - , QueryNestedQuery <$> arbitrary - , QueryPrefixQuery <$> arbitrary - , QueryQueryStringQuery <$> arbitrary - , QuerySimpleQueryStringQuery <$> arbitrary - , QueryRangeQuery <$> arbitrary - , QueryRegexpQuery <$> arbitrary - , QueryTemplateQueryInline <$> arbitrary - ] - shrink = genericShrink - -instance Arbitrary Filter where - arbitrary = Filter <$> arbitrary - shrink = genericShrink - -instance Arbitrary ReplicaBounds where - arbitrary = oneof [ replicasBounded - , replicasLowerBounded - , pure ReplicasUnbounded - ] - where replicasBounded = do Positive a <- arbitrary - Positive b <- arbitrary - return (ReplicasBounded a b) - replicasLowerBounded = do Positive a <- arbitrary - return (ReplicasLowerBounded a) - -instance Arbitrary NodeAttrName where - arbitrary = NodeAttrName . T.pack <$> listOf1 arbitraryAlphaNum - - -instance Arbitrary NodeAttrFilter where - arbitrary = do - n <- arbitrary - s:ss <- listOf1 (listOf1 arbitraryAlphaNum) - let ts = T.pack <$> s :| ss - return (NodeAttrFilter n ts) - shrink = genericShrink - -instance Arbitrary VersionNumber where - arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary - where - mk versions = VersionNumber (Vers.Version versions []) - -instance Arbitrary TemplateQueryKeyValuePairs where - arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary - shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . 
shrink $ HM.toList x - -instance Arbitrary IndexName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MappingName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DocId where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Version where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BuildHash where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndexAliasRouting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ShardCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ReplicaCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplateName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplatePattern where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryString where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CacheName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CacheKey where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Existence where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CutoffFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Analyzer where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxExpansions where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Lenient where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Tiebreaker where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Boost where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoostTerms where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DisableCoord where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IgnoreTermFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumTermFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxQueryTerms where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Fuzziness where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PrefixLength where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TypeName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PercentMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary StopWord where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryPath where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AllowLeadingWildcard where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LowercaseExpanded where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary EnablePositionIncrements where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AnalyzeWildcard where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeneratePhraseQueries where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Locale where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxWordLength where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinWordLength where arbitrary = sopArbitrary; shrink = genericShrink -instance 
-instance Arbitrary PhraseSlop where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MinDocFrequency where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MaxDocFrequency where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Regexp where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary SimpleQueryStringQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary FieldOrFields where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary SimpleQueryFlag where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary RegexpQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary QueryStringQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary RangeQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary RangeValue where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary PrefixQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary NestedQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MoreLikeThisFieldQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MoreLikeThisQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary IndicesQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary HasParentQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary HasChildQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary FuzzyQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary FuzzyLikeFieldQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary FuzzyLikeThisQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary DisMaxQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary CommonTermsQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary DistanceRange where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MultiMatchQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary LessThanD where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary LessThanEqD where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GreaterThanD where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GreaterThanEqD where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary LessThan where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary LessThanEq where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GreaterThan where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GreaterThanEq where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GeoPoint where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary NullValue where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MinimumMatchHighLow where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary CommonMinimumMatch where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary BoostingQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary BoolQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MatchQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MultiMatchQueryType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary BooleanOperator where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary ZeroTermsQuery where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary MatchQueryType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary SearchAliasRouting where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary ScoreType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Distance where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary DistanceUnit where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary DistanceType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary OptimizeBbox where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GeoBoundingBoxConstraint where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GeoFilterType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary GeoBoundingBox where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary LatLon where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary RangeExecution where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary RegexpFlag where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary BoolMatch where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Term where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary IndexSettings where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary TokenChar where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Ngram where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary TokenizerDefinition where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary AnalyzerDefinition where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary TokenFilterDefinition where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Shingle where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Language where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Analysis where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Tokenizer where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary TokenFilter where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary UpdatableIndexSetting where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Compression where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Bytes where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary AllocationPolicy where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary InitialShardCount where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary FSType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary CompoundFormat where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary FsSnapshotRepo where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary SnapshotRepoName where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary TemplateQueryInline where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary PhraseSuggesterCollate where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary PhraseSuggesterHighlighter where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Size where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary PhraseSuggester where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary SuggestType where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary Suggest where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary DirectGenerators where arbitrary = sopArbitrary; shrink = genericShrink
-instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = sopArbitrary; shrink = genericShrink
-
-newtype UpdatableIndexSetting' = UpdatableIndexSetting' UpdatableIndexSetting
-  deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable)
-
-instance Arbitrary UpdatableIndexSetting' where
-  arbitrary = do
-    settings <- arbitrary
-    return $ UpdatableIndexSetting' $ case settings of
-      RoutingAllocationInclude xs -> RoutingAllocationInclude (dropDuplicateAttrNames xs)
-      RoutingAllocationExclude xs -> RoutingAllocationExclude (dropDuplicateAttrNames xs)
-      RoutingAllocationRequire xs -> RoutingAllocationRequire (dropDuplicateAttrNames xs)
-      x -> x
-    where
-      dropDuplicateAttrNames = NE.fromList . L.nubBy sameAttrName . NE.toList
-      sameAttrName a b = nodeAttrFilterName a == nodeAttrFilterName b
-  shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (genericShrink x)
+import Test.Common
+import Test.Import
+
+import Prelude
+
+import qualified Test.Aggregation as Aggregation
+import qualified Test.BulkAPI as Bulk
+import qualified Test.Documents as Documents
+import qualified Test.Highlights as Highlights
+import qualified Test.Indices as Indices
+import qualified Test.JSON as JSON
+import qualified Test.Query as Query
+import qualified Test.Script as Script
+import qualified Test.Snapshots as Snapshots
+import qualified Test.Sorting as Sorting
+import qualified Test.SourceFiltering as SourceFiltering
+import qualified Test.Suggest as Suggest
+import qualified Test.Templates as Templates
 
 main :: IO ()
 main = hspec $ do
-
-  describe "index create/delete API" $ do
-    it "creates and then deletes the requested index" $ withTestEnv $ do
-      -- priming state.
-      _ <- deleteExampleIndex
-      resp <- createExampleIndex
-      deleteResp <- deleteExampleIndex
-      liftIO $ do
-        validateStatus resp 200
-        validateStatus deleteResp 200
+  Aggregation.spec
+  Bulk.spec
+  Documents.spec
+  Highlights.spec
+  Indices.spec
+  JSON.spec
+  Query.spec
+  Script.spec
+  Snapshots.spec
+  Sorting.spec
+  SourceFiltering.spec
+  Suggest.spec
+  Templates.spec
 
   describe "error parsing" $ do
     it "can parse EsErrors for < 2.0" $ when' (atmost es16) $ withTestEnv $ do
@@ -990,446 +59,11 @@ main = hspec $ do
       let errorResp = eitherDecode (responseBody res)
       liftIO (errorResp `shouldBe` Right (EsError 404 "no such index"))
 
-  describe "document API" $ do
-    it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do
-      _ <- insertData
-      _ <- updateData
-      docInserted <- getDocument testIndex testMapping (DocId "1")
-      let newTweet = eitherDecode
-            (responseBody docInserted) :: Either String (EsResult Tweet)
-      liftIO $ (fmap getSource newTweet `shouldBe` Right (Just patchedTweet))
-
-    it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do
-      _ <- insertWithSpaceInId
-      docInserted <- getDocument testIndex testMapping (DocId "Hello World")
-      let newTweet = eitherDecode
-            (responseBody docInserted) :: Either String (EsResult Tweet)
-      liftIO $ (fmap getSource newTweet `shouldBe` Right (Just exampleTweet))
-
-    it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do
-      doc <- getDocument testIndex testMapping (DocId "bogus")
-      let noTweet = eitherDecode
-            (responseBody doc) :: Either String (EsResult Tweet)
-      liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing
-
-    it "can use optimistic concurrency control" $ withTestEnv $ do
-      let ev = ExternalDocVersion minBound
-      let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev }
-      resetIndex
-      res <- insertData' cfg
-      liftIO $ isCreated res `shouldBe` True
-      res' <- insertData' cfg
-      liftIO $ isVersionConflict res' `shouldBe` True
-
-    it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do
-      resetIndex
-      _ <- putMapping testIndex (MappingName "child") ChildMapping
-      _ <- putMapping testIndex (MappingName "parent") ParentMapping
-      _ <- indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1")
-      let parent = (Just . DocumentParent . DocId) "1"
DocId) "1" - ids = IndexDocumentSettings NoVersionControl parent - _ <- indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2") - _ <- refreshIndex testIndex - exists <- documentExists testIndex (MappingName "child") parent (DocId "2") - liftIO $ exists `shouldBe` True - - describe "template API" $ do - it "can create a template" $ withTestEnv $ do - let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping] - resp <- putTemplate idxTpl (TemplateName "tweet-tpl") - liftIO $ validateStatus resp 200 - - it "can detect if a template exists" $ withTestEnv $ do - exists <- templateExists (TemplateName "tweet-tpl") - liftIO $ exists `shouldBe` True - - it "can delete a template" $ withTestEnv $ do - resp <- deleteTemplate (TemplateName "tweet-tpl") - liftIO $ validateStatus resp 200 - - it "can detect if a template doesn't exist" $ withTestEnv $ do - exists <- templateExists (TemplateName "tweet-tpl") - liftIO $ exists `shouldBe` False - - describe "bulk API" $ do - it "inserts all documents we request" $ withTestEnv $ do - _ <- insertData - let firstTest = BulkTest "blah" - let secondTest = BulkTest "bloo" - let thirdTest = BulkTest "graffle" - let firstDoc = BulkIndex testIndex - testMapping (DocId "2") (toJSON firstTest) - let secondDoc = BulkCreate testIndex - testMapping (DocId "3") (toJSON secondTest) - let thirdDoc = BulkCreateEncoding testIndex - testMapping (DocId "4") (toEncoding thirdTest) - let stream = V.fromList [firstDoc, secondDoc, thirdDoc] - _ <- bulk stream - _ <- refreshIndex testIndex - fDoc <- getDocument testIndex testMapping (DocId "2") - sDoc <- getDocument testIndex testMapping (DocId "3") - tDoc <- getDocument testIndex testMapping (DocId "4") - let maybeFirst = eitherDecode $ responseBody fDoc :: Either String (EsResult BulkTest) - let maybeSecond = eitherDecode $ responseBody sDoc :: Either String (EsResult BulkTest) - let maybeThird = eitherDecode $ responseBody tDoc :: Either String (EsResult BulkTest) - liftIO $ do - fmap getSource maybeFirst `shouldBe` Right (Just firstTest) - fmap getSource maybeSecond `shouldBe` Right (Just secondTest) - fmap getSource maybeThird `shouldBe` Right (Just thirdTest) - - - describe "query API" $ do - it "returns document for term query and identity filter" $ withTestEnv $ do - _ <- insertData - let query = TermQuery (Term "user" "bitemyapp") Nothing - let filter = Filter $ MatchAllQuery Nothing - let search = mkSearch (Just query) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "handles constant score queries" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let cfQuery = ConstantScoreQuery query (Boost 1.0) - let filter = Filter $ MatchAllQuery Nothing - let search = mkSearch (Just cfQuery) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for terms query and identity filter" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let filter = Filter $ MatchAllQuery Nothing - let search = mkSearch (Just query) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for match query" $ withTestEnv $ do - _ <- insertData - let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet 
-      liftIO $
-        myTweet `shouldBe` Right exampleTweet
-
-    it "returns document for multi-match query" $ withTestEnv $ do
-      _ <- insertData
-      let flds = [FieldName "user", FieldName "message"]
-      let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp")
-      let search = mkSearch (Just query) Nothing
-      myTweet <- searchTweet search
-      liftIO $
-        myTweet `shouldBe` Right exampleTweet
-
-    it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do
-      _ <- insertData
-      let tiebreaker = Just $ Tiebreaker 0.3
-          flds = [FieldName "user", FieldName "message"]
-          multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp")
-          query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker }
-          search = mkSearch (Just query) Nothing
-      myTweet <- searchTweet search
-      liftIO $
-        myTweet `shouldBe` Right exampleTweet
-
-    it "returns document for bool query" $ withTestEnv $ do
-      _ <- insertData
-      let innerQuery = QueryMatchQuery $
-                       mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
-      let query = QueryBoolQuery $
-                  mkBoolQuery [innerQuery] [] [] []
-      let search = mkSearch (Just query) Nothing
-      myTweet <- searchTweet search
-      liftIO $
-        myTweet `shouldBe` Right exampleTweet
-
-    it "returns document for boosting query" $ withTestEnv $ do
-      _ <- insertData
-      let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
-      let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp")
-      let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2)
-      let search = mkSearch (Just query) Nothing
-      myTweet <- searchTweet search
-      liftIO $
-        myTweet `shouldBe` Right exampleTweet
-
-    it "returns document for common terms query" $ withTestEnv $ do
-      _ <- insertData
-      let query = QueryCommonTermsQuery $
-                  CommonTermsQuery (FieldName "user")
-                  (QueryString "bitemyapp")
-                  (CutoffFrequency 0.0001)
-                  Or Or Nothing Nothing Nothing Nothing
-      let search = mkSearch (Just query) Nothing
-      myTweet <- searchTweet search
-      liftIO $
-        myTweet `shouldBe` Right exampleTweet
-
-    it "returns document for inline template query" $ withTestEnv $ do
-      _ <- insertData
-      let innerQuery = QueryMatchQuery $
-                         mkMatchQuery (FieldName "{{userKey}}")
-                         (QueryString "{{bitemyappKey}}")
-          templateParams = TemplateQueryKeyValuePairs $ HM.fromList
-                           [ ("userKey", "user")
-                           , ("bitemyappKey", "bitemyapp")
-                           ]
-          templateQuery = QueryTemplateQueryInline $
-                            TemplateQueryInline innerQuery templateParams
-          search = mkSearch (Just templateQuery) Nothing
-      myTweet <- searchTweet search
-      liftIO $ myTweet `shouldBe` Right exampleTweet
-
-
-  describe "sorting" $ do
-    it "returns documents in the right order" $ withTestEnv $ do
-      _ <- insertData
-      _ <- insertOther
-      let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending
-      let search = Search Nothing
-                   Nothing (Just [sortSpec]) Nothing Nothing
-                   False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing
-                   Nothing
-      result <- searchTweets search
-      let myTweet = grabFirst result
-      liftIO $
-        myTweet `shouldBe` Right otherTweet
-
-  describe "Aggregation API" $ do
-    it "returns term aggregation results" $ withTestEnv $ do
-      _ <- insertData
-      let terms = TermsAgg $ mkTermsAggregation "user"
-      let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
-      searchExpectAggs search
-      searchValidBucketAgg search "users" toTerms
-
-    it "returns sub-aggregation results" $ withTestEnv $ do
-      _ <- insertData
-      let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age"
"age_agg" . TermsAgg $ mkTermsAggregation "age" - agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs} - search = mkAggregateSearch Nothing $ mkAggregations "users" agg - reply <- searchByIndex testIndex search - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - usersAggResults = result >>= aggregations >>= toTerms "users" - subAggResults = usersAggResults >>= (listToMaybe . buckets) >>= termsAggs >>= toTerms "age_agg" - subAddResultsExists = isJust subAggResults - liftIO $ (subAddResultsExists) `shouldBe` True - - it "returns cardinality aggregation results" $ withTestEnv $ do - _ <- insertData - let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user" - let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality - let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 } - searchExpectAggs search' - let docCountPair k n = (k, object ["value" .= Number n]) - res <- searchTweets search' - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1])) - - it "returns stats aggregation results" $ withTestEnv $ do - _ <- insertData - let stats = StatsAgg $ mkStatsAggregation $ FieldName "age" - let search = mkAggregateSearch Nothing $ mkAggregations "users" stats - let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 } - searchExpectAggs search' - let statsAggRes k n = (k, object [ "max" .= Number n - , "avg" .= Number n - , "count" .= Number 1 - , "min" .= Number n - , "sum" .= Number n]) - res <- searchTweets search' - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ statsAggRes "users" 10000])) - - it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do - _ <- insertData - let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst } - let search = mkAggregateSearch Nothing $ mkAggregations "users" terms - searchExpectAggs search - searchValidBucketAgg search "users" toTerms - - -- One of these fails with 1.7.3 - it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [Map, Ordinals] - - it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals] - - it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map] - -- One of the above. 
- - it "can execute value_count aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <> - mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus"))) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["value" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2 - , docCountPair "bogus_count" 0 - ])) - - it "can execute date_range aggregations" $ withTestEnv $ do - let now = fromGregorian 2015 3 14 - let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0 - let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0 - let oldDoc = exampleTweet { postDate = ltAMonthAgo } - let newDoc = exampleTweet { postDate = ltAWeekAgo } - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1") - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2") - _ <- refreshIndex testIndex - let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth]) - let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek]) - let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek]) - let ags = mkAggregations "date_ranges" (DateRangeAgg agg) - let search = mkAggregateSearch Nothing ags - res <- searchTweets search - liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2 - let bucks = do magrs <- fmapL show (aggregations <$> res) - agrs <- note "no aggregations returned" magrs - rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs - parseEither parseJSON rawBucks - let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0 - let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0 - liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*" - (Just fromMonthT) - (Just "2015-02-14T00:00:00.000Z") - Nothing - Nothing - 2 - Nothing - , DateRangeResult "2015-03-07T00:00:00.000Z-*" - (Just fromWeekT) - (Just "2015-03-07T00:00:00.000Z") - Nothing - Nothing - 1 - Nothing - ] - - it "returns date histogram aggregation results" $ withTestEnv $ do - _ <- insertData - let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute - let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram) - searchExpectAggs search - searchValidBucketAgg search "byDate" toDateHistogram - - it "can execute missing aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertExtra - let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra")) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["doc_count" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1])) - - describe "Highlights API" $ do - - it "returns highlight from query when there should be one" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search - liftIO $ - myHighlight `shouldBe` Right (Just (M.fromList [("message",["Use haskell!"])])) - - it "doesn't return highlight from a query when it shouldn't" $ withTestEnv 
-      _ <- insertData
-      _ <- insertOther
-      let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell")
-      let testHighlight = Highlights Nothing [FieldHighlight (FieldName "user") Nothing]
-
-      let search = mkHighlightSearch (Just query) testHighlight
-      myHighlight <- searchTweetHighlight search
-      liftIO $
-        myHighlight `shouldBe` Right Nothing
-
-  describe "Source filtering" $ do
-
-    it "doesn't include source when sources are disabled" $ withTestEnv $ do
-      searchExpectSource
-        NoSource
-        (Left (EsError 500 "Source was missing"))
-
-    it "includes a source" $ withTestEnv $ do
-      searchExpectSource
-        (SourcePatterns (PopPattern (Pattern "message")))
-        (Right (Object (HM.fromList [("message", String "Use haskell!")])))
-
-    it "includes sources" $ withTestEnv $ do
-      searchExpectSource
-        (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"]))
-        (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")])))
-
-    it "includes source patterns" $ withTestEnv $ do
-      searchExpectSource
-        (SourcePatterns (PopPattern (Pattern "*ge")))
-        (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")])))
-
-    it "excludes source patterns" $ withTestEnv $ do
-      searchExpectSource
-        (SourceIncludeExclude (Include [])
-          (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"]))
-        (Right (Object (HM.fromList [("user",String "bitemyapp")])))
-
-  describe "ToJSON RegexpFlags" $ do
-    it "generates the correct JSON for AllRegexpFlags" $
-      toJSON AllRegexpFlags `shouldBe` String "ALL"
-
-    it "generates the correct JSON for NoRegexpFlags" $
-      toJSON NoRegexpFlags `shouldBe` String "NONE"
-
-    it "generates the correct JSON for SomeRegexpFlags" $
-      let flags = AnyString :| [ Automaton
-                               , Complement
-                               , Empty
-                               , Intersection
-                               , Interval ]
-      in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL"
-
-    prop "removes duplicates from flags" $ \(flags :: RegexpFlags) ->
-      let String str = toJSON flags
-          flagStrs = T.splitOn "|" str
-      in noDuplicates flagStrs
-
-  describe "omitNulls" $ do
-    it "checks that omitNulls drops list elements when it should" $
-      let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int]))
-                                , "test2" .= (toJSON ("some value" :: Text))]
-      in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
-
-    it "checks that omitNulls doesn't drop list elements when it shouldn't" $
-      let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int]))
-                                   , "test2" .= (toJSON ("some value" :: Text))]
-      in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0]))
-                                                   , ("test2", String "some value")])
-    it "checks that omitNulls drops non list elements when it should" $
-      let dropped = omitNulls $ [ "test1" .= (toJSON Null)
-                                , "test2" .= (toJSON ("some value" :: Text))]
-      in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
-    it "checks that omitNulls doesn't drop non list elements when it shouldn't" $
-      let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int))
-                                   , "test2" .= (toJSON ("some value" :: Text))]
-      in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0)
-                                                   , ("test2", String "some value")])
-  describe "Monoid (SearchHits a)" $ do
+  describe "Monoid (SearchHits a)" $
     prop "abides the monoid laws" $ eq $
       prop_Monoid (T :: T (SearchHits ()))
 
-  describe "mkDocVersion" $ do
+  describe "mkDocVersion" $
     prop "can never construct an out of range docVersion" $ \i ->
       let res = mkDocVersion i
       in case res of
@@ -1438,113 +72,7 @@ main = hspec $ do
         (dv <= maxBound) .&&.
         docVersionNumber dv === i
 
-  describe "FsSnapshotRepo" $ do
-    prop "SnapshotRepo laws" $ \fsr ->
-      fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
-
-  describe "snapshot repos" $ do
-    it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do
-      res <- getSnapshotRepos AllSnapshotRepos
-      liftIO $ case res of
-        Left e -> expectationFailure ("Expected a right but got Left " <> show e)
-        Right _ -> return ()
-
-    it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      let r2n = SnapshotRepoName "bloodhound-repo2"
-      withSnapshotRepo r1n $ \r1 ->
-        withSnapshotRepo r2n $ \r2 -> do
-          repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n]))
-          liftIO $ case repos of
-            Right xs -> do
-              let srt = L.sortBy (comparing gSnapshotRepoName)
-              srt xs `shouldBe` srt [r1, r2]
-            Left e -> expectationFailure (show e)
-
-    it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \r1 -> do
-        let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1))
-        let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing
-        resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression
-        liftIO (validateStatus resp 200)
-        Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| []))
-        liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression)
-
-    -- verify came around in 1.4 it seems
-    it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        res <- verifySnapshotRepo r1n
-        liftIO $ case res of
-          Right (SnapshotVerification vs)
-            | null vs -> expectationFailure "Expected nonempty set of verifying nodes"
-            | otherwise -> return ()
-          Left e -> expectationFailure (show e)
-
-  describe "snapshots" $ do
-    it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        res <- getSnapshots r1n AllSnapshots
-        liftIO $ case res of
-          Left e -> expectationFailure ("Expected a right but got Left " <> show e)
-          Right _ -> return ()
-
-    it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        let s1n = SnapshotName "example-snapshot"
-        withSnapshot r1n s1n $ do
-          res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| []))
-          liftIO $ case res of
-            Right [snap]
-              | snapInfoState snap == SnapshotSuccess &&
-                snapInfoName snap == s1n -> return ()
-              | otherwise -> expectationFailure (show snap)
-            Right [] -> expectationFailure "There were no snapshots"
-            Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps))
-            Left e -> expectationFailure (show e)
-
-  describe "snapshot restore" $ do
-    it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        let s1n = SnapshotName "example-snapshot"
-        withSnapshot r1n s1n $ do
-          let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True }
-          -- have to close an index to restore it
-          resp1 <- closeIndex testIndex
-          liftIO (validateStatus resp1 200)
-          resp2 <- restoreSnapshot r1n s1n settings
-          liftIO (validateStatus resp2 200)
-
-    it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        let s1n = SnapshotName "example-snapshot"
-        withSnapshot r1n s1n $ do
-          let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)"
-          let replace = RRTLit "restored-" :| [RRSubWholeMatch]
-          let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1"
-          oldEnoughForOverrides <- liftIO (atleast es15)
-          let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
-          let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True
-                                                        , snapRestoreRenamePattern = Just pat
-                                                        , snapRestoreRenameReplacement = Just replace
-                                                        , snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides
                                                                                               then Just overrides
                                                                                               else Nothing
-                                                        }
-          -- have to close an index to restore it
-          let go = do
-                resp <- restoreSnapshot r1n s1n settings
-                liftIO (validateStatus resp 200)
-                exists <- indexExists expectedIndex
-                liftIO (exists `shouldBe` True)
-          go `finally` deleteIndex expectedIndex
-
-  describe "getNodesInfo" $ do
+  describe "getNodesInfo" $
     it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
       res <- getNodesInfo LocalNode
       liftIO $ case res of
@@ -1554,7 +82,7 @@ main = hspec $ do
         Right NodesInfo {..} -> length nodesInfo `shouldBe` 1
         Left e -> expectationFailure ("Expected NodesInfo but got " <> show e)
 
-  describe "getNodesStats" $ do
+  describe "getNodesStats" $
     it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
      res <- getNodesStats LocalNode
       liftIO $ case res of
@@ -1564,7 +92,7 @@ main = hspec $ do
         Right NodesStats {..} -> length nodesStats `shouldBe` 1
         Left e -> expectationFailure ("Expected NodesStats but got " <> show e)
 
-  describe "Enum DocVersion" $ do
+  describe "Enum DocVersion" $
     it "follows the laws of Enum, Bounded" $ do
       evaluate (succ maxBound :: DocVersion) `shouldThrow` anyErrorCall
       evaluate (pred minBound :: DocVersion) `shouldThrow` anyErrorCall
@@ -1574,11 +102,14 @@ main = hspec $ do
       enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound]
       enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound]
 
-  describe "scan&scroll API" $ do
+  describe "Scan & Scroll API" $
    it "returns documents using the scan&scroll API" $ withTestEnv $ do
      _ <- insertData
      _ <- insertOther
-      let search = (mkSearch (Just $ MatchAllQuery Nothing) Nothing) { size = (Size 1) }
+      let search =
+            (mkSearch
+              (Just $ MatchAllQuery Nothing) Nothing)
+            { size = Size 1 }
      regular_search <- searchTweet search
      scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet]
      let scan_search = map hitSource scan_search'
@@ -1586,266 +117,3 @@ main = hspec $ do
       regular_search `shouldBe` Right exampleTweet
       -- Check that the size restriction is being honored
       liftIO $ scan_search `shouldMatchList` [Just exampleTweet, Just otherTweet]
-
-  describe "index aliases" $ do
-    let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")
-    let alias = IndexAlias (testIndex) aname
-    let create = IndexAliasCreate Nothing Nothing
-    let action = AddAlias alias create
-    it "handles the simple case of aliasing an existing index" $ do
-      withTestEnv $ do
-        resetIndex
-        resp <- updateIndexAliases (action :| [])
-        liftIO $ validateStatus resp 200
-      let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
-      (do aliases <- withTestEnv getIndexAliases
-          let expected = IndexAliasSummary alias create
-          case aliases of
-            Right (IndexAliasesSummary summs) ->
-              L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
-            Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
-    it "allows alias deletion" $ do
-      aliases <- withTestEnv $ do
-        resetIndex
-        resp <- updateIndexAliases (action :| [])
-        liftIO $ validateStatus resp 200
-        deleteIndexAlias aname
-        getIndexAliases
-      let expected = IndexAliasSummary alias create
-      case aliases of
-        Right (IndexAliasesSummary summs) ->
-          L.find ((== aname) . indexAlias . indexAliasSummaryAlias) summs `shouldBe` Nothing
-        Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)
-
-  describe "Index Listing" $ do
-    it "returns a list of index names" $ withTestEnv $ do
-      _ <- createExampleIndex
-      ixns <- listIndices
-      liftIO (ixns `shouldContain` [testIndex])
-
-  describe "Index Settings" $ do
-    it "persists settings" $ withTestEnv $ do
-      _ <- deleteExampleIndex
-      _ <- createExampleIndex
-      let updates = BlocksWrite False :| []
-      updateResp <- updateIndexSettings updates testIndex
-      liftIO $ validateStatus updateResp 200
-      getResp <- getIndexSettings testIndex
-      liftIO $
-        getResp `shouldBe` Right (IndexSettingsSummary
-                                    testIndex
-                                    (IndexSettings (ShardCount 1) (ReplicaCount 0))
-                                    (NE.toList updates))
-
-    it "allows total fields to be set" $ when' (atleast es50) $ withTestEnv $ do
-      _ <- deleteExampleIndex
-      _ <- createExampleIndex
-      let updates = MappingTotalFieldsLimit 2500 :| []
-      updateResp <- updateIndexSettings updates testIndex
-      liftIO $ validateStatus updateResp 200
-      getResp <- getIndexSettings testIndex
-      liftIO $
-        getResp `shouldBe` Right (IndexSettingsSummary
-                                    testIndex
-                                    (IndexSettings (ShardCount 1) (ReplicaCount 0))
-                                    (NE.toList updates))
-
-    it "accepts custom analyzers" $ when' (atleast es50) $ withTestEnv $ do
-      _ <- deleteExampleIndex
-      let analysis = Analysis
-            (M.singleton "ex_analyzer"
-              ( AnalyzerDefinition
-                (Just (Tokenizer "ex_tokenizer"))
-                (map TokenFilter
-                  [ "ex_filter_lowercase","ex_filter_uppercase","ex_filter_apostrophe"
-                  , "ex_filter_reverse","ex_filter_snowball"
-                  , "ex_filter_shingle"
-                  ]
-                )
-              )
-            )
-            (M.singleton "ex_tokenizer"
-              ( TokenizerDefinitionNgram
-                ( Ngram 3 4 [TokenLetter,TokenDigit])
-              )
-            )
-            (M.fromList
-              [ ("ex_filter_lowercase",TokenFilterDefinitionLowercase (Just Greek))
-              , ("ex_filter_uppercase",TokenFilterDefinitionUppercase Nothing)
-              , ("ex_filter_apostrophe",TokenFilterDefinitionApostrophe)
-              , ("ex_filter_reverse",TokenFilterDefinitionReverse)
-              , ("ex_filter_snowball",TokenFilterDefinitionSnowball English)
-              , ("ex_filter_shingle",TokenFilterDefinitionShingle (Shingle 3 3 True False " " "_"))
-              ]
-            )
-          updates = [AnalysisSetting analysis]
-      createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
-      liftIO $ validateStatus createResp 200
-      getResp <- getIndexSettings testIndex
-      liftIO $
-        getResp `shouldBe` Right (IndexSettingsSummary
-                                    testIndex
-                                    (IndexSettings (ShardCount 1) (ReplicaCount 0))
-                                    updates
-                                 )
-
-    it "accepts default compression codec" $ when' (atleast es50) $ withTestEnv $ do
-      _ <- deleteExampleIndex
-      let updates = [CompressionSetting CompressionDefault]
-      createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
-      liftIO $ validateStatus createResp 200
-      getResp <- getIndexSettings testIndex
-      liftIO $ getResp `shouldBe` Right
-        (IndexSettingsSummary testIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) updates)
-
-    it "accepts best compression codec" $ when' (atleast es50) $ withTestEnv $ do
-      _ <- deleteExampleIndex
-      let updates = [CompressionSetting CompressionBest]
-      createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
-      liftIO $ validateStatus createResp 200
-      getResp <- getIndexSettings testIndex
-      liftIO $ getResp `shouldBe` Right
-        (IndexSettingsSummary testIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) updates)
-
-
-  describe "Index Optimization" $ do
-    it "returns a successful response upon completion" $ withTestEnv $ do
-      _ <- createExampleIndex
-      resp <- forceMergeIndex (IndexList (testIndex :| [])) defaultForceMergeIndexSettings
-      liftIO $ validateStatus resp 200
-
-  describe "Suggest" $ do
-    it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
-      _ <- insertData
-      let query = QueryMatchNoneQuery
-          phraseSuggester = mkPhraseSuggester (FieldName "message")
-          namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
-          search' = mkSearch (Just query) Nothing
-          search = search' { suggestBody = Just namedSuggester }
-          expectedText = Just "use haskell"
-      resp <- searchByIndex testIndex search
-      parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
-      case parsed of
-        Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e)
-        Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText
-
-  describe "JSON instances" $ do
-    propJSON (Proxy :: Proxy Version)
-    propJSON (Proxy :: Proxy IndexName)
-    propJSON (Proxy :: Proxy MappingName)
-    propJSON (Proxy :: Proxy DocId)
-    propJSON (Proxy :: Proxy IndexAliasRouting)
-    propJSON (Proxy :: Proxy RoutingValue)
-    propJSON (Proxy :: Proxy ShardCount)
-    propJSON (Proxy :: Proxy ReplicaCount)
-    propJSON (Proxy :: Proxy TemplateName)
-    propJSON (Proxy :: Proxy TemplatePattern)
-    propJSON (Proxy :: Proxy QueryString)
-    propJSON (Proxy :: Proxy FieldName)
-    propJSON (Proxy :: Proxy CacheName)
-    propJSON (Proxy :: Proxy CacheKey)
-    propJSON (Proxy :: Proxy Existence)
-    propJSON (Proxy :: Proxy CutoffFrequency)
-    propJSON (Proxy :: Proxy Analyzer)
-    propJSON (Proxy :: Proxy MaxExpansions)
-    propJSON (Proxy :: Proxy Lenient)
-    propJSON (Proxy :: Proxy Tiebreaker)
-    propJSON (Proxy :: Proxy Boost)
-    propJSON (Proxy :: Proxy BoostTerms)
-    propJSON (Proxy :: Proxy MinimumMatch)
-    propJSON (Proxy :: Proxy DisableCoord)
-    propJSON (Proxy :: Proxy IgnoreTermFrequency)
-    propJSON (Proxy :: Proxy MinimumTermFrequency)
-    propJSON (Proxy :: Proxy MaxQueryTerms)
-    propJSON (Proxy :: Proxy Fuzziness)
-    propJSON (Proxy :: Proxy PrefixLength)
-    propJSON (Proxy :: Proxy TypeName)
-    propJSON (Proxy :: Proxy PercentMatch)
-    propJSON (Proxy :: Proxy StopWord)
-    propJSON (Proxy :: Proxy QueryPath)
-    propJSON (Proxy :: Proxy AllowLeadingWildcard)
-    propJSON (Proxy :: Proxy LowercaseExpanded)
-    propJSON (Proxy :: Proxy EnablePositionIncrements)
-    propJSON (Proxy :: Proxy AnalyzeWildcard)
-    propJSON (Proxy :: Proxy GeneratePhraseQueries)
-    propJSON (Proxy :: Proxy Locale)
-    propJSON (Proxy :: Proxy MaxWordLength)
-    propJSON (Proxy :: Proxy MinWordLength)
-    propJSON (Proxy :: Proxy PhraseSlop)
-    propJSON (Proxy :: Proxy MinDocFrequency)
-    propJSON (Proxy :: Proxy MaxDocFrequency)
-    propJSON (Proxy :: Proxy Filter)
-    propJSON (Proxy :: Proxy Query)
-    propJSON (Proxy :: Proxy SimpleQueryStringQuery)
-    propJSON (Proxy :: Proxy FieldOrFields)
-    propJSON (Proxy :: Proxy SimpleQueryFlag)
-    propJSON (Proxy :: Proxy RegexpQuery)
-    propJSON (Proxy :: Proxy QueryStringQuery)
-    propJSON (Proxy :: Proxy RangeQuery)
-    propJSON (Proxy :: Proxy PrefixQuery)
-    propJSON (Proxy :: Proxy NestedQuery)
-    propJSON (Proxy :: Proxy MoreLikeThisFieldQuery)
-    propJSON (Proxy :: Proxy MoreLikeThisQuery)
-    propJSON (Proxy :: Proxy IndicesQuery)
-    propJSON (Proxy :: Proxy HasParentQuery)
-    propJSON (Proxy :: Proxy HasChildQuery)
-    propJSON (Proxy :: Proxy FuzzyQuery)
-    propJSON (Proxy :: Proxy FuzzyLikeFieldQuery)
-    propJSON (Proxy :: Proxy FuzzyLikeThisQuery)
-    propJSON (Proxy :: Proxy DisMaxQuery)
-    propJSON (Proxy :: Proxy CommonTermsQuery)
-    propJSON (Proxy :: Proxy CommonMinimumMatch)
-    propJSON (Proxy :: Proxy BoostingQuery)
-    propJSON (Proxy :: Proxy BoolQuery)
-    propJSON (Proxy :: Proxy MatchQuery)
-    propJSON (Proxy :: Proxy MultiMatchQueryType)
-    propJSON (Proxy :: Proxy BooleanOperator)
-    propJSON (Proxy :: Proxy ZeroTermsQuery)
-    propJSON (Proxy :: Proxy MatchQueryType)
-    propJSON (Proxy :: Proxy AliasRouting)
-    propJSON (Proxy :: Proxy IndexAliasCreate)
-    propJSON (Proxy :: Proxy SearchAliasRouting)
-    propJSON (Proxy :: Proxy ScoreType)
-    propJSON (Proxy :: Proxy Distance)
-    propJSON (Proxy :: Proxy DistanceUnit)
-    propJSON (Proxy :: Proxy DistanceType)
-    propJSON (Proxy :: Proxy OptimizeBbox)
-    propJSON (Proxy :: Proxy GeoBoundingBoxConstraint)
-    propJSON (Proxy :: Proxy GeoFilterType)
-    propJSON (Proxy :: Proxy GeoBoundingBox)
-    propJSON (Proxy :: Proxy LatLon)
-    propJSON (Proxy :: Proxy RangeExecution)
-    prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs ->
-      let expected = case rfs of
-            SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (nub (NE.toList fs)))
-            x -> x
-      in parseEither parseJSON (toJSON rfs) === Right expected
-    propJSON (Proxy :: Proxy BoolMatch)
-    propJSON (Proxy :: Proxy Term)
-    propJSON (Proxy :: Proxy MultiMatchQuery)
-    propJSON (Proxy :: Proxy IndexSettings)
-    propJSON (Proxy :: Proxy UpdatableIndexSetting')
-    propJSON (Proxy :: Proxy ReplicaBounds)
-    propJSON (Proxy :: Proxy Bytes)
-    propJSON (Proxy :: Proxy AllocationPolicy)
-    propJSON (Proxy :: Proxy InitialShardCount)
-    propJSON (Proxy :: Proxy FSType)
-    propJSON (Proxy :: Proxy CompoundFormat)
-    propJSON (Proxy :: Proxy TemplateQueryInline)
-    propJSON (Proxy :: Proxy Suggest)
-    propJSON (Proxy :: Proxy DirectGenerators)
-    propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes)
-
--- Temporary solution for the lack of generic derivation of Arbitrary
--- We use generics-sop, as it's much more concise than directly using GHC.Generics
---
--- This will be unneeded after https://github.com/nick8325/quickcheck/pull/112
--- is merged and released
-sopArbitrary :: forall a. (Generic a, SOP.GTo a, SOP.All SOP.SListI (SOP.GCode a), SOP.All2 Arbitrary (SOP.GCode a)) => Gen a
-sopArbitrary = fmap SOP.gto sopArbitrary'
-
-sopArbitrary' :: forall xss. (SOP.All SOP.SListI xss, SOP.All2 Arbitrary xss) => Gen (SOP.SOP SOP.I xss)
-sopArbitrary' = SOP.hsequence =<< elements (SOP.apInjs_POP $ SOP.hcpure p arbitrary)
-  where
-    p :: Proxy Arbitrary
-    p = Proxy
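Editor's note on the removed helper: every deleted one-liner instance above consumed sopArbitrary the same way, so a single illustration covers them all. A minimal sketch of that usage, assuming the sopArbitrary definition above is in scope; WidgetName is a hypothetical newtype, not a Bloodhound type:

{-# LANGUAGE DeriveGeneric #-}

import GHC.Generics (Generic)
import Test.QuickCheck (Arbitrary (..), genericShrink)

-- Any type with a GHC.Generics instance whose fields are all
-- Arbitrary can be handled this way.
newtype WidgetName = WidgetName String
  deriving (Eq, Show, Generic)

instance Arbitrary WidgetName where
  arbitrary = sopArbitrary  -- pick a constructor, then generate each field
  shrink = genericShrink    -- shrink structurally via GHC.Generics

The generics-sop route buys exactly what the comment above claims: one shared generator instead of hand-written Arbitrary bodies for dozens of types.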
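On the additive side, the new entry point just sequences spec values from the Test.* modules, so each module must export spec :: Spec; the monolithic cases deleted above were presumably relocated into those files. A hedged sketch of the expected shape, reusing the old sorting test as the body (the module internals here are an assumption, not the actual contents of Test.Sorting):

module Test.Sorting (spec) where

import Test.Common  -- assumed: withTestEnv, insertData, insertOther, searchTweets, grabFirst
import Test.Import  -- assumed: re-exports of hspec and the Bloodhound names used below

spec :: Spec
spec =
  describe "sorting" $
    it "returns documents in the right order" $ withTestEnv $ do
      _ <- insertData
      _ <- insertOther
      let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending
          search = Search Nothing Nothing (Just [sortSpec]) Nothing Nothing
                     False (From 0) (Size 10) SearchTypeQueryThenFetch
                     Nothing Nothing Nothing
      result <- searchTweets search
      liftIO $ grabFirst result `shouldBe` Right otherTweet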