commit cfc828afba
Chris Allen, 2018-04-07 16:25:48 -05:00
72 changed files with 15476 additions and 14264 deletions

.gitignore

@@ -18,3 +18,4 @@ bloodhound.iml
.hgignore
examples/bloodhound-examples.cabal
/.ghc.environment.*
.hspec-failures

.hlint.yaml (new file)

@@ -0,0 +1,66 @@
# HLint configuration file
# https://github.com/ndmitchell/hlint
##########################
# This file contains a template configuration file, which is typically
# placed as .hlint.yaml in the root of your project
# Specify additional command line arguments
#
# - arguments: [--color, --cpp-simple, -XQuasiQuotes]
# Control which extensions/flags/modules/functions can be used
#
# - extensions:
# - default: false # all extensions are banned by default
# - name: [PatternGuards, ViewPatterns] # only these listed extensions can be used
# - {name: CPP, within: CrossPlatform} # CPP can only be used in a given module
#
# - flags:
# - {name: -w, within: []} # -w is allowed nowhere
#
# - modules:
# - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set'
# - {name: Control.Arrow, within: []} # Certain modules are banned entirely
#
# - functions:
# - {name: unsafePerformIO, within: []} # unsafePerformIO can only appear in no modules
# Add custom hints for this project
#
# Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar"
# - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x}
# Turn on hints that are off by default
#
# Ban "module X(module X) where", to require a real export list
# - warn: {name: Use explicit module export list}
#
# Replace a $ b $ c with a . b $ c
# - group: {name: dollar, enabled: true}
#
# Generalise map to fmap, ++ to <>
# - group: {name: generalise, enabled: true}
# Ignore some builtin hints
- ignore: {name: "Use <$>"}
- ignore: {name: "Use lambda-case"}
# When we don't use camelCase it's to match ES.
# Possibly this is a mistake.
- ignore: {name: "Use camelCase"}
- ignore: {name: "Eta reduce"}
# - ignore: {name: Use let}
# - ignore: {name: Use const, within: SpecialModule} # Only within certain modules
# Define some custom infix operators
# - fixity: infixr 3 ~^#^~
# To generate a suitable file for HLint do:
# $ hlint --default > .hlint.yaml

.travis.yml

@@ -8,21 +8,21 @@ addons:
- oracle-java8-installer
env:
global:
- JAVA_HOME=/usr/lib/jvm/java-8-oracle
matrix:
- GHCVER=7.8 ESVER=1.3.6 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.8 ESVER=1.4.1 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.10 ESVER=1.5.2 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.10 ESVER=1.6.0 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.10 ESVER=1.7.6 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.0 ESVER=1.7.6 STACK_YAML=stack-8.0.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.0 ESVER=5.0.2 STACK_YAML=stack-8.0.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.2 ESVER=1.7.6 STACK_YAML=stack.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.2 ESVER=5.0.2 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.2 ESVER=5.5.0 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
allow_failures:
- GHCVER=8.2 ESVER=6.1.3 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
global:
- JAVA_HOME=/usr/lib/jvm/java-8-oracle
matrix:
- GHCVER=7.8 ESVER=1.3.6 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.8 ESVER=1.4.1 STACK_YAML=stack-7.8.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.10 ESVER=1.5.2 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.10 ESVER=1.6.0 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=7.10 ESVER=1.7.6 STACK_YAML=stack-7.10.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.0 ESVER=1.7.6 STACK_YAML=stack-8.0.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.0 ESVER=5.0.2 STACK_YAML=stack-8.0.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.2 ESVER=1.7.6 STACK_YAML=stack.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.2 ESVER=5.0.2 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.2 ESVER=5.5.0 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
# allow_failures:
# - GHCVER=8.2 ESVER=6.1.3 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
install:
# stack
@@ -30,7 +30,6 @@ install:
- travis_retry curl -L https://github.com/commercialhaskell/stack/releases/download/v1.6.1/stack-1.6.1-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
- export PATH=~/.local/bin:$PATH
- stack --no-terminal --version
# elasticsearch
- wget --no-check-certificate $DLINK-$ESVER.tar.gz
- tar xzf elasticsearch-$ESVER.tar.gz
@@ -43,7 +42,7 @@ script:
- stack update --no-terminal
- stack build -j2 --fast --no-terminal
- travis_wait 45 sleep 1800 &
- stack test --fast --no-terminal bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:$ESFLAG
- stack test --fast --no-terminal bloodhound:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:$ESFLAG
cache:
timeout: 2000

Makefile

@@ -1,15 +1,45 @@
stack = STACK_YAML='stack.yaml' stack
build:
stack build
build-validate:
stack build --fast --ghc-options '-Wall -Werror'
ghci:
stack ghci
test: echo-warn
stack test
test-rerun: echo-warn
stack test --test-arguments "-r"
test-ghci:
stack ghci bloodhound:test:bloodhound-tests
ghcid:
ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
ghcid-validate:
ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-Werror -fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
weeder:
weeder . --build
# hlint --default > .hlint.yaml
hlint:
hlint .
hlint-watch:
sos src/ -c "hlint ." -p "src/(.*)\.hs"
mod-build:
stack build --ghc-options '+RTS -A128M -RTS'
echo-warn:
echo "Make certain you have an elasticsearch instance on localhost:9200 !"
test: echo-warn
stack test
7.8-build:
STACK_YAML="stack-7.8.yaml" stack build
@@ -23,16 +53,22 @@ test: echo-warn
STACK_YAML="stack-7.10.yaml" stack test
7.10-test-ES1:
STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
7.10-test-ES5:
STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
8.0-test-ES1:
STACK_YAML="stack.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
8.0-test-ES5:
STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
8.2-test-ES1:
STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
8.2-test-ES5:
STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
8.0-build:
STACK_YAML="stack-8.0.yaml" stack build
@@ -44,8 +80,5 @@ module-touch:
touch src/Database/V1/Bloodhound/Types.hs
touch src/Database/V5/Bloodhound/Types.hs
ghci:
stack ghci
upload:
stack upload --no-signature .

bloodhound.cabal

@@ -7,7 +7,7 @@ license: BSD3
license-file: LICENSE
author: Chris Allen
maintainer: cma@bitemyapp.com
copyright: 2015, Chris Allen
copyright: 2018 Chris Allen
category: Database, Search
build-type: Simple
cabal-version: >=1.10
@@ -33,16 +33,35 @@ Flag ES5
library
ghc-options: -Wall
exposed-modules: Database.V1.Bloodhound
Database.V1.Bloodhound.Client
Database.V1.Bloodhound.Types
Database.V1.Bloodhound.Types.Class
Database.V1.Bloodhound.Types.Internal
Database.V5.Bloodhound
exposed-modules: Database.V5.Bloodhound
Database.V5.Bloodhound.Client
Database.V5.Bloodhound.Types
Database.V5.Bloodhound.Types.Class
Database.V5.Bloodhound.Types.Internal
Database.V5.Bloodhound.Internal.Aggregation
Database.V5.Bloodhound.Internal.Analysis
Database.V5.Bloodhound.Internal.Client
Database.V5.Bloodhound.Internal.Highlight
Database.V5.Bloodhound.Internal.Newtypes
Database.V5.Bloodhound.Internal.Query
Database.V5.Bloodhound.Internal.Sort
Database.V5.Bloodhound.Internal.StringlyTyped
Database.V5.Bloodhound.Internal.Suggest
Database.V1.Bloodhound
Database.V1.Bloodhound.Client
Database.V1.Bloodhound.Types
Database.V1.Bloodhound.Types.Class
Database.V1.Bloodhound.Internal.Aggregation
Database.V1.Bloodhound.Internal.Client
Database.V1.Bloodhound.Internal.Highlight
Database.V1.Bloodhound.Internal.Newtypes
Database.V1.Bloodhound.Internal.Query
Database.V1.Bloodhound.Internal.Sort
Database.V1.Bloodhound.Internal.StringlyTyped
Database.V1.Bloodhound.Internal.Suggest
other-modules: Bloodhound.Import
Database.Bloodhound.Common.Script
hs-source-dirs: src
build-depends: base >= 4.3 && <5,
bytestring >= 0.10.0 && <0.11,
@@ -58,15 +77,13 @@ library
http-types >= 0.8 && <0.13,
vector >= 0.10.9 && <0.13,
scientific >= 0.3.0.0 && <0.4.0.0,
exceptions,
data-default-class,
blaze-builder,
unordered-containers,
mtl-compat,
hashable
exceptions,
hashable,
unordered-containers
default-language: Haskell2010
test-suite tests
test-suite bloodhound-tests
ghc-options: -Wall -fno-warn-orphans
type: exitcode-stdio-1.0
main-is: tests.hs
@@ -74,7 +91,22 @@ test-suite tests
hs-source-dirs: tests/V1
else
hs-source-dirs: tests/V5
other-modules: Test.Aggregation
Test.ApproxEq
Test.BulkAPI
Test.Common
Test.Documents
Test.Generators
Test.Highlights
Test.Import
Test.Indices
Test.JSON
Test.Query
Test.Snapshots
Test.Sorting
Test.SourceFiltering
Test.Suggest
Test.Templates
build-depends: base,
bloodhound,
bytestring,
@@ -90,11 +122,14 @@ test-suite tests
vector,
unordered-containers >= 0.2.5.0 && <0.3,
mtl,
pretty-simple,
quickcheck-arbitrary-template,
quickcheck-properties,
generics-sop >=0.2 && <0.4,
errors,
exceptions,
temporary,
unix-compat,
network-uri
network-uri,
microlens,
microlens-aeson
default-language: Haskell2010

changelog.md

@@ -1,3 +1,17 @@
0.16.0.0
========
- @bitemyapp
- Reorganized modules internally, ripped out Generic,
rewrote part of the test suite
- @andrewthad
- Added support for autogenerated elasticsearch ids in the bulk API
- Added support for token filters
- Added action for searching multiple indices
- @lunaris
- Added support for scripts fields and function score queries
- @bermanjosh
- Added support for direct generators
0.15.0.2
========
- @michaelxavier

examples/Main.hs

@@ -5,7 +5,6 @@ module Main
) where
-------------------------------------------------------------------------------
import Control.Monad.IO.Class (liftIO)
import Data.Aeson (FromJSON (..), defaultOptions,
genericParseJSON, genericToJSON,
@@ -18,7 +17,6 @@ import qualified Data.Vector as V
import Database.V5.Bloodhound
import GHC.Generics (Generic)
import Network.HTTP.Client (defaultManagerSettings)
-------------------------------------------------------------------------------
data TweetMapping = TweetMapping deriving (Eq, Show)
@@ -29,7 +27,7 @@ instance ToJSON TweetMapping where
[ "properties" .=
object ["location" .= object ["type" .= ("geo_point" :: Text)]]
]
-------------------------------------------------------------------------------
data Tweet = Tweet
{ user :: Text
, postDate :: UTCTime
@@ -39,7 +37,6 @@ data Tweet = Tweet
} deriving (Eq, Generic, Show)
-------------------------------------------------------------------------------
exampleTweet :: Tweet
exampleTweet =
Tweet

examples/package.yaml

@@ -17,7 +17,6 @@ dependencies:
- http-client
- vector
- semigroups
- transformers
ghc-options:
- -Wall
- -threaded

src/Bloodhound/Import.hs (new file)

@@ -0,0 +1,81 @@
module Bloodhound.Import
( module X
, LByteString
, Method
, omitNulls
, parseNEJSON
, parseReadText
, readMay
, showText
, deleteSeveral
, oPath
) where
import Control.Applicative as X (Alternative (..), optional)
import Control.Exception as X (Exception)
import Control.Monad as X (MonadPlus (..), forM, (<=<))
import Control.Monad.Catch as X (MonadCatch, MonadMask,
MonadThrow)
import Control.Monad.Except as X (MonadError)
import Control.Monad.Fix as X (MonadFix)
import Control.Monad.IO.Class as X (MonadIO (..))
import Control.Monad.Reader as X (MonadReader (..),
MonadTrans (..), ReaderT (..))
import Control.Monad.State as X (MonadState)
import Control.Monad.Writer as X (MonadWriter)
import Data.Aeson as X
import Data.Aeson.Types as X (Pair, Parser, emptyObject,
parseEither, parseMaybe,
typeMismatch)
import Data.Bifunctor as X (first)
import Data.Char as X (isNumber)
import Data.Hashable as X (Hashable)
import qualified Data.HashMap.Strict as HM
import Data.List as X (foldl', intercalate, nub)
import Data.List.NonEmpty as X (NonEmpty (..), toList)
import Data.Maybe as X (catMaybes, fromMaybe,
isNothing, maybeToList)
import Data.Scientific as X (Scientific)
import Data.Semigroup as X (Semigroup (..))
import Data.Text as X (Text)
import Data.Time.Calendar as X (Day (..), showGregorian)
import Data.Time.Clock as X (NominalDiffTime, UTCTime)
import Data.Time.Clock.POSIX as X
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Traversable as DT
import qualified Data.Vector as V
import qualified Network.HTTP.Types.Method as NHTM
type LByteString = BL.ByteString
type Method = NHTM.Method
readMay :: Read a => String -> Maybe a
readMay s = case reads s of
(a, ""):_ -> Just a
_ -> Nothing
parseReadText :: Read a => Text -> Parser a
parseReadText = maybe mzero return . readMay . T.unpack
showText :: Show a => a -> Text
showText = T.pack . show
omitNulls :: [(Text, Value)] -> Value
omitNulls = object . filter notNull where
notNull (_, Null) = False
notNull (_, Array a) = (not . V.null) a
notNull _ = True
parseNEJSON :: (FromJSON a) => [Value] -> Parser (NonEmpty a)
parseNEJSON [] = fail "Expected non-empty list"
parseNEJSON (x:xs) = DT.mapM parseJSON (x :| xs)
deleteSeveral :: (Eq k, Hashable k) => [k] -> HM.HashMap k v -> HM.HashMap k v
deleteSeveral ks hm = foldr HM.delete hm ks
oPath :: ToJSON a => NonEmpty Text -> a -> Value
oPath (k :| []) v = object [k .= v]
oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v]
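
As a usage sketch (not part of this commit), the two JSON helpers above compose like so; the GHCi-style results assume Bloodhound.Import (which re-exports aeson's encode) is in scope and OverloadedStrings is on:

-- omitNulls drops Null values and empty arrays before building the object:
-- >>> encode (omitNulls [ "tag" .= ("a" :: Text), "note" .= Null, "items" .= ([] :: [Int]) ])
-- "{\"tag\":\"a\"}"
--
-- oPath nests a value under a non-empty key path:
-- >>> encode (oPath ("outer" :| ["inner"]) ("x" :: Text))
-- "{\"outer\":{\"inner\":\"x\"}}"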

src/Database/Bloodhound/Common/Script.hs (new file)

@@ -0,0 +1,220 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.Bloodhound.Common.Script where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import Database.V5.Bloodhound.Internal.Newtypes
newtype ScriptFields =
ScriptFields (HM.HashMap ScriptFieldName ScriptFieldValue)
deriving (Eq, Show)
type ScriptFieldName = Text
type ScriptFieldValue = Value
data Script =
Script { scriptLanguage :: Maybe ScriptLanguage
, scriptInline :: Maybe ScriptInline
, scriptStored :: Maybe ScriptId
, scriptParams :: Maybe ScriptParams
} deriving (Eq, Show)
newtype ScriptLanguage =
ScriptLanguage Text deriving (Eq, Show, FromJSON, ToJSON)
newtype ScriptInline =
ScriptInline Text deriving (Eq, Show, FromJSON, ToJSON)
newtype ScriptId =
ScriptId Text deriving (Eq, Show, FromJSON, ToJSON)
newtype ScriptParams =
ScriptParams (HM.HashMap ScriptParamName ScriptParamValue)
deriving (Eq, Show)
type ScriptParamName = Text
type ScriptParamValue = Value
data BoostMode =
BoostModeMultiply
| BoostModeReplace
| BoostModeSum
| BoostModeAvg
| BoostModeMax
| BoostModeMin deriving (Eq, Show)
data ScoreMode =
ScoreModeMultiply
| ScoreModeSum
| ScoreModeAvg
| ScoreModeFirst
| ScoreModeMax
| ScoreModeMin deriving (Eq, Show)
data FunctionScoreFunction =
FunctionScoreFunctionScript Script
| FunctionScoreFunctionRandom Seed
| FunctionScoreFunctionFieldValueFactor FieldValueFactor
deriving (Eq, Show)
newtype Weight =
Weight Float deriving (Eq, Show, FromJSON, ToJSON)
newtype Seed =
Seed Float deriving (Eq, Show, FromJSON, ToJSON)
data FieldValueFactor =
FieldValueFactor { fieldValueFactorField :: FieldName
, fieldValueFactor :: Maybe Factor
, fieldValueFactorModifier :: Maybe FactorModifier
, fieldValueFactorMissing :: Maybe FactorMissingFieldValue
} deriving (Eq, Show)
newtype Factor =
Factor Float deriving (Eq, Show, FromJSON, ToJSON)
data FactorModifier =
FactorModifierNone
| FactorModifierLog
| FactorModifierLog1p
| FactorModifierLog2p
| FactorModifierLn
| FactorModifierLn1p
| FactorModifierLn2p
| FactorModifierSquare
| FactorModifierSqrt
| FactorModifierReciprocal deriving (Eq, Show)
newtype FactorMissingFieldValue =
FactorMissingFieldValue Float deriving (Eq, Show, FromJSON, ToJSON)
instance ToJSON BoostMode where
toJSON BoostModeMultiply = "multiply"
toJSON BoostModeReplace = "replace"
toJSON BoostModeSum = "sum"
toJSON BoostModeAvg = "avg"
toJSON BoostModeMax = "max"
toJSON BoostModeMin = "min"
instance FromJSON BoostMode where
parseJSON = withText "BoostMode" parse
where parse "multiply" = pure BoostModeMultiply
parse "replace" = pure BoostModeReplace
parse "sum" = pure BoostModeSum
parse "avg" = pure BoostModeAvg
parse "max" = pure BoostModeMax
parse "min" = pure BoostModeMin
parse bm = fail ("Unexpected BoostMode: " <> show bm)
instance ToJSON ScoreMode where
toJSON ScoreModeMultiply = "multiply"
toJSON ScoreModeSum = "sum"
toJSON ScoreModeFirst = "first"
toJSON ScoreModeAvg = "avg"
toJSON ScoreModeMax = "max"
toJSON ScoreModeMin = "min"
instance FromJSON ScoreMode where
parseJSON = withText "ScoreMode" parse
where parse "multiply" = pure ScoreModeMultiply
parse "sum" = pure ScoreModeSum
parse "first" = pure ScoreModeFirst
parse "avg" = pure ScoreModeAvg
parse "max" = pure ScoreModeMax
parse "min" = pure ScoreModeMin
parse sm = fail ("Unexpected ScoreMode: " <> show sm)
functionScoreFunctionPair :: FunctionScoreFunction -> (Text, Value)
functionScoreFunctionPair (FunctionScoreFunctionScript functionScoreScript) =
("script_score", toJSON functionScoreScript)
functionScoreFunctionPair (FunctionScoreFunctionRandom seed) =
("random_score", omitNulls [ "seed" .= seed ])
functionScoreFunctionPair (FunctionScoreFunctionFieldValueFactor fvf) =
("field_value_factor", toJSON fvf)
parseFunctionScoreFunction :: Object -> Parser FunctionScoreFunction
parseFunctionScoreFunction o =
singleScript `taggedWith` "script_score"
<|> singleRandom `taggedWith` "random_score"
<|> singleFieldValueFactor `taggedWith` "field_value_factor"
where taggedWith parser k = parser =<< o .: k
singleScript = pure . FunctionScoreFunctionScript
singleRandom o' = FunctionScoreFunctionRandom <$> o' .: "seed"
singleFieldValueFactor = pure . FunctionScoreFunctionFieldValueFactor
instance ToJSON ScriptFields where
toJSON (ScriptFields x) = Object x
instance FromJSON ScriptFields where
parseJSON (Object o) = pure (ScriptFields o)
parseJSON _ = fail "error parsing ScriptFields"
instance ToJSON Script where
toJSON (Script lang inline stored params) =
object [ "script" .= omitNulls base ]
where base = [ "lang" .= lang
, "inline" .= inline
, "stored" .= stored
, "params" .= params ]
instance FromJSON Script where
parseJSON = withObject "Script" parse
where parse o = o .: "script" >>= \o' ->
Script
<$> o' .:? "lang"
<*> o' .:? "inline"
<*> o' .:? "stored"
<*> o' .:? "params"
instance ToJSON ScriptParams where
toJSON (ScriptParams x) = Object x
instance FromJSON ScriptParams where
parseJSON (Object o) = pure (ScriptParams o)
parseJSON _ = fail "error parsing ScriptParams"
instance ToJSON FieldValueFactor where
toJSON (FieldValueFactor field factor modifier missing) =
omitNulls base
where base = [ "field" .= field
, "factor" .= factor
, "modifier" .= modifier
, "missing" .= missing ]
instance FromJSON FieldValueFactor where
parseJSON = withObject "FieldValueFactor" parse
where parse o = FieldValueFactor
<$> o .: "field"
<*> o .:? "factor"
<*> o .:? "modifier"
<*> o .:? "missing"
instance ToJSON FactorModifier where
toJSON FactorModifierNone = "none"
toJSON FactorModifierLog = "log"
toJSON FactorModifierLog1p = "log1p"
toJSON FactorModifierLog2p = "log2p"
toJSON FactorModifierLn = "ln"
toJSON FactorModifierLn1p = "ln1p"
toJSON FactorModifierLn2p = "ln2p"
toJSON FactorModifierSquare = "square"
toJSON FactorModifierSqrt = "sqrt"
toJSON FactorModifierReciprocal = "reciprocal"
instance FromJSON FactorModifier where
parseJSON = withText "FactorModifier" parse
where parse "none" = pure FactorModifierNone
parse "log" = pure FactorModifierLog
parse "log1p" = pure FactorModifierLog1p
parse "log2p" = pure FactorModifierLog2p
parse "ln" = pure FactorModifierLn
parse "ln1p" = pure FactorModifierLn1p
parse "ln2p" = pure FactorModifierLn2p
parse "square" = pure FactorModifierSquare
parse "sqrt" = pure FactorModifierSqrt
parse "reciprocal" = pure FactorModifierReciprocal
parse fm = fail ("Unexpected FactorModifier: " <> show fm)
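
A short sketch (not part of this commit) of the Script type above in use; "painless" and the inline source are example values only, and OverloadedStrings is assumed:

-- An inline script; Nothing fields are dropped by omitNulls on encode.
exampleScript :: Script
exampleScript = Script
  { scriptLanguage = Just (ScriptLanguage "painless") -- example language name
  , scriptInline   = Just (ScriptInline "doc['count'].value * 2")
  , scriptStored   = Nothing
  , scriptParams   = Nothing
  }
-- >>> encode exampleScript   -- up to JSON key order
-- "{\"script\":{\"lang\":\"painless\",\"inline\":\"doc['count'].value * 2\"}}"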

src/Database/V1/Bloodhound/Client.hs

@@ -223,7 +223,8 @@ getStatus :: MonadBH m => m (Maybe Status)
getStatus = do
response <- get =<< url
return $ decode (responseBody response)
where url = joinPath []
where
url = joinPath []
-- | 'getSnapshotRepos' gets the definitions of a subset of the
-- defined snapshot repos.
@@ -252,7 +253,7 @@ instance FromJSON GSRs where
parseJSON = withObject "Collection of GenericSnapshotRepo" parse
where
parse = fmap GSRs . mapM (uncurry go) . HM.toList
go rawName = withObject "GenericSnapshotRepo" $ \o -> do
go rawName = withObject "GenericSnapshotRepo" $ \o ->
GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type"
<*> o .: "settings"
@@ -454,16 +455,18 @@ deleteIndex (IndexName indexName) =
updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply
updateIndexSettings updates (IndexName indexName) =
bindM2 put url (return body)
where url = joinPath [indexName, "_settings"]
body = Just (encode jsonBody)
jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
where
url = joinPath [indexName, "_settings"]
body = Just (encode jsonBody)
jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName
-> m (Either EsError IndexSettingsSummary)
getIndexSettings (IndexName indexName) = do
getIndexSettings (IndexName indexName) =
parseEsResponse =<< get =<< url
where url = joinPath [indexName, "_settings"]
where
url = joinPath [indexName, "_settings"]
-- | 'optimizeIndex' will optimize a single index, list of indexes or
@@ -586,7 +589,7 @@ listIndices =
url = joinPath ["_cat/indices?format=json"]
parse body = maybe (throwM (EsProtocolException body)) return $ do
vals <- decode body
forM vals $ \val -> do
forM vals $ \val ->
case val of
Object obj -> do
indexVal <- HM.lookup "index" obj
@@ -718,7 +721,8 @@ encodeBulkOperations stream = collapsed where
collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n")
mash :: Builder -> V.Vector L.ByteString -> Builder
mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x))
mash =
V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x)
mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value
mkBulkStreamValue operation indexName mappingName docId =

src/Database/V1/Bloodhound/Internal/Aggregation.hs (new file)

@@ -0,0 +1,416 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V1.Bloodhound.Internal.Aggregation where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Database.V1.Bloodhound.Internal.Client
import Database.V1.Bloodhound.Internal.Highlight (HitHighlight)
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
import Database.V1.Bloodhound.Internal.Sort
type Aggregations = M.Map Text Aggregation
emptyAggregations :: Aggregations
emptyAggregations = M.empty
mkAggregations :: Text -> Aggregation -> Aggregations
mkAggregations name aggregation = M.insert name aggregation emptyAggregations
data Aggregation = TermsAgg TermsAggregation
| CardinalityAgg CardinalityAggregation
| DateHistogramAgg DateHistogramAggregation
| ValueCountAgg ValueCountAggregation
| FilterAgg FilterAggregation
| DateRangeAgg DateRangeAggregation
| MissingAgg MissingAggregation
| TopHitsAgg TopHitsAggregation
deriving (Eq, Show)
instance ToJSON Aggregation where
toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
omitNulls ["terms" .= omitNulls [ toJSON' term,
"include" .= include,
"exclude" .= exclude,
"order" .= order,
"min_doc_count" .= minDocCount,
"size" .= size,
"shard_size" .= shardSize,
"collect_mode" .= collectMode,
"execution_hint" .= executionHint
],
"aggs" .= termAggs ]
where
toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y }
toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) =
object ["cardinality" .= omitNulls [ "field" .= field,
"precisionThreshold" .= precisionThreshold
]
]
toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) =
omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
"interval" .= interval,
"format" .= format,
"pre_zone" .= preZone,
"post_zone" .= postZone,
"pre_offset" .= preOffset,
"post_offset" .= postOffset
],
"aggs" .= dateHistoAggs ]
toJSON (ValueCountAgg a) = object ["value_count" .= v]
where v = case a of
(FieldValueCount (FieldName n)) -> object ["field" .= n]
(ScriptValueCount (Script s)) -> object ["script" .= s]
toJSON (FilterAgg (FilterAggregation filt ags)) =
omitNulls [ "filter" .= filt
, "aggs" .= ags]
toJSON (DateRangeAgg a) = object [ "date_range" .= a
]
toJSON (MissingAgg (MissingAggregation{..})) =
object ["missing" .= object ["field" .= maField]]
toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) =
omitNulls ["top_hits" .= omitNulls [ "size" .= msize
, "from" .= mfrom
, "sort" .= msort
]
]
data TopHitsAggregation = TopHitsAggregation
{ taFrom :: Maybe From
, taSize :: Maybe Size
, taSort :: Maybe Sort
} deriving (Eq, Show)
data MissingAggregation = MissingAggregation
{ maField :: Text
} deriving (Eq, Show)
data TermsAggregation = TermsAggregation { term :: Either Text Text
, termInclude :: Maybe TermInclusion
, termExclude :: Maybe TermInclusion
, termOrder :: Maybe TermOrder
, termMinDocCount :: Maybe Int
, termSize :: Maybe Int
, termShardSize :: Maybe Int
, termCollectMode :: Maybe CollectionMode
, termExecutionHint :: Maybe ExecutionHint
, termAggs :: Maybe Aggregations
} deriving (Eq, Show)
data CardinalityAggregation = CardinalityAggregation { cardinalityField :: FieldName,
precisionThreshold :: Maybe Int
} deriving (Eq, Show)
data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName
, dateInterval :: Interval
, dateFormat :: Maybe Text
-- pre and post deprecated in 1.5
, datePreZone :: Maybe Text
, datePostZone :: Maybe Text
, datePreOffset :: Maybe Text
, datePostOffset :: Maybe Text
, dateAggs :: Maybe Aggregations
} deriving (Eq, Show)
data DateRangeAggregation = DateRangeAggregation { draField :: FieldName
, draFormat :: Maybe Text
, draRanges :: NonEmpty DateRangeAggRange
} deriving (Eq, Show)
data DateRangeAggRange = DateRangeFrom DateMathExpr
| DateRangeTo DateMathExpr
| DateRangeFromAndTo DateMathExpr DateMathExpr deriving (Eq, Show)
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html> for more information.
data ValueCountAggregation = FieldValueCount FieldName
| ScriptValueCount Script deriving (Eq, Show)
-- | Single-bucket filter aggregations. See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html#search-aggregations-bucket-filter-aggregation> for more information.
data FilterAggregation = FilterAggregation { faFilter :: Filter
, faAggs :: Maybe Aggregations} deriving (Eq, Show)
mkTermsAggregation :: Text -> TermsAggregation
mkTermsAggregation t = TermsAggregation (Left t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkTermsScriptAggregation :: Text -> TermsAggregation
mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing
mkCardinalityAggregation :: FieldName -> CardinalityAggregation
mkCardinalityAggregation t = CardinalityAggregation t Nothing
data TermInclusion = TermInclusion Text
| TermPattern Text Text deriving (Eq, Show)
instance ToJSON TermInclusion where
toJSON (TermInclusion x) = toJSON x
toJSON (TermPattern pattern flags) =
omitNulls [ "pattern" .= pattern
, "flags" .= flags]
data TermOrder = TermOrder
{ termSortField :: Text
, termSortOrder :: SortOrder } deriving (Eq, Show)
instance ToJSON TermOrder where
toJSON (TermOrder termSortField termSortOrder) =
object [termSortField .= termSortOrder]
data ExecutionHint = Ordinals
| GlobalOrdinals
| GlobalOrdinalsHash
| GlobalOrdinalsLowCardinality
| Map deriving (Eq, Show)
instance ToJSON ExecutionHint where
toJSON Ordinals = "ordinals"
toJSON GlobalOrdinals = "global_ordinals"
toJSON GlobalOrdinalsHash = "global_ordinals_hash"
toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality"
toJSON Map = "map"
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#date-math> for more information.
data DateMathExpr =
DateMathExpr DateMathAnchor [DateMathModifier]
deriving (Eq, Show)
instance ToJSON DateMathExpr where
toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods))
where fmtA DMNow = "now"
fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||"
fmtMod (AddTime n u) = "+" <> showText n <> fmtU u
fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u
fmtMod (RoundDownTo u) = "/" <> fmtU u
fmtU DMYear = "y"
fmtU DMMonth = "M"
fmtU DMWeek = "w"
fmtU DMDay = "d"
fmtU DMHour = "h"
fmtU DMMinute = "m"
fmtU DMSecond = "s"
-- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from.
data DateMathAnchor =
DMNow
| DMDate Day
deriving (Eq, Show)
data DateMathModifier =
AddTime Int DateMathUnit
| SubtractTime Int DateMathUnit
| RoundDownTo DateMathUnit
deriving (Eq, Show)
data DateMathUnit =
DMYear
| DMMonth
| DMWeek
| DMDay
| DMHour
| DMMinute
| DMSecond
deriving (Eq, Show)
data CollectionMode = BreadthFirst
| DepthFirst deriving (Eq, Show)
type AggregationResults = M.Map Text Value
class BucketAggregation a where
key :: a -> BucketValue
docCount :: a -> Int
aggs :: a -> Maybe AggregationResults
data BucketValue = TextValue Text
| ScientificValue Scientific
| BoolValue Bool deriving (Show)
data Bucket a = Bucket { buckets :: [a]} deriving (Show)
data TermsResult = TermsResult { termKey :: BucketValue
, termsDocCount :: Int
, termsAggs :: Maybe AggregationResults } deriving (Show)
data DateHistogramResult = DateHistogramResult { dateKey :: Int
, dateKeyStr :: Maybe Text
, dateDocCount :: Int
, dateHistogramAggs :: Maybe AggregationResults } deriving (Show)
data DateRangeResult = DateRangeResult { dateRangeKey :: Text
, dateRangeFrom :: Maybe UTCTime
, dateRangeFromAsString :: Maybe Text
, dateRangeTo :: Maybe UTCTime
, dateRangeToAsString :: Maybe Text
, dateRangeDocCount :: Int
, dateRangeAggs :: Maybe AggregationResults } deriving (Show, Eq)
toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
toTerms = toAggResult
toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
toDateHistogram = toAggResult
toMissing :: Text -> AggregationResults -> Maybe MissingResult
toMissing = toAggResult
toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a)
toTopHits = toAggResult
toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a
toAggResult t a = M.lookup t a >>= deserialize
where deserialize = parseMaybe parseJSON
instance BucketAggregation TermsResult where
key = termKey
docCount = termsDocCount
aggs = termsAggs
instance BucketAggregation DateHistogramResult where
key = TextValue . showText . dateKey
docCount = dateDocCount
aggs = dateHistogramAggs
instance BucketAggregation DateRangeResult where
key = TextValue . dateRangeKey
docCount = dateRangeDocCount
aggs = dateRangeAggs
instance (FromJSON a) => FromJSON (Bucket a) where
parseJSON (Object v) = Bucket <$>
v .: "buckets"
parseJSON _ = mempty
instance FromJSON BucketValue where
parseJSON (String t) = return $ TextValue t
parseJSON (Number s) = return $ ScientificValue s
parseJSON (Bool b) = return $ BoolValue b
parseJSON _ = mempty
instance FromJSON MissingResult where
parseJSON = withObject "MissingResult" parse
where parse v = MissingResult <$> v .: "doc_count"
instance FromJSON TermsResult where
parseJSON (Object v) = TermsResult <$>
v .: "key" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v ["key", "doc_count"])
parseJSON _ = mempty
instance FromJSON DateHistogramResult where
parseJSON (Object v) = DateHistogramResult <$>
v .: "key" <*>
v .:? "key_as_string" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v [ "key"
, "doc_count"
, "key_as_string"
]
)
parseJSON _ = mempty
instance FromJSON DateRangeResult where
parseJSON = withObject "DateRangeResult" parse
where parse v = DateRangeResult <$>
v .: "key" <*>
(fmap posixMS <$> v .:? "from") <*>
v .:? "from_as_string" <*>
(fmap posixMS <$> v .:? "to") <*>
v .:? "to_as_string" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v [ "key"
, "from"
, "from_as_string"
, "to"
, "to_as_string"
, "doc_count"
]
)
instance (FromJSON a) => FromJSON (TopHitResult a) where
parseJSON (Object v) = TopHitResult <$>
v .: "hits"
parseJSON _ = fail "Failure in FromJSON (TopHitResult a)"
data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show)
data TopHitResult a = TopHitResult { tarHits :: (SearchHits a)
} deriving Show
data SearchHits a =
SearchHits { hitsTotal :: Int
, maxScore :: Score
, hits :: [Hit a] } deriving (Eq, Show)
instance Semigroup (SearchHits a) where
(SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb)
instance Monoid (SearchHits a) where
mempty = SearchHits 0 Nothing mempty
mappend = (<>)
data Hit a =
Hit { hitIndex :: IndexName
, hitType :: MappingName
, hitDocId :: DocId
, hitScore :: Score
, hitSource :: Maybe a
, hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
-- Try to get an AggregationResults when we don't know the
-- field name. We filter out the known keys to try to minimize the noise.
getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults
getNamedSubAgg o knownKeys = maggRes
where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o
maggRes
| HM.null unknownKeys = Nothing
| otherwise = Just . M.fromList $ HM.toList unknownKeys
instance ToJSON CollectionMode where
toJSON BreadthFirst = "breadth_first"
toJSON DepthFirst = "depth_first"
instance ToJSON DateRangeAggregation where
toJSON DateRangeAggregation {..} =
omitNulls [ "field" .= draField
, "format" .= draFormat
, "ranges" .= toList draRanges
]
instance (FromJSON a) => FromJSON (SearchHits a) where
parseJSON (Object v) = SearchHits <$>
v .: "total" <*>
v .: "max_score" <*>
v .: "hits"
parseJSON _ = empty
instance ToJSON DateRangeAggRange where
toJSON (DateRangeFrom e) = object [ "from" .= e ]
toJSON (DateRangeTo e) = object [ "to" .= e ]
toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ]
instance (FromJSON a) => FromJSON (Hit a) where
parseJSON (Object v) = Hit <$>
v .: "_index" <*>
v .: "_type" <*>
v .: "_id" <*>
v .: "_score" <*>
v .:? "_source" <*>
v .:? "highlight"
parseJSON _ = empty
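
As a sketch (not part of this commit), the smart constructors above compose into the nested JSON ES expects; "users" and "user" are example names:

-- A single terms aggregation over an example field:
userAggs :: Aggregations
userAggs = mkAggregations "users" (TermsAgg (mkTermsAggregation "user"))
-- >>> encode userAggs
-- "{\"users\":{\"terms\":{\"field\":\"user\"}}}"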

src/Database/V1/Bloodhound/Internal/Client.hs (diff suppressed: file too large)

src/Database/V1/Bloodhound/Internal/Highlight.hs (new file)

@@ -0,0 +1,144 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.Highlight where
import Bloodhound.Import
import qualified Data.Map as M
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
type HitHighlight = M.Map Text [Text]
data Highlights = Highlights { globalsettings :: Maybe HighlightSettings
, highlightFields :: [FieldHighlight]
} deriving (Show, Eq)
instance ToJSON Highlights where
toJSON (Highlights global fields) =
omitNulls (("fields" .= fields)
: highlightSettingsPairs global)
data HighlightSettings = Plain PlainHighlight
| Postings PostingsHighlight
| FastVector FastVectorHighlight
deriving (Show, Eq)
data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings)
deriving (Show, Eq)
data PlainHighlight =
PlainHighlight { plainCommon :: Maybe CommonHighlight
, plainNonPost :: Maybe NonPostings } deriving (Show, Eq)
-- This requires that index_options are set to 'offset' in the mapping.
data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq)
-- This requires that term_vector is set to 'with_positions_offsets' in the mapping.
data FastVectorHighlight =
FastVectorHighlight { fvCommon :: Maybe CommonHighlight
, fvNonPostSettings :: Maybe NonPostings
, boundaryChars :: Maybe Text
, boundaryMaxScan :: Maybe Int
, fragmentOffset :: Maybe Int
, matchedFields :: [Text]
, phraseLimit :: Maybe Int
} deriving (Show, Eq)
data CommonHighlight =
CommonHighlight { order :: Maybe Text
, forceSource :: Maybe Bool
, tag :: Maybe HighlightTag
, encoder :: Maybe HighlightEncoder
, noMatchSize :: Maybe Int
, highlightQuery :: Maybe Query
, requireFieldMatch :: Maybe Bool
} deriving (Show, Eq)
-- Settings that are only applicable to FastVector and Plain highlighters.
data NonPostings =
NonPostings { fragmentSize :: Maybe Int
, numberOfFragments :: Maybe Int} deriving (Show, Eq)
data HighlightEncoder = DefaultEncoder
| HTMLEncoder
deriving (Show, Eq)
-- NOTE: Should the tags use some kind of HTML type, rather than Text?
data HighlightTag = TagSchema Text
| CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh
deriving (Show, Eq)
highlightSettingsPairs :: Maybe HighlightSettings -> [Pair]
highlightSettingsPairs Nothing = []
highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh)
highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph)
highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh)
plainHighPairs :: Maybe PlainHighlight -> [Pair]
plainHighPairs Nothing = []
plainHighPairs (Just (PlainHighlight plCom plNonPost)) =
[ "type" .= String "plain"]
++ commonHighlightPairs plCom
++ nonPostingsToPairs plNonPost
postHighPairs :: Maybe PostingsHighlight -> [Pair]
postHighPairs Nothing = []
postHighPairs (Just (PostingsHighlight pCom)) =
("type" .= String "postings")
: commonHighlightPairs pCom
fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair]
fastVectorHighPairs Nothing = []
fastVectorHighPairs (Just
(FastVectorHighlight fvCom fvNonPostSettings' fvBoundChars
fvBoundMaxScan fvFragOff fvMatchedFields
fvPhraseLim)) =
[ "type" .= String "fvh"
, "boundary_chars" .= fvBoundChars
, "boundary_max_scan" .= fvBoundMaxScan
, "fragment_offset" .= fvFragOff
, "matched_fields" .= fvMatchedFields
, "phraseLimit" .= fvPhraseLim]
++ commonHighlightPairs fvCom
++ nonPostingsToPairs fvNonPostSettings'
commonHighlightPairs :: Maybe CommonHighlight -> [Pair]
commonHighlightPairs Nothing = []
commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder
chNoMatchSize chHighlightQuery
chRequireFieldMatch)) =
[ "order" .= chScore
, "force_source" .= chForceSource
, "encoder" .= chEncoder
, "no_match_size" .= chNoMatchSize
, "highlight_query" .= chHighlightQuery
, "require_fieldMatch" .= chRequireFieldMatch]
++ highlightTagToPairs chTag
nonPostingsToPairs :: Maybe NonPostings -> [Pair]
nonPostingsToPairs Nothing = []
nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) =
[ "fragment_size" .= npFragSize
, "number_of_fragments" .= npNumOfFrags]
highlightTagToPairs :: Maybe HighlightTag -> [Pair]
highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"]
highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre
, "post_tags" .= post]
highlightTagToPairs Nothing = []
instance ToJSON FieldHighlight where
toJSON (FieldHighlight (FieldName fName) (Just fSettings)) =
object [ fName .= fSettings ]
toJSON (FieldHighlight (FieldName fName) Nothing) =
object [ fName .= emptyObject ]
instance ToJSON HighlightSettings where
toJSON hs = omitNulls (highlightSettingsPairs (Just hs))
instance ToJSON HighlightEncoder where
toJSON DefaultEncoder = String "default"
toJSON HTMLEncoder = String "html"
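
A minimal sketch (not part of this commit) of requesting highlights on one field with default settings; "body" is an example field name:

-- Highlight one field, with no global or per-field settings:
bodyHighlights :: Highlights
bodyHighlights = Highlights Nothing [FieldHighlight (FieldName "body") Nothing]
-- >>> encode bodyHighlights
-- "{\"fields\":[{\"body\":{}}]}"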

src/Database/V1/Bloodhound/Internal/Newtypes.hs (new file)

@@ -0,0 +1,211 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.Newtypes where
import Bloodhound.Import
newtype From = From Int deriving (Eq, Show, ToJSON)
newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON)
{-| 'FieldName' is used all over the place wherever a specific field within
a document needs to be specified, usually in 'Query's or 'Filter's.
-}
newtype FieldName =
FieldName Text
deriving (Eq, Show, ToJSON, FromJSON)
newtype Boost =
Boost Double
deriving (Eq, Show, ToJSON, FromJSON)
newtype BoostTerms =
BoostTerms Double
deriving (Eq, Show, ToJSON, FromJSON)
{-| 'ReplicaCount' is part of 'IndexSettings' -}
newtype ReplicaCount =
ReplicaCount Int
deriving (Eq, Show, ToJSON)
{-| 'ShardCount' is part of 'IndexSettings' -}
newtype ShardCount =
ShardCount Int
deriving (Eq, Show, ToJSON)
{-| 'TemplateName' is used to describe which template to query/create/delete
-}
newtype TemplateName = TemplateName Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'TemplatePattern' represents a pattern which is matched against index names
-}
newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'MappingName' is part of mappings which are how ES describes and schematizes
the data in the indices.
-}
newtype MappingName = MappingName Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'DocId' is a generic wrapper value for expressing unique Document IDs.
Can be set by the user or created by ES itself. Often used in client
functions for poking at specific documents.
-}
newtype DocId = DocId Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'QueryString' is used to wrap query text bodies, be they human written or not.
-}
newtype QueryString = QueryString Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'Script' is often used in place of 'FieldName' to specify more
complex ways of extracting a value from a document.
-}
newtype Script = Script { scriptText :: Text } deriving (Eq, Show)
{-| 'CacheName' is used in 'RegexpFilter' for describing the
'CacheKey' keyed caching behavior.
-}
newtype CacheName = CacheName Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching.
-}
newtype CacheKey =
CacheKey Text deriving (Eq, Show, ToJSON, FromJSON)
newtype Existence =
Existence Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype NullValue =
NullValue Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype CutoffFrequency =
CutoffFrequency Double deriving (Eq, Show, ToJSON, FromJSON)
newtype Analyzer =
Analyzer Text deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxExpansions =
MaxExpansions Int deriving (Eq, Show, ToJSON, FromJSON)
{-| 'Lenient', if set to true, will cause format based failures to be
ignored. I don't know what the bloody default is, Elasticsearch
documentation didn't say what it was. Let me know if you figure it out.
-}
newtype Lenient =
Lenient Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype Tiebreaker =
Tiebreaker Double deriving (Eq, Show, ToJSON, FromJSON)
{-| 'MinimumMatch' controls how many should clauses in the bool query should
match. Can be an absolute value (2) or a percentage (30%) or a
combination of both.
-}
newtype MinimumMatch =
MinimumMatch Int deriving (Eq, Show, ToJSON, FromJSON)
newtype DisableCoord =
DisableCoord Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype IgnoreTermFrequency =
IgnoreTermFrequency Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype MinimumTermFrequency =
MinimumTermFrequency Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxQueryTerms =
MaxQueryTerms Int deriving (Eq, Show, ToJSON, FromJSON)
newtype Fuzziness =
Fuzziness Double deriving (Eq, Show, ToJSON, FromJSON)
{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -}
newtype PrefixLength =
PrefixLength Int deriving (Eq, Show, ToJSON, FromJSON)
newtype TypeName =
TypeName Text deriving (Eq, Show, ToJSON, FromJSON)
newtype PercentMatch =
PercentMatch Double deriving (Eq, Show, ToJSON, FromJSON)
newtype StopWord =
StopWord Text deriving (Eq, Show, ToJSON, FromJSON)
newtype QueryPath =
QueryPath Text deriving (Eq, Show, ToJSON, FromJSON)
{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly
heavy, because all terms in the index need to be examined, just in case
they match. Leading wildcards can be disabled by setting
'AllowLeadingWildcard' to false. -}
newtype AllowLeadingWildcard =
AllowLeadingWildcard Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype LowercaseExpanded =
LowercaseExpanded Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype EnablePositionIncrements =
EnablePositionIncrements Bool deriving (Eq, Show, ToJSON, FromJSON)
{-| By default, wildcard terms in a query are not analyzed.
Setting 'AnalyzeWildcard' to true enables best-effort analysis.
-}
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, ToJSON, FromJSON)
{-| 'GeneratePhraseQueries' defaults to false.
-}
newtype GeneratePhraseQueries =
GeneratePhraseQueries Bool deriving (Eq, Show, ToJSON, FromJSON)
{-| 'Locale' is used for string conversions - defaults to ROOT.
-}
newtype Locale = Locale Text deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show, ToJSON, FromJSON)
{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact
phrase matches. Default is 0.
-}
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, ToJSON, FromJSON)
-- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ
newtype POSIXMS = POSIXMS { posixMS :: UTCTime }
instance FromJSON POSIXMS where
parseJSON = withScientific "POSIXMS" (return . parse)
where parse n = let n' = truncate n :: Integer
in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000)))
{-| 'IndexName' is used to describe which index to query/create/delete
-}
newtype IndexName = IndexName Text deriving (Eq, Show, ToJSON, FromJSON)
newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Show, ToJSON)
type Score = Maybe Double
newtype ShardId = ShardId { shardId :: Int }
deriving (Eq, Show, FromJSON)
-- | Milliseconds
newtype MS = MS NominalDiffTime
-- keeps the unexported constructor warnings at bay
unMS :: MS -> NominalDiffTime
unMS (MS t) = t
instance FromJSON MS where
parseJSON = withScientific "MS" (return . MS . parse)
where
parse n = fromInteger ((truncate n) * 1000)
newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a }
deriving (Show, Eq)
instance FromJSON a => FromJSON (MaybeNA a) where
parseJSON (String "NA") = pure $ MaybeNA Nothing
parseJSON o = MaybeNA . Just <$> parseJSON o
newtype SnapshotName = SnapshotName { snapshotName :: Text }
deriving (Show, Eq, Ord, ToJSON, FromJSON)
instance FromJSON ShardCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ShardCount . parseJSON
parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText)
instance FromJSON ReplicaCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ReplicaCount . parseJSON
parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText)
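
A quick sketch (not part of this commit) of the lenient parsers above, which accept both plain and stringly-typed numbers, plus POSIXMS decoding a millisecond epoch timestamp; OverloadedStrings is assumed:

-- >>> decode "2" :: Maybe ShardCount
-- Just (ShardCount 2)
-- >>> decode "\"2\"" :: Maybe ShardCount
-- Just (ShardCount 2)
-- >>> posixMS <$> (decode "1000000000000" :: Maybe POSIXMS)
-- Just 2001-09-09 01:46:40 UTC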

src/Database/V1/Bloodhound/Internal/Query.hs (diff suppressed: file too large)

src/Database/V1/Bloodhound/Internal/Sort.hs (new file)

@@ -0,0 +1,106 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.Sort where
import Bloodhound.Import
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
dependent with later sorts acting as tie-breakers for earlier sorts.
-}
type Sort = [SortSpec]
{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
'DistanceUnit' to express "nearness" to a single geographical point as a
sort specification.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortSpec = DefaultSortSpec DefaultSort
| GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)
instance ToJSON SortSpec where
toJSON (DefaultSortSpec
(DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
dsSortMode dsMissingSort dsNestedFilter)) =
object [dsSortFieldName .= omitNulls base] where
base = [ "order" .= dsSortOrder
, "ignore_unmapped" .= dsIgnoreUnmapped
, "mode" .= dsSortMode
, "missing" .= dsMissingSort
, "nested_filter" .= dsNestedFilter ]
toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
object [ "unit" .= units
, field .= gdsLatLon
, "order" .= gdsSortOrder ]
{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
'mkSort' convenience function for when you want to specify only the most
common parameters.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data DefaultSort =
DefaultSort { sortFieldName :: FieldName
, sortOrder :: SortOrder
-- default False
, ignoreUnmapped :: Bool
, sortMode :: Maybe SortMode
, missingSort :: Maybe Missing
, nestedFilter :: Maybe Filter } deriving (Eq, Show)
{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
encoded into "asc" or "desc" when turned into JSON.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortOrder = Ascending
| Descending deriving (Eq, Show)
instance ToJSON SortOrder where
toJSON Ascending = String "asc"
toJSON Descending = String "desc"
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
-}
data SortMode = SortMin
| SortMax
| SortSum
| SortAvg deriving (Eq, Show)
instance ToJSON SortMode where
toJSON SortMin = String "min"
toJSON SortMax = String "max"
toJSON SortSum = String "sum"
toJSON SortAvg = String "avg"
{-| 'Missing' prescribes how to handle missing fields. A missing field can be
sorted last, first, or using a custom value as a substitute.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_missing_values>
-}
data Missing = LastMissing
| FirstMissing
| CustomMissing Text deriving (Eq, Show)
instance ToJSON Missing where
toJSON LastMissing = String "_last"
toJSON FirstMissing = String "_first"
toJSON (CustomMissing txt) = String txt
-- {-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
-- that you can concisely describe the usual kind of 'SortSpec's you want.
-- -}
mkSort :: FieldName -> SortOrder -> DefaultSort
mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing
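
A sketch (not part of this commit) of a typical sort built with mkSort; "postDate" is an example field name, and JSON key order may vary:

-- Newest-first sort on an example date field:
recentFirst :: Sort
recentFirst = [DefaultSortSpec (mkSort (FieldName "postDate") Descending)]
-- >>> encode recentFirst   -- up to key order
-- "[{\"postDate\":{\"order\":\"desc\",\"ignore_unmapped\":false}}]"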

src/Database/V1/Bloodhound/Internal/StringlyTyped.hs (new file)

@@ -0,0 +1,26 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.StringlyTyped where
import Bloodhound.Import
import qualified Data.Text as T
newtype StringlyTypedDouble = StringlyTypedDouble { unStringlyTypedDouble :: Double }
instance FromJSON StringlyTypedDouble where
parseJSON = fmap StringlyTypedDouble . parseJSON . unStringlyTypeJSON
-- | For some reason in several settings APIs, all leaf values get returned
-- as strings. This function attempts to recover from this for all
-- non-recursive JSON types. If nothing can be done, the value is left alone.
unStringlyTypeJSON :: Value -> Value
unStringlyTypeJSON (String "true") = Bool True
unStringlyTypeJSON (String "false") = Bool False
unStringlyTypeJSON (String "null") = Null
unStringlyTypeJSON v@(String t) = case readMay (T.unpack t) of
Just n -> Number n
Nothing -> v
unStringlyTypeJSON v = v
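
As a sketch (not part of this commit), unStringlyTypeJSON normalizes the string-encoded leaves those settings APIs return:

-- >>> unStringlyTypeJSON (String "true")
-- Bool True
-- >>> unStringlyTypeJSON (String "5")
-- Number 5.0
-- >>> unStringlyTypeJSON (String "not a number")
-- String "not a number"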

src/Database/V1/Bloodhound/Internal/Suggest.hs (new file)

@@ -0,0 +1,252 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V1.Bloodhound.Internal.Suggest where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
data Suggest = Suggest { suggestText :: Text
, suggestName :: Text
, suggestType :: SuggestType
}
deriving (Show, Eq)
instance ToJSON Suggest where
toJSON Suggest{..} = object [ "text" .= suggestText
, suggestName .= suggestType
]
instance FromJSON Suggest where
parseJSON (Object o) = do
suggestText' <- o .: "text"
let dropTextList = HM.toList $ HM.filterWithKey (\x _ -> x /= "text") o
suggestName' <- case dropTextList of
[(x, _)] -> return x
_ -> fail "error parsing Suggest field name"
suggestType' <- o .: suggestName'
return $ Suggest suggestText' suggestName' suggestType'
parseJSON x = typeMismatch "Suggest" x
data SuggestType = SuggestTypePhraseSuggester PhraseSuggester
deriving (Show, Eq)
instance ToJSON SuggestType where
toJSON (SuggestTypePhraseSuggester x) = object ["phrase" .= x]
instance FromJSON SuggestType where
parseJSON = withObject "SuggestType" parse
where parse o = phraseSuggester `taggedWith` "phrase"
where taggedWith parser k = parser =<< o .: k
phraseSuggester = pure . SuggestTypePhraseSuggester
data PhraseSuggester =
PhraseSuggester { phraseSuggesterField :: FieldName
, phraseSuggesterGramSize :: Maybe Int
, phraseSuggesterRealWordErrorLikelihood :: Maybe Int
, phraseSuggesterConfidence :: Maybe Int
, phraseSuggesterMaxErrors :: Maybe Int
, phraseSuggesterSeparator :: Maybe Text
, phraseSuggesterSize :: Maybe Size
, phraseSuggesterAnalyzer :: Maybe Analyzer
, phraseSuggesterShardSize :: Maybe Int
, phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter
, phraseSuggesterCollate :: Maybe PhraseSuggesterCollate
, phraseSuggesterCandidateGenerators :: [DirectGenerators]
}
deriving (Show, Eq)
instance ToJSON PhraseSuggester where
toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField
, "gram_size" .= phraseSuggesterGramSize
, "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood
, "confidence" .= phraseSuggesterConfidence
, "max_errors" .= phraseSuggesterMaxErrors
, "separator" .= phraseSuggesterSeparator
, "size" .= phraseSuggesterSize
, "analyzer" .= phraseSuggesterAnalyzer
, "shard_size" .= phraseSuggesterShardSize
, "highlight" .= phraseSuggesterHighlight
, "collate" .= phraseSuggesterCollate
, "direct_generator" .= phraseSuggesterCandidateGenerators
]
instance FromJSON PhraseSuggester where
parseJSON = withObject "PhraseSuggester" parse
where parse o = PhraseSuggester
<$> o .: "field"
<*> o .:? "gram_size"
<*> o .:? "real_word_error_likelihood"
<*> o .:? "confidence"
<*> o .:? "max_errors"
<*> o .:? "separator"
<*> o .:? "size"
<*> o .:? "analyzer"
<*> o .:? "shard_size"
<*> o .:? "highlight"
<*> o .:? "collate"
<*> o .:? "direct_generator" .!= []
mkPhraseSuggester :: FieldName -> PhraseSuggester
mkPhraseSuggester fName =
PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing []
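-- A minimal usage sketch; the "message" field and suggestion name are
-- hypothetical:
--
-- let ps = (mkPhraseSuggester (FieldName "message"))
--            { phraseSuggesterSize = Just (Size 5) }
-- in Suggest "some input text" "my-suggestion" (SuggestTypePhraseSuggester ps)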
data PhraseSuggesterHighlighter =
PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text
, phraseSuggesterHighlighterPostTag :: Text
}
deriving (Show, Eq)
instance ToJSON PhraseSuggesterHighlighter where
toJSON PhraseSuggesterHighlighter{..} =
object [ "pre_tag" .= phraseSuggesterHighlighterPreTag
, "post_tag" .= phraseSuggesterHighlighterPostTag
]
instance FromJSON PhraseSuggesterHighlighter where
parseJSON = withObject "PhraseSuggesterHighlighter" parse
where parse o = PhraseSuggesterHighlighter
<$> o .: "pre_tag"
<*> o .: "post_tag"
data PhraseSuggesterCollate =
PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline
, phraseSuggesterCollatePrune :: Bool
}
deriving (Show, Eq)
instance ToJSON PhraseSuggesterCollate where
toJSON PhraseSuggesterCollate{..} = object [ "query" .= object
[ "inline" .= (inline phraseSuggesterCollateTemplateQuery)
]
, "params" .= (params phraseSuggesterCollateTemplateQuery)
, "prune" .= phraseSuggesterCollatePrune
]
instance FromJSON PhraseSuggesterCollate where
parseJSON (Object o) = do
query' <- o .: "query"
inline' <- query' .: "inline"
params' <- o .: "params"
prune' <- o .:? "prune" .!= False
return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune'
parseJSON x = typeMismatch "PhraseSuggesterCollate" x
data DirectGenerators = DirectGenerators
{ directGeneratorsField :: FieldName
, directGeneratorsSize :: Maybe Int
, directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes
, directGeneratorMaxEdits :: Maybe Double
, directGeneratorPrefixLength :: Maybe Int
, directGeneratorMinWordLength :: Maybe Int
, directGeneratorMaxInspections :: Maybe Int
, directGeneratorMinDocFreq :: Maybe Double
, directGeneratorMaxTermFreq :: Maybe Double
, directGeneratorPreFilter :: Maybe Text
, directGeneratorPostFilter :: Maybe Text
}
deriving (Show, Eq)
instance ToJSON DirectGenerators where
toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField
, "size" .= directGeneratorsSize
, "suggest_mode" .= directGeneratorSuggestMode
, "max_edits" .= directGeneratorMaxEdits
, "prefix_length" .= directGeneratorPrefixLength
, "min_word_length" .= directGeneratorMinWordLength
, "max_inspections" .= directGeneratorMaxInspections
, "min_doc_freq" .= directGeneratorMinDocFreq
, "max_term_freq" .= directGeneratorMaxTermFreq
, "pre_filter" .= directGeneratorPreFilter
, "post_filter" .= directGeneratorPostFilter
]
instance FromJSON DirectGenerators where
parseJSON = withObject "DirectGenerators" parse
where parse o = DirectGenerators
<$> o .: "field"
<*> o .:? "size"
<*> o .: "suggest_mode"
<*> o .:? "max_edits"
<*> o .:? "prefix_length"
<*> o .:? "min_word_length"
<*> o .:? "max_inspections"
<*> o .:? "min_doc_freq"
<*> o .:? "max_term_freq"
<*> o .:? "pre_filter"
<*> o .:? "post_filter"
mkDirectGenerators :: FieldName -> DirectGenerators
mkDirectGenerators fn =
  DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing
    Nothing Nothing Nothing Nothing Nothing Nothing Nothing
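-- Sketch: overriding the default suggest mode for a hypothetical "message"
-- field so that corrections are drawn from more frequent terms:
--
-- (mkDirectGenerators (FieldName "message"))
--   { directGeneratorSuggestMode = DirectGeneratorSuggestModePopular }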
data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing
| DirectGeneratorSuggestModePopular
| DirectGeneratorSuggestModeAlways
deriving (Show, Eq)
instance ToJSON DirectGeneratorSuggestModeTypes where
toJSON DirectGeneratorSuggestModeMissing = "missing"
toJSON DirectGeneratorSuggestModePopular = "popular"
toJSON DirectGeneratorSuggestModeAlways = "always"
instance FromJSON DirectGeneratorSuggestModeTypes where
parseJSON = withText "DirectGeneratorSuggestModeTypes" parse
where parse "missing" = pure DirectGeneratorSuggestModeMissing
parse "popular" = pure DirectGeneratorSuggestModePopular
parse "always" = pure DirectGeneratorSuggestModeAlways
parse f = fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f)
data SuggestOptions =
SuggestOptions { suggestOptionsText :: Text
, suggestOptionsScore :: Double
, suggestOptionsFreq :: Maybe Int
, suggestOptionsHighlighted :: Maybe Text
}
deriving (Eq, Show)
instance FromJSON SuggestOptions where
parseJSON = withObject "SuggestOptions" parse
where parse o = SuggestOptions
<$> o .: "text"
<*> o .: "score"
<*> o .:? "freq"
<*> o .:? "highlighted"
data SuggestResponse =
SuggestResponse { suggestResponseText :: Text
, suggestResponseOffset :: Int
, suggestResponseLength :: Int
, suggestResponseOptions :: [SuggestOptions]
}
deriving (Eq, Show)
instance FromJSON SuggestResponse where
parseJSON = withObject "SuggestResponse" parse
where parse o = SuggestResponse
<$> o .: "text"
<*> o .: "offset"
<*> o .: "length"
<*> o .: "options"
data NamedSuggestionResponse =
NamedSuggestionResponse { nsrName :: Text
, nsrResponses :: [SuggestResponse]
}
deriving (Eq, Show)
instance FromJSON NamedSuggestionResponse where
parseJSON (Object o) = do
suggestionName' <- case HM.toList o of
[(x, _)] -> return x
_ -> fail "error parsing NamedSuggestionResponse name"
suggestionResponses' <- o .: suggestionName'
return $ NamedSuggestionResponse suggestionName' suggestionResponses'
parseJSON x = typeMismatch "NamedSuggestionResponse" x

File diff suppressed because it is too large

View File

@@ -1,57 +0,0 @@
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types.Internal
-- Copyright : (C) 2014 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
-- Portability : DeriveGeneric, RecordWildCards
--
-- Internal data types for Bloodhound. These types may change without
-- notice so import at your own risk.
-------------------------------------------------------------------------------
module Database.V1.Bloodhound.Types.Internal
( BHEnv(..)
, Server(..)
, MonadBH(..)
) where
import Control.Applicative as A
import Control.Monad.Reader
import Data.Aeson
import Data.Text (Text)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Network.HTTP.Client
{-| Common environment for Elasticsearch calls. Connections will be
pipelined according to the provided HTTP connection manager.
-}
data BHEnv = BHEnv { bhServer :: Server
, bhManager :: Manager
, bhRequestHook :: Request -> IO Request
-- ^ Low-level hook that is run before every request is sent. Used to implement custom authentication strategies. Defaults to 'return' with 'mkBHEnv'.
}
instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
getBHEnv = ask
{-| 'Server' is used with the client functions to point at the ES instance
-}
newtype Server = Server Text deriving (Eq, Show, Generic, Typeable, FromJSON)
{-| All API calls to Elasticsearch operate within MonadBH. The idea is that
    it can be easily embedded in your own monad transformer stack. A default
    instance for a ReaderT and alias 'BH' is provided for the simple case.
-}
class (Functor m, A.Applicative m, MonadIO m) => MonadBH m where
getBHEnv :: m BHEnv

View File

@@ -5,7 +5,7 @@
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Client
-- Copyright : (C) 2014 Chris Allen
-- Copyright : (C) 2014, 2018 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
@@ -216,6 +216,7 @@ dispatch dMethod url body = do
$ setRequestIgnoreStatus
$ initReq { method = dMethod
, requestHeaders =
-- "application/x-ndjson" for bulk
("Content-Type", "application/json") : requestHeaders initReq
, requestBody = reqBody }
-- req <- liftIO $ reqHook $ setRequestIgnoreStatus $ initReq { method = dMethod
@@ -316,7 +317,7 @@ instance FromJSON GSRs where
parseJSON = withObject "Collection of GenericSnapshotRepo" parse
where
parse = fmap GSRs . mapM (uncurry go) . HM.toList
go rawName = withObject "GenericSnapshotRepo" $ \o -> do
go rawName = withObject "GenericSnapshotRepo" $ \o ->
GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type"
<*> o .: "settings"
@@ -554,16 +555,18 @@ deleteIndex (IndexName indexName) =
updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply
updateIndexSettings updates (IndexName indexName) =
bindM2 put url (return body)
where url = joinPath [indexName, "_settings"]
body = Just (encode jsonBody)
jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
where
url = joinPath [indexName, "_settings"]
body = Just (encode jsonBody)
jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName
-> m (Either EsError IndexSettingsSummary)
getIndexSettings (IndexName indexName) = do
getIndexSettings (IndexName indexName) =
parseEsResponse =<< get =<< url
where url = joinPath [indexName, "_settings"]
where
url = joinPath [indexName, "_settings"]
-- | 'forceMergeIndex'
--
@@ -703,7 +706,7 @@ listIndices =
url = joinPath ["_cat/indices?format=json"]
parse body = maybe (throwM (EsProtocolException body)) return $ do
vals <- decode body
forM vals $ \val -> do
forM vals $ \val ->
case val of
Object obj -> do
indexVal <- HM.lookup "index" obj
@@ -860,9 +863,11 @@ deleteDocument (IndexName indexName)
-- >>> _ <- runBH' $ bulk stream
-- >>> _ <- runBH' $ refreshIndex testIndex
bulk :: MonadBH m => V.Vector BulkOperation -> m Reply
bulk bulkOps = bindM2 post url (return body)
where url = joinPath ["_bulk"]
body = Just $ encodeBulkOperations bulkOps
bulk bulkOps =
bindM2 post url (return body)
where
url = joinPath ["_bulk"]
body = Just $ encodeBulkOperations bulkOps
-- | 'encodeBulkOperations' is a convenience function for dumping a vector of 'BulkOperation'
-- into an 'L.ByteString'
@@ -872,12 +877,15 @@ bulk bulkOps = bindM2 post url (return body)
-- "\n{\"index\":{\"_type\":\"tweet\",\"_id\":\"2\",\"_index\":\"twitter\"}}\n{\"name\":\"blah\"}\n"
encodeBulkOperations :: V.Vector BulkOperation -> L.ByteString
encodeBulkOperations stream = collapsed where
blobs = fmap encodeBulkOperation stream
mashedTaters = mash (mempty :: Builder) blobs
collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n")
blobs =
fmap encodeBulkOperation stream
mashedTaters =
mash (mempty :: Builder) blobs
collapsed =
toLazyByteString $ mappend mashedTaters (byteString "\n")
mash :: Builder -> V.Vector L.ByteString -> Builder
mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x))
mash = V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x)
mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value
mkBulkStreamValue operation indexName mappingName docId =
@@ -886,6 +894,12 @@ mkBulkStreamValue operation indexName mappingName docId =
, "_type" .= mappingName
, "_id" .= docId]]
mkBulkStreamValueAuto :: Text -> Text -> Text -> Value
mkBulkStreamValueAuto operation indexName mappingName =
object [operation .=
object [ "_index" .= indexName
, "_type" .= mappingName]]
-- | 'encodeBulkOperation' is a convenience function for dumping a single 'BulkOperation'
-- into an 'L.ByteString'
--
@@ -899,6 +913,18 @@ encodeBulkOperation (BulkIndex (IndexName indexName)
where metadata = mkBulkStreamValue "index" indexName mappingName docId
blob = encode metadata `mappend` "\n" `mappend` encode value
encodeBulkOperation (BulkIndexAuto (IndexName indexName)
(MappingName mappingName)
value) = blob
where metadata = mkBulkStreamValueAuto "index" indexName mappingName
blob = encode metadata `mappend` "\n" `mappend` encode value
encodeBulkOperation (BulkIndexEncodingAuto (IndexName indexName)
(MappingName mappingName)
encoding) = toLazyByteString blob
where metadata = toEncoding (mkBulkStreamValueAuto "index" indexName mappingName)
blob = fromEncoding metadata <> "\n" <> fromEncoding encoding
encodeBulkOperation (BulkCreate (IndexName indexName)
(MappingName mappingName)
(DocId docId) value) = blob
@@ -1099,7 +1125,7 @@ scanSearch indexName mappingName search = do
-- >>> mkSearch (Just query) Nothing
-- Search {queryBody = Just (TermQuery (Term {termField = "user", termValue = "bitemyapp"}) Nothing), filterBody = Nothing, sortBody = Nothing, aggBody = Nothing, highlight = Nothing, trackSortScores = False, from = From 0, size = Size 10, searchType = SearchTypeQueryThenFetch, fields = Nothing, source = Nothing}
mkSearch :: Maybe Query -> Maybe Filter -> Search
mkSearch query filter = Search query filter Nothing Nothing Nothing False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing
mkSearch query filter = Search query filter Nothing Nothing Nothing False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing Nothing
-- | 'mkAggregateSearch' is a helper function that defaults everything in a 'Search' except for
-- the 'Query' and the 'Aggregation'.
@@ -1109,7 +1135,7 @@ mkSearch query filter = Search query filter Nothing Nothing Nothing False (From
-- TermsAgg (TermsAggregation {term = Left "user", termInclude = Nothing, termExclude = Nothing, termOrder = Nothing, termMinDocCount = Nothing, termSize = Nothing, termShardSize = Nothing, termCollectMode = Just BreadthFirst, termExecutionHint = Nothing, termAggs = Nothing})
-- >>> let myAggregation = mkAggregateSearch Nothing $ mkAggregations "users" terms
mkAggregateSearch :: Maybe Query -> Aggregations -> Search
mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSearchAggs) Nothing False (From 0) (Size 0) SearchTypeQueryThenFetch Nothing Nothing Nothing
mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSearchAggs) Nothing False (From 0) (Size 0) SearchTypeQueryThenFetch Nothing Nothing Nothing Nothing
-- | 'mkHighlightSearch' is a helper function that defaults everything in a 'Search' except for
-- the 'Query' and the 'Aggregation'.
@@ -1118,7 +1144,7 @@ mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSear
-- >>> let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing]
-- >>> let search = mkHighlightSearch (Just query) testHighlight
mkHighlightSearch :: Maybe Query -> Highlights -> Search
mkHighlightSearch query searchHighlights = Search query Nothing Nothing Nothing (Just searchHighlights) False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing
mkHighlightSearch query searchHighlights = Search query Nothing Nothing Nothing (Just searchHighlights) False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing Nothing
-- | 'pageSearch' is a helper function that takes a search and assigns the from
-- and size fields for the search. The from parameter defines the offset

View File

@@ -0,0 +1,467 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V5.Bloodhound.Internal.Aggregation where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Database.V5.Bloodhound.Internal.Client
import Database.V5.Bloodhound.Internal.Highlight (HitHighlight)
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
import Database.V5.Bloodhound.Internal.Sort
type Aggregations = M.Map Text Aggregation
emptyAggregations :: Aggregations
emptyAggregations = M.empty
mkAggregations :: Text -> Aggregation -> Aggregations
mkAggregations name aggregation = M.insert name aggregation emptyAggregations
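-- Usage sketch: a terms aggregation named "users" over a hypothetical
-- "user" field, built with 'mkTermsAggregation' from below:
--
-- mkAggregations "users" (TermsAgg (mkTermsAggregation "user"))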
data Aggregation = TermsAgg TermsAggregation
| CardinalityAgg CardinalityAggregation
| DateHistogramAgg DateHistogramAggregation
| ValueCountAgg ValueCountAggregation
| FilterAgg FilterAggregation
| DateRangeAgg DateRangeAggregation
| MissingAgg MissingAggregation
| TopHitsAgg TopHitsAggregation
| StatsAgg StatisticsAggregation
deriving (Eq, Show)
instance ToJSON Aggregation where
toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
omitNulls ["terms" .= omitNulls [ toJSON' term,
"include" .= include,
"exclude" .= exclude,
"order" .= order,
"min_doc_count" .= minDocCount,
"size" .= size,
"shard_size" .= shardSize,
"collect_mode" .= collectMode,
"execution_hint" .= executionHint
],
"aggs" .= termAggs ]
where
toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y }
toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) =
object ["cardinality" .= omitNulls [ "field" .= field,
"precisionThreshold" .= precisionThreshold
]
]
toJSON (DateHistogramAgg
(DateHistogramAggregation field interval format
preZone postZone preOffset postOffset dateHistoAggs)) =
omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
"interval" .= interval,
"format" .= format,
"pre_zone" .= preZone,
"post_zone" .= postZone,
"pre_offset" .= preOffset,
"post_offset" .= postOffset
],
"aggs" .= dateHistoAggs ]
toJSON (ValueCountAgg a) = object ["value_count" .= v]
where v = case a of
(FieldValueCount (FieldName n)) ->
object ["field" .= n]
(ScriptValueCount s) ->
object ["script" .= s]
toJSON (FilterAgg (FilterAggregation filt ags)) =
omitNulls [ "filter" .= filt
, "aggs" .= ags]
toJSON (DateRangeAgg a) = object [ "date_range" .= a
]
toJSON (MissingAgg (MissingAggregation{..})) =
object ["missing" .= object ["field" .= maField]]
toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) =
omitNulls ["top_hits" .= omitNulls [ "size" .= msize
, "from" .= mfrom
, "sort" .= msort
]
]
toJSON (StatsAgg (StatisticsAggregation typ field)) =
object [stType .= omitNulls [ "field" .= field ]]
where
stType | typ == Basic = "stats"
| otherwise = "extended_stats"
data TopHitsAggregation = TopHitsAggregation
{ taFrom :: Maybe From
, taSize :: Maybe Size
, taSort :: Maybe Sort
} deriving (Eq, Show)
data MissingAggregation = MissingAggregation
{ maField :: Text
} deriving (Eq, Show)
data TermsAggregation = TermsAggregation
{ term :: Either Text Text
, termInclude :: Maybe TermInclusion
, termExclude :: Maybe TermInclusion
, termOrder :: Maybe TermOrder
, termMinDocCount :: Maybe Int
, termSize :: Maybe Int
, termShardSize :: Maybe Int
, termCollectMode :: Maybe CollectionMode
, termExecutionHint :: Maybe ExecutionHint
, termAggs :: Maybe Aggregations
} deriving (Eq, Show)
data CardinalityAggregation = CardinalityAggregation
{ cardinalityField :: FieldName,
precisionThreshold :: Maybe Int
} deriving (Eq, Show)
data DateHistogramAggregation = DateHistogramAggregation
{ dateField :: FieldName
, dateInterval :: Interval
, dateFormat :: Maybe Text
-- pre and post deprecated in 1.5
, datePreZone :: Maybe Text
, datePostZone :: Maybe Text
, datePreOffset :: Maybe Text
, datePostOffset :: Maybe Text
, dateAggs :: Maybe Aggregations
} deriving (Eq, Show)
data DateRangeAggregation = DateRangeAggregation
{ draField :: FieldName
, draFormat :: Maybe Text
, draRanges :: NonEmpty DateRangeAggRange
} deriving (Eq, Show)
instance ToJSON DateRangeAggregation where
toJSON DateRangeAggregation {..} =
omitNulls [ "field" .= draField
, "format" .= draFormat
, "ranges" .= toList draRanges
]
data DateRangeAggRange =
DateRangeFrom DateMathExpr
| DateRangeTo DateMathExpr
| DateRangeFromAndTo DateMathExpr DateMathExpr
deriving (Eq, Show)
instance ToJSON DateRangeAggRange where
toJSON (DateRangeFrom e) = object [ "from" .= e ]
toJSON (DateRangeTo e) = object [ "to" .= e ]
toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ]
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html> for more information.
data ValueCountAggregation =
FieldValueCount FieldName
| ScriptValueCount Script
deriving (Eq, Show)
-- | Single-bucket filter aggregations. See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html#search-aggregations-bucket-filter-aggregation> for more information.
data FilterAggregation = FilterAggregation
{ faFilter :: Filter
, faAggs :: Maybe Aggregations }
deriving (Eq, Show)
data StatisticsAggregation = StatisticsAggregation
{ statsType :: StatsType
, statsField :: FieldName }
deriving (Eq, Show)
data StatsType
= Basic
| Extended
deriving (Eq, Show)
mkTermsAggregation :: Text -> TermsAggregation
mkTermsAggregation t =
TermsAggregation (Left t)
Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkTermsScriptAggregation :: Text -> TermsAggregation
mkTermsScriptAggregation t =
  TermsAggregation (Right t)
    Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing
mkCardinalityAggregation :: FieldName -> CardinalityAggregation
mkCardinalityAggregation t = CardinalityAggregation t Nothing
mkStatsAggregation :: FieldName -> StatisticsAggregation
mkStatsAggregation = StatisticsAggregation Basic
mkExtendedStatsAggregation :: FieldName -> StatisticsAggregation
mkExtendedStatsAggregation = StatisticsAggregation Extended
type AggregationResults = M.Map Text Value
class BucketAggregation a where
key :: a -> BucketValue
docCount :: a -> Int
aggs :: a -> Maybe AggregationResults
data Bucket a = Bucket
{ buckets :: [a]
} deriving (Read, Show)
instance (FromJSON a) => FromJSON (Bucket a) where
parseJSON (Object v) = Bucket <$>
v .: "buckets"
parseJSON _ = mempty
data BucketValue = TextValue Text
| ScientificValue Scientific
| BoolValue Bool deriving (Read, Show)
instance FromJSON BucketValue where
parseJSON (String t) = return $ TextValue t
parseJSON (Number s) = return $ ScientificValue s
parseJSON (Bool b) = return $ BoolValue b
parseJSON _ = mempty
data TermInclusion = TermInclusion Text
| TermPattern Text Text deriving (Eq, Show)
instance ToJSON TermInclusion where
toJSON (TermInclusion x) = toJSON x
toJSON (TermPattern pattern flags) =
omitNulls [ "pattern" .= pattern
, "flags" .= flags]
data TermOrder = TermOrder
{ termSortField :: Text
, termSortOrder :: SortOrder } deriving (Eq, Show)
instance ToJSON TermOrder where
toJSON (TermOrder termSortField termSortOrder) =
object [termSortField .= termSortOrder]
data CollectionMode = BreadthFirst
| DepthFirst deriving (Eq, Show)
instance ToJSON CollectionMode where
toJSON BreadthFirst = "breadth_first"
toJSON DepthFirst = "depth_first"
data ExecutionHint = Ordinals
| GlobalOrdinals
| GlobalOrdinalsHash
| GlobalOrdinalsLowCardinality
| Map deriving (Eq, Show)
instance ToJSON ExecutionHint where
toJSON Ordinals = "ordinals"
toJSON GlobalOrdinals = "global_ordinals"
toJSON GlobalOrdinalsHash = "global_ordinals_hash"
toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality"
toJSON Map = "map"
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#date-math> for more information.
data DateMathExpr =
DateMathExpr DateMathAnchor [DateMathModifier]
deriving (Eq, Show)
instance ToJSON DateMathExpr where
toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods))
where fmtA DMNow = "now"
fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||"
fmtMod (AddTime n u) = "+" <> showText n <> fmtU u
fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u
fmtMod (RoundDownTo u) = "/" <> fmtU u
fmtU DMYear = "y"
fmtU DMMonth = "M"
fmtU DMWeek = "w"
fmtU DMDay = "d"
fmtU DMHour = "h"
fmtU DMMinute = "m"
fmtU DMSecond = "s"
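-- For example, DateMathExpr DMNow [SubtractTime 1 DMDay, RoundDownTo DMDay]
-- encodes to the date-math string "now-1d/d".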
-- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from.
data DateMathAnchor =
DMNow
| DMDate Day
deriving (Eq, Show)
data DateMathModifier =
AddTime Int DateMathUnit
| SubtractTime Int DateMathUnit
| RoundDownTo DateMathUnit
deriving (Eq, Show)
data DateMathUnit =
DMYear
| DMMonth
| DMWeek
| DMDay
| DMHour
| DMMinute
| DMSecond
deriving (Eq, Show)
data TermsResult = TermsResult
{ termKey :: BucketValue
, termsDocCount :: Int
, termsAggs :: Maybe AggregationResults
} deriving (Read, Show)
instance FromJSON TermsResult where
parseJSON (Object v) = TermsResult <$>
v .: "key" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v ["key", "doc_count"])
parseJSON _ = mempty
instance BucketAggregation TermsResult where
key = termKey
docCount = termsDocCount
aggs = termsAggs
data DateHistogramResult = DateHistogramResult
{ dateKey :: Int
, dateKeyStr :: Maybe Text
, dateDocCount :: Int
, dateHistogramAggs :: Maybe AggregationResults
} deriving (Show)
instance FromJSON DateHistogramResult where
parseJSON (Object v) = DateHistogramResult <$>
v .: "key" <*>
v .:? "key_as_string" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v [ "key"
, "doc_count"
, "key_as_string"
]
)
parseJSON _ = mempty
instance BucketAggregation DateHistogramResult where
key = TextValue . showText . dateKey
docCount = dateDocCount
aggs = dateHistogramAggs
data DateRangeResult = DateRangeResult
{ dateRangeKey :: Text
, dateRangeFrom :: Maybe UTCTime
, dateRangeFromAsString :: Maybe Text
, dateRangeTo :: Maybe UTCTime
, dateRangeToAsString :: Maybe Text
, dateRangeDocCount :: Int
, dateRangeAggs :: Maybe AggregationResults
} deriving (Eq, Show)
instance FromJSON DateRangeResult where
parseJSON = withObject "DateRangeResult" parse
where parse v = DateRangeResult <$>
v .: "key" <*>
(fmap posixMS <$> v .:? "from") <*>
v .:? "from_as_string" <*>
(fmap posixMS <$> v .:? "to") <*>
v .:? "to_as_string" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v [ "key"
, "from"
, "from_as_string"
, "to"
, "to_as_string"
, "doc_count"
]
)
instance BucketAggregation DateRangeResult where
key = TextValue . dateRangeKey
docCount = dateRangeDocCount
aggs = dateRangeAggs
toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
toTerms = toAggResult
toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
toDateHistogram = toAggResult
toMissing :: Text -> AggregationResults -> Maybe MissingResult
toMissing = toAggResult
toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a)
toTopHits = toAggResult
toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a
toAggResult t a = M.lookup t a >>= deserialize
where deserialize = parseMaybe parseJSON
-- Try to get an AggregationResults when we don't know the
-- field name. We filter out the known keys to try to minimize the noise.
getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults
getNamedSubAgg o knownKeys = maggRes
where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o
maggRes
| HM.null unknownKeys = Nothing
| otherwise = Just . M.fromList $ HM.toList unknownKeys
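-- Putting the pieces together (sketch; the aggregation name "users" is
-- hypothetical):
--
-- case toTerms "users" aggResults of
--   Just (Bucket results) -> map termKey results
--   Nothing               -> []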
data MissingResult = MissingResult
{ missingDocCount :: Int
} deriving (Show)
instance FromJSON MissingResult where
parseJSON = withObject "MissingResult" parse
where parse v = MissingResult <$> v .: "doc_count"
data TopHitResult a = TopHitResult
{ tarHits :: (SearchHits a)
} deriving Show
instance (FromJSON a) => FromJSON (TopHitResult a) where
parseJSON (Object v) = TopHitResult <$>
v .: "hits"
parseJSON _ = fail "Failure in FromJSON (TopHitResult a)"
data SearchHits a =
SearchHits { hitsTotal :: Int
, maxScore :: Score
, hits :: [Hit a] } deriving (Eq, Show)
instance (FromJSON a) => FromJSON (SearchHits a) where
parseJSON (Object v) = SearchHits <$>
v .: "total" <*>
v .: "max_score" <*>
v .: "hits"
parseJSON _ = empty
instance Semigroup (SearchHits a) where
(SearchHits ta ma ha) <> (SearchHits tb mb hb) =
SearchHits (ta + tb) (max ma mb) (ha <> hb)
instance Monoid (SearchHits a) where
mempty = SearchHits 0 Nothing mempty
mappend = (<>)
data Hit a =
Hit { hitIndex :: IndexName
, hitType :: MappingName
, hitDocId :: DocId
, hitScore :: Score
, hitSource :: Maybe a
, hitFields :: Maybe HitFields
, hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
instance (FromJSON a) => FromJSON (Hit a) where
parseJSON (Object v) = Hit <$>
v .: "_index" <*>
v .: "_type" <*>
v .: "_id" <*>
v .: "_score" <*>
v .:? "_source" <*>
v .:? "fields" <*>
v .:? "highlight"
parseJSON _ = empty

View File

@@ -0,0 +1,312 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Analysis where
import Bloodhound.Import
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.StringlyTyped
data Analysis = Analysis
{ analysisAnalyzer :: M.Map Text AnalyzerDefinition
, analysisTokenizer :: M.Map Text TokenizerDefinition
, analysisTokenFilter :: M.Map Text TokenFilterDefinition
} deriving (Eq, Show)
instance ToJSON Analysis where
toJSON (Analysis analyzer tokenizer tokenFilter) = object
[ "analyzer" .= analyzer
, "tokenizer" .= tokenizer
, "filter" .= tokenFilter
]
instance FromJSON Analysis where
parseJSON = withObject "Analysis" $ \m -> Analysis
<$> m .: "analyzer"
<*> m .:? "tokenizer" .!= M.empty
<*> m .:? "filter" .!= M.empty
newtype Tokenizer =
Tokenizer Text
deriving (Eq, Show, ToJSON, FromJSON)
data AnalyzerDefinition = AnalyzerDefinition
{ analyzerDefinitionTokenizer :: Maybe Tokenizer
, analyzerDefinitionFilter :: [TokenFilter]
} deriving (Eq,Show)
instance ToJSON AnalyzerDefinition where
toJSON (AnalyzerDefinition tokenizer tokenFilter) = object $ catMaybes
[ fmap ("tokenizer" .=) tokenizer
, Just $ "filter" .= tokenFilter
]
instance FromJSON AnalyzerDefinition where
parseJSON = withObject "AnalyzerDefinition" $ \m -> AnalyzerDefinition
<$> m .:? "tokenizer"
<*> m .:? "filter" .!= []
newtype TokenizerDefinition =
TokenizerDefinitionNgram Ngram
deriving (Eq,Show)
instance ToJSON TokenizerDefinition where
toJSON x = case x of
TokenizerDefinitionNgram (Ngram minGram maxGram tokenChars) -> object
[ "type" .= ("ngram" :: Text)
, "min_gram" .= minGram
, "max_gram" .= maxGram
, "token_chars" .= tokenChars
]
instance FromJSON TokenizerDefinition where
parseJSON = withObject "TokenizerDefinition" $ \m -> do
typ <- m .: "type" :: Parser Text
case typ of
"ngram" -> fmap TokenizerDefinitionNgram $ Ngram
<$> fmap unStringlyTypedInt (m .: "min_gram")
<*> fmap unStringlyTypedInt (m .: "max_gram")
<*> m .: "token_chars"
_ -> fail "invalid TokenizerDefinition"
data Ngram = Ngram
{ ngramMinGram :: Int
, ngramMaxGram :: Int
, ngramTokenChars :: [TokenChar]
} deriving (Eq,Show)
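-- Sketch: a tokenizer producing 2- and 3-grams over letters and digits,
-- and (roughly) the JSON it serializes to:
--
-- TokenizerDefinitionNgram (Ngram 2 3 [TokenLetter, TokenDigit])
-- -- {"type":"ngram","min_gram":2,"max_gram":3,"token_chars":["letter","digit"]}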
data TokenChar =
TokenLetter
| TokenDigit
| TokenWhitespace
| TokenPunctuation
| TokenSymbol
deriving (Eq,Show)
instance ToJSON TokenChar where
toJSON t = String $ case t of
TokenLetter -> "letter"
TokenDigit -> "digit"
TokenWhitespace -> "whitespace"
TokenPunctuation -> "punctuation"
TokenSymbol -> "symbol"
instance FromJSON TokenChar where
parseJSON = withText "TokenChar" $ \t -> case t of
"letter" -> return TokenLetter
"digit" -> return TokenDigit
"whitespace" -> return TokenWhitespace
"punctuation" -> return TokenPunctuation
"symbol" -> return TokenSymbol
_ -> fail "invalid TokenChar"
-- | Token filters are used to create custom analyzers.
data TokenFilterDefinition
= TokenFilterDefinitionLowercase (Maybe Language)
| TokenFilterDefinitionUppercase (Maybe Language)
| TokenFilterDefinitionApostrophe
| TokenFilterDefinitionReverse
| TokenFilterDefinitionSnowball Language
| TokenFilterDefinitionShingle Shingle
deriving (Eq, Show)
instance ToJSON TokenFilterDefinition where
toJSON x = case x of
TokenFilterDefinitionLowercase mlang -> object $ catMaybes
[ Just $ "type" .= ("lowercase" :: Text)
, fmap (\lang -> "language" .= languageToText lang) mlang
]
TokenFilterDefinitionUppercase mlang -> object $ catMaybes
[ Just $ "type" .= ("uppercase" :: Text)
, fmap (\lang -> "language" .= languageToText lang) mlang
]
TokenFilterDefinitionApostrophe -> object
[ "type" .= ("apostrophe" :: Text)
]
TokenFilterDefinitionReverse -> object
[ "type" .= ("reverse" :: Text)
]
TokenFilterDefinitionSnowball lang -> object
[ "type" .= ("snowball" :: Text)
, "language" .= languageToText lang
]
TokenFilterDefinitionShingle s -> object
[ "type" .= ("shingle" :: Text)
, "max_shingle_size" .= shingleMaxSize s
, "min_shingle_size" .= shingleMinSize s
, "output_unigrams" .= shingleOutputUnigrams s
, "output_unigrams_if_no_shingles" .= shingleOutputUnigramsIfNoShingles s
, "token_separator" .= shingleTokenSeparator s
, "filler_token" .= shingleFillerToken s
]
instance FromJSON TokenFilterDefinition where
parseJSON = withObject "TokenFilterDefinition" $ \m -> do
t <- m .: "type"
case (t :: Text) of
"reverse" -> return TokenFilterDefinitionReverse
"apostrophe" -> return TokenFilterDefinitionApostrophe
"lowercase" -> TokenFilterDefinitionLowercase
<$> m .:? "language"
"uppercase" -> TokenFilterDefinitionUppercase
<$> m .:? "language"
"snowball" -> TokenFilterDefinitionSnowball
<$> m .: "language"
"shingle" -> fmap TokenFilterDefinitionShingle $ Shingle
<$> (fmap.fmap) unStringlyTypedInt (m .:? "max_shingle_size") .!= 2
<*> (fmap.fmap) unStringlyTypedInt (m .:? "min_shingle_size") .!= 2
<*> (fmap.fmap) unStringlyTypedBool (m .:? "output_unigrams") .!= True
<*> (fmap.fmap) unStringlyTypedBool (m .:? "output_unigrams_if_no_shingles") .!= False
<*> m .:? "token_separator" .!= " "
<*> m .:? "filler_token" .!= "_"
_ -> fail ("unrecognized token filter type: " ++ T.unpack t)
-- | The set of languages that can be passed to various analyzers,
-- filters, etc. in ElasticSearch. Most data types in this module
-- that have a 'Language' field are actually only able to
-- handle a subset of these languages. Consult the official
-- ElasticSearch documentation to see what is actually supported.
data Language
= Arabic
| Armenian
| Basque
| Bengali
| Brazilian
| Bulgarian
| Catalan
| Cjk
| Czech
| Danish
| Dutch
| English
| Finnish
| French
| Galician
| German
| German2
| Greek
| Hindi
| Hungarian
| Indonesian
| Irish
| Italian
| Kp
| Latvian
| Lithuanian
| Lovins
| Norwegian
| Persian
| Porter
| Portuguese
| Romanian
| Russian
| Sorani
| Spanish
| Swedish
| Thai
| Turkish
deriving (Eq, Show)
instance ToJSON Language where
toJSON = String . languageToText
instance FromJSON Language where
parseJSON = withText "Language" $ \t -> case languageFromText t of
Nothing -> fail "not a supported ElasticSearch language"
Just lang -> return lang
languageToText :: Language -> Text
languageToText x = case x of
Arabic -> "arabic"
Armenian -> "armenian"
Basque -> "basque"
Bengali -> "bengali"
Brazilian -> "brazilian"
Bulgarian -> "bulgarian"
Catalan -> "catalan"
Cjk -> "cjk"
Czech -> "czech"
Danish -> "danish"
Dutch -> "dutch"
English -> "english"
Finnish -> "finnish"
French -> "french"
Galician -> "galician"
German -> "german"
German2 -> "german2"
Greek -> "greek"
Hindi -> "hindi"
Hungarian -> "hungarian"
Indonesian -> "indonesian"
Irish -> "irish"
Italian -> "italian"
Kp -> "kp"
Latvian -> "latvian"
Lithuanian -> "lithuanian"
Lovins -> "lovins"
Norwegian -> "norwegian"
Persian -> "persian"
Porter -> "porter"
Portuguese -> "portuguese"
Romanian -> "romanian"
Russian -> "russian"
Sorani -> "sorani"
Spanish -> "spanish"
Swedish -> "swedish"
Thai -> "thai"
Turkish -> "turkish"
languageFromText :: Text -> Maybe Language
languageFromText x = case x of
"arabic" -> Just Arabic
"armenian" -> Just Armenian
"basque" -> Just Basque
"bengali" -> Just Bengali
"brazilian" -> Just Brazilian
"bulgarian" -> Just Bulgarian
"catalan" -> Just Catalan
"cjk" -> Just Cjk
"czech" -> Just Czech
"danish" -> Just Danish
"dutch" -> Just Dutch
"english" -> Just English
"finnish" -> Just Finnish
"french" -> Just French
"galician" -> Just Galician
"german" -> Just German
"german2" -> Just German2
"greek" -> Just Greek
"hindi" -> Just Hindi
"hungarian" -> Just Hungarian
"indonesian" -> Just Indonesian
"irish" -> Just Irish
"italian" -> Just Italian
"kp" -> Just Kp
"latvian" -> Just Latvian
"lithuanian" -> Just Lithuanian
"lovins" -> Just Lovins
"norwegian" -> Just Norwegian
"persian" -> Just Persian
"porter" -> Just Porter
"portuguese" -> Just Portuguese
"romanian" -> Just Romanian
"russian" -> Just Russian
"sorani" -> Just Sorani
"spanish" -> Just Spanish
"swedish" -> Just Swedish
"thai" -> Just Thai
"turkish" -> Just Turkish
_ -> Nothing
data Shingle = Shingle
{ shingleMaxSize :: Int
, shingleMinSize :: Int
, shingleOutputUnigrams :: Bool
, shingleOutputUnigramsIfNoShingles :: Bool
, shingleTokenSeparator :: Text
, shingleFillerToken :: Text
} deriving (Eq, Show)

File diff suppressed because it is too large

View File

@@ -0,0 +1,164 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Highlight where
import Bloodhound.Import
import qualified Data.Map.Strict as M
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
type HitHighlight = M.Map Text [Text]
data Highlights = Highlights
{ globalsettings :: Maybe HighlightSettings
, highlightFields :: [FieldHighlight]
} deriving (Eq, Show)
instance ToJSON Highlights where
toJSON (Highlights global fields) =
omitNulls (("fields" .= fields)
: highlightSettingsPairs global)
data FieldHighlight =
FieldHighlight FieldName (Maybe HighlightSettings)
deriving (Eq, Show)
instance ToJSON FieldHighlight where
toJSON (FieldHighlight (FieldName fName) (Just fSettings)) =
object [ fName .= fSettings ]
toJSON (FieldHighlight (FieldName fName) Nothing) =
object [ fName .= emptyObject ]
data HighlightSettings =
Plain PlainHighlight
| Postings PostingsHighlight
| FastVector FastVectorHighlight
deriving (Eq, Show)
instance ToJSON HighlightSettings where
toJSON hs = omitNulls (highlightSettingsPairs (Just hs))
data PlainHighlight =
PlainHighlight { plainCommon :: Maybe CommonHighlight
, plainNonPost :: Maybe NonPostings }
deriving (Eq, Show)
-- This requires that index_options are set to 'offset' in the mapping.
data PostingsHighlight =
PostingsHighlight (Maybe CommonHighlight)
deriving (Eq, Show)
-- This requires that term_vector is set to 'with_positions_offsets' in the mapping.
data FastVectorHighlight = FastVectorHighlight
{ fvCommon :: Maybe CommonHighlight
, fvNonPostSettings :: Maybe NonPostings
, boundaryChars :: Maybe Text
, boundaryMaxScan :: Maybe Int
, fragmentOffset :: Maybe Int
, matchedFields :: [Text]
, phraseLimit :: Maybe Int
} deriving (Eq, Show)
data CommonHighlight = CommonHighlight
{ order :: Maybe Text
, forceSource :: Maybe Bool
, tag :: Maybe HighlightTag
, encoder :: Maybe HighlightEncoder
, noMatchSize :: Maybe Int
, highlightQuery :: Maybe Query
, requireFieldMatch :: Maybe Bool
} deriving (Eq, Show)
-- Settings that are only applicable to FastVector and Plain highlighters.
data NonPostings =
NonPostings { fragmentSize :: Maybe Int
, numberOfFragments :: Maybe Int
} deriving (Eq, Show)
data HighlightEncoder = DefaultEncoder
| HTMLEncoder
deriving (Eq, Show)
instance ToJSON HighlightEncoder where
toJSON DefaultEncoder = String "default"
toJSON HTMLEncoder = String "html"
-- NOTE: Should the tags use some kind of HTML type, rather than Text?
data HighlightTag =
TagSchema Text
-- Only the fvh highlighter uses more than the first value in each list.
| CustomTags ([Text], [Text])
deriving (Eq, Show)
highlightSettingsPairs :: Maybe HighlightSettings -> [Pair]
highlightSettingsPairs Nothing = []
highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh)
highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph)
highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh)
plainHighPairs :: Maybe PlainHighlight -> [Pair]
plainHighPairs Nothing = []
plainHighPairs (Just (PlainHighlight plCom plNonPost)) =
[ "type" .= String "plain"]
++ commonHighlightPairs plCom
++ nonPostingsToPairs plNonPost
postHighPairs :: Maybe PostingsHighlight -> [Pair]
postHighPairs Nothing = []
postHighPairs (Just (PostingsHighlight pCom)) =
("type" .= String "postings")
: commonHighlightPairs pCom
fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair]
fastVectorHighPairs Nothing = []
fastVectorHighPairs
(Just
(FastVectorHighlight fvCom fvNonPostSettings' fvBoundChars
fvBoundMaxScan fvFragOff fvMatchedFields
fvPhraseLim)) =
[ "type" .= String "fvh"
, "boundary_chars" .= fvBoundChars
, "boundary_max_scan" .= fvBoundMaxScan
, "fragment_offset" .= fvFragOff
, "matched_fields" .= fvMatchedFields
, "phraseLimit" .= fvPhraseLim]
++ commonHighlightPairs fvCom
++ nonPostingsToPairs fvNonPostSettings'
commonHighlightPairs :: Maybe CommonHighlight -> [Pair]
commonHighlightPairs Nothing = []
commonHighlightPairs (Just (CommonHighlight chScore chForceSource
chTag chEncoder chNoMatchSize
chHighlightQuery chRequireFieldMatch)) =
[ "order" .= chScore
, "force_source" .= chForceSource
, "encoder" .= chEncoder
, "no_match_size" .= chNoMatchSize
, "highlight_query" .= chHighlightQuery
, "require_fieldMatch" .= chRequireFieldMatch
]
++ highlightTagToPairs chTag
nonPostingsToPairs :: Maybe NonPostings -> [Pair]
nonPostingsToPairs Nothing = []
nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) =
[ "fragment_size" .= npFragSize
, "number_of_fragments" .= npNumOfFrags
]
highlightTagToPairs :: Maybe HighlightTag -> [Pair]
highlightTagToPairs (Just (TagSchema _)) =
[ "scheme" .= String "default"
]
highlightTagToPairs (Just (CustomTags (pre, post))) =
[ "pre_tags" .= pre
, "post_tags" .= post
]
highlightTagToPairs Nothing = []
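-- Usage sketch: plain highlighting of a hypothetical "message" field with
-- everything else defaulted:
--
-- Highlights Nothing [FieldHighlight (FieldName "message") Nothing]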

View File

@@ -0,0 +1,234 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Newtypes where
import Bloodhound.Import
import qualified Data.Map.Strict as M
newtype From = From Int deriving (Eq, Show, ToJSON)
newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON)
-- Used with scripts
newtype HitFields =
HitFields (M.Map Text [Value])
deriving (Eq, Show)
instance FromJSON HitFields where
parseJSON x
= HitFields <$> parseJSON x
-- Slight misnomer.
type Score = Maybe Double
{-| 'MappingName' is part of mappings which are how ES describes and schematizes
the data in the indices.
-}
newtype MappingName =
MappingName Text
deriving (Eq, Show, ToJSON, FromJSON)
newtype ShardId = ShardId { shardId :: Int }
deriving (Eq, Show, FromJSON)
{-| 'DocId' is a generic wrapper value for expressing unique Document IDs.
Can be set by the user or created by ES itself. Often used in client
functions for poking at specific documents.
-}
newtype DocId =
DocId Text
deriving (Eq, Show, ToJSON, FromJSON)
{-| 'FieldName' is used all over the place wherever a specific field within
a document needs to be specified, usually in 'Query's or 'Filter's.
-}
newtype FieldName =
FieldName Text
deriving (Eq, Read, Show, ToJSON, FromJSON)
newtype TypeName =
TypeName Text
deriving (Eq, Show, ToJSON, FromJSON)
{-| 'QueryString' is used to wrap query text bodies, be they human written or not.
-}
newtype QueryString =
QueryString Text
deriving (Eq, Show, ToJSON, FromJSON)
-- {-| 'Script' is often used in place of 'FieldName' to specify more
-- complex ways of extracting a value from a document.
-- -}
-- newtype Script =
-- Script { scriptText :: Text }
-- deriving (Eq, Show)
{-| 'CacheName' is used in 'RegexpFilter' for describing the
'CacheKey' keyed caching behavior.
-}
newtype CacheName =
CacheName Text
deriving (Eq, Show, FromJSON, ToJSON)
{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching.
-}
newtype CacheKey =
CacheKey Text deriving (Eq, Show, FromJSON, ToJSON)
newtype Existence =
Existence Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype NullValue =
NullValue Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype CutoffFrequency =
CutoffFrequency Double deriving (Eq, Show, FromJSON, ToJSON)
newtype Analyzer =
Analyzer Text deriving (Eq, Show, FromJSON, ToJSON)
newtype MaxExpansions =
MaxExpansions Int deriving (Eq, Show, FromJSON, ToJSON)
{-| 'Lenient', if set to true, will cause format based failures to be
ignored. I don't know what the bloody default is; the Elasticsearch
documentation doesn't say. Let me know if you figure it out.
-}
newtype Lenient =
Lenient Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype Tiebreaker =
Tiebreaker Double deriving (Eq, Show, FromJSON, ToJSON)
{-| 'MinimumMatch' controls how many should clauses in the bool query should
match. Can be an absolute value (2) or a percentage (30%) or a
combination of both.
-}
newtype MinimumMatch =
MinimumMatch Int deriving (Eq, Show, FromJSON, ToJSON)
newtype DisableCoord =
DisableCoord Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype IgnoreTermFrequency =
IgnoreTermFrequency Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype MinimumTermFrequency =
MinimumTermFrequency Int deriving (Eq, Show, FromJSON, ToJSON)
newtype MaxQueryTerms =
MaxQueryTerms Int deriving (Eq, Show, FromJSON, ToJSON)
newtype Fuzziness =
Fuzziness Double deriving (Eq, Show, FromJSON, ToJSON)
{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -}
newtype PrefixLength =
PrefixLength Int deriving (Eq, Show, FromJSON, ToJSON)
newtype PercentMatch =
PercentMatch Double deriving (Eq, Show, FromJSON, ToJSON)
newtype StopWord =
StopWord Text deriving (Eq, Show, FromJSON, ToJSON)
newtype QueryPath =
QueryPath Text deriving (Eq, Show, FromJSON, ToJSON)
{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly
heavy, because all terms in the index need to be examined, just in case
they match. Leading wildcards can be disabled by setting
'AllowLeadingWildcard' to false. -}
newtype AllowLeadingWildcard =
AllowLeadingWildcard Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype LowercaseExpanded =
LowercaseExpanded Bool deriving (Eq, Show, FromJSON, ToJSON)
newtype EnablePositionIncrements =
EnablePositionIncrements Bool deriving (Eq, Show, FromJSON, ToJSON)
{-| By default, wildcard terms in a query are not analyzed.
Setting 'AnalyzeWildcard' to true enables best-effort analysis.
-}
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, FromJSON, ToJSON)
{-| 'GeneratePhraseQueries' defaults to false.
-}
newtype GeneratePhraseQueries =
GeneratePhraseQueries Bool deriving (Eq, Show, FromJSON, ToJSON)
{-| 'Locale' is used for string conversions - defaults to ROOT.
-}
newtype Locale = Locale Text deriving (Eq, Show, FromJSON, ToJSON)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, FromJSON, ToJSON)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show, FromJSON, ToJSON)
{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact
phrase matches. Default is 0.
-}
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, FromJSON, ToJSON)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, FromJSON, ToJSON)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, FromJSON, ToJSON)
-- | Newtype wrapper to parse ES's concerning tendency, in some APIs, to return a floating point number of milliseconds since epoch ಠ_ಠ
newtype POSIXMS = POSIXMS { posixMS :: UTCTime }
instance FromJSON POSIXMS where
parseJSON = withScientific "POSIXMS" (return . parse)
where parse n =
let n' = truncate n :: Integer
in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000)))
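-- Sketch: a JSON number such as 1428149400000 (milliseconds since epoch)
-- decodes to the corresponding 'UTCTime'; sub-second precision is dropped
-- by the integer division above.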
newtype Boost =
Boost Double
deriving (Eq, Show, ToJSON, FromJSON)
newtype BoostTerms =
BoostTerms Double
deriving (Eq, Show, ToJSON, FromJSON)
{-| 'ReplicaCount' is part of 'IndexSettings' -}
newtype ReplicaCount =
ReplicaCount Int
deriving (Eq, Show, ToJSON)
{-| 'ShardCount' is part of 'IndexSettings' -}
newtype ShardCount =
ShardCount Int
deriving (Eq, Show, ToJSON)
-- This insanity is because ES *sometimes* returns Replica/Shard counts as strings
instance FromJSON ReplicaCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ReplicaCount . parseJSON
parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText)
instance FromJSON ShardCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ShardCount . parseJSON
parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText)
{-| 'IndexName' is used to describe which index to query/create/delete -}
newtype IndexName =
IndexName Text
deriving (Eq, Show, ToJSON, FromJSON)
newtype IndexAliasName =
IndexAliasName { indexAliasName :: IndexName }
deriving (Eq, Show, ToJSON)
newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a }
deriving (Show, Eq)
instance FromJSON a => FromJSON (MaybeNA a) where
parseJSON (String "NA") = pure $ MaybeNA Nothing
parseJSON o = MaybeNA . Just <$> parseJSON o
newtype SnapshotName =
SnapshotName { snapshotName :: Text }
deriving (Eq, Show, ToJSON, FromJSON)
-- | Milliseconds
newtype MS = MS NominalDiffTime
-- keeps the unexported constructor warnings at bay
unMS :: MS -> NominalDiffTime
unMS (MS t) = t
instance FromJSON MS where
parseJSON = withScientific "MS" (return . MS . parse)
where
parse n = fromInteger (truncate n * 1000)
newtype TokenFilter =
TokenFilter Text deriving (Eq, Show, FromJSON, ToJSON)

File diff suppressed because it is too large

View File

@@ -0,0 +1,107 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V5.Bloodhound.Internal.Sort where
import Bloodhound.Import
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
-}
data SortMode = SortMin
| SortMax
| SortSum
| SortAvg deriving (Eq, Show)
instance ToJSON SortMode where
toJSON SortMin = String "min"
toJSON SortMax = String "max"
toJSON SortSum = String "sum"
toJSON SortAvg = String "avg"
{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
that you can concisely describe the usual kind of 'SortSpec's you want.
-}
mkSort :: FieldName -> SortOrder -> DefaultSort
mkSort fieldName sOrder = DefaultSort fieldName sOrder Nothing Nothing Nothing Nothing
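-- Sketch: sort descending on a hypothetical "date" field:
--
-- DefaultSortSpec (mkSort (FieldName "date") Descending)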
{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
dependent with later sorts acting as tie-breakers for earlier sorts.
-}
type Sort = [SortSpec]
{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
'DistanceUnit' to express "nearness" to a single geographical point as a
sort specification.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortSpec =
DefaultSortSpec DefaultSort
| GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit
deriving (Eq, Show)
instance ToJSON SortSpec where
toJSON (DefaultSortSpec
(DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
dsSortMode dsMissingSort dsNestedFilter)) =
object [dsSortFieldName .= omitNulls base] where
base = [ "order" .= dsSortOrder
, "unmapped_type" .= dsIgnoreUnmapped
, "mode" .= dsSortMode
, "missing" .= dsMissingSort
, "nested_filter" .= dsNestedFilter ]
toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
object [ "unit" .= units
, field .= gdsLatLon
, "order" .= gdsSortOrder ]
{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
'mkSort' convenience function for when you want to specify only the most
common parameters.
The `ignoreUnmapped` field, when `Just`, is used to set the Elasticsearch 'unmapped_type'
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data DefaultSort =
DefaultSort { sortFieldName :: FieldName
, sortOrder :: SortOrder
-- defaults to Nothing in 'mkSort'
, ignoreUnmapped :: Maybe Text
, sortMode :: Maybe SortMode
, missingSort :: Maybe Missing
, nestedFilter :: Maybe Filter } deriving (Eq, Show)
{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
encoded into "asc" or "desc" when turned into JSON.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortOrder = Ascending
| Descending deriving (Eq, Show)
instance ToJSON SortOrder where
toJSON Ascending = String "asc"
toJSON Descending = String "desc"
{-| 'Missing' prescribes how to handle missing fields. A missing field can be
sorted last, first, or using a custom value as a substitute.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_missing_values>
-}
data Missing = LastMissing
| FirstMissing
| CustomMissing Text deriving (Eq, Show)
instance ToJSON Missing where
toJSON LastMissing = String "_last"
toJSON FirstMissing = String "_first"
toJSON (CustomMissing txt) = String txt

View File

@ -0,0 +1,51 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.StringlyTyped where
import Bloodhound.Import
import qualified Data.Text as T
-- This whole module is a sin bucket to deal with Elasticsearch badness.
newtype StringlyTypedDouble = StringlyTypedDouble
{ unStringlyTypedDouble :: Double }
instance FromJSON StringlyTypedDouble where
parseJSON =
fmap StringlyTypedDouble
. parseJSON
. unStringlyTypeJSON
newtype StringlyTypedInt = StringlyTypedInt
{ unStringlyTypedInt :: Int }
instance FromJSON StringlyTypedInt where
parseJSON =
fmap StringlyTypedInt
. parseJSON
. unStringlyTypeJSON
newtype StringlyTypedBool = StringlyTypedBool { unStringlyTypedBool :: Bool }
instance FromJSON StringlyTypedBool where
parseJSON =
fmap StringlyTypedBool
. parseJSON
. unStringlyTypeJSON
-- | For some reason in several settings APIs, all leaf values get returned
-- as strings. This function attempts to recover from this for all
-- non-recursive JSON types. If nothing can be done, the value is left alone.
unStringlyTypeJSON :: Value -> Value
unStringlyTypeJSON (String "true") =
Bool True
unStringlyTypeJSON (String "false") =
Bool False
unStringlyTypeJSON (String "null") =
Null
unStringlyTypeJSON v@(String t) =
case readMay (T.unpack t) of
Just n -> Number n
Nothing -> v
unStringlyTypeJSON v = v

View File

@@ -0,0 +1,265 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Suggest where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query (TemplateQueryInline(..), params)
data Suggest = Suggest
{ suggestText :: Text
, suggestName :: Text
, suggestType :: SuggestType
} deriving (Eq, Show)
instance ToJSON Suggest where
toJSON Suggest{..} =
object [ "text" .= suggestText
, suggestName .= suggestType
]
instance FromJSON Suggest where
parseJSON (Object o) = do
suggestText' <- o .: "text"
let dropTextList =
HM.toList
$ HM.filterWithKey (\x _ -> x /= "text") o
suggestName' <-
case dropTextList of
[(x, _)] -> return x
_ -> fail "error parsing Suggest field name"
suggestType' <- o .: suggestName'
return $ Suggest suggestText' suggestName' suggestType'
parseJSON x = typeMismatch "Suggest" x
data SuggestType =
SuggestTypePhraseSuggester PhraseSuggester
deriving (Eq, Show)
instance ToJSON SuggestType where
toJSON (SuggestTypePhraseSuggester x) =
object [ "phrase" .= x ]
instance FromJSON SuggestType where
parseJSON = withObject "SuggestType" parse
where parse o = phraseSuggester `taggedWith` "phrase"
where taggedWith parser k = parser =<< o .: k
phraseSuggester = pure . SuggestTypePhraseSuggester
data PhraseSuggester = PhraseSuggester
{ phraseSuggesterField :: FieldName
, phraseSuggesterGramSize :: Maybe Int
, phraseSuggesterRealWordErrorLikelihood :: Maybe Int
, phraseSuggesterConfidence :: Maybe Int
, phraseSuggesterMaxErrors :: Maybe Int
, phraseSuggesterSeparator :: Maybe Text
, phraseSuggesterSize :: Maybe Size
, phraseSuggesterAnalyzer :: Maybe Analyzer
, phraseSuggesterShardSize :: Maybe Int
, phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter
, phraseSuggesterCollate :: Maybe PhraseSuggesterCollate
, phraseSuggesterCandidateGenerators :: [DirectGenerators]
} deriving (Eq, Show)
instance ToJSON PhraseSuggester where
toJSON PhraseSuggester{..} =
omitNulls [ "field" .= phraseSuggesterField
, "gram_size" .= phraseSuggesterGramSize
, "real_word_error_likelihood" .=
phraseSuggesterRealWordErrorLikelihood
, "confidence" .= phraseSuggesterConfidence
, "max_errors" .= phraseSuggesterMaxErrors
, "separator" .= phraseSuggesterSeparator
, "size" .= phraseSuggesterSize
, "analyzer" .= phraseSuggesterAnalyzer
, "shard_size" .= phraseSuggesterShardSize
, "highlight" .= phraseSuggesterHighlight
, "collate" .= phraseSuggesterCollate
, "direct_generator" .=
phraseSuggesterCandidateGenerators
]
instance FromJSON PhraseSuggester where
parseJSON = withObject "PhraseSuggester" parse
where parse o = PhraseSuggester
<$> o .: "field"
<*> o .:? "gram_size"
<*> o .:? "real_word_error_likelihood"
<*> o .:? "confidence"
<*> o .:? "max_errors"
<*> o .:? "separator"
<*> o .:? "size"
<*> o .:? "analyzer"
<*> o .:? "shard_size"
<*> o .:? "highlight"
<*> o .:? "collate"
<*> o .:? "direct_generator" .!= []
mkPhraseSuggester :: FieldName -> PhraseSuggester
mkPhraseSuggester fName =
PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing []
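-- A minimal usage sketch; the suggest text, name, and field below are
-- hypothetical, shown only to illustrate how the pieces compose:
exampleSuggest :: Suggest
exampleSuggest =
  Suggest "some tex" "my-suggestion"
    (SuggestTypePhraseSuggester (mkPhraseSuggester (FieldName "message")))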
data PhraseSuggesterHighlighter =
PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text
, phraseSuggesterHighlighterPostTag :: Text
}
deriving (Eq, Show)
instance ToJSON PhraseSuggesterHighlighter where
toJSON PhraseSuggesterHighlighter{..} =
object [ "pre_tag" .= phraseSuggesterHighlighterPreTag
, "post_tag" .= phraseSuggesterHighlighterPostTag
]
instance FromJSON PhraseSuggesterHighlighter where
parseJSON = withObject "PhraseSuggesterHighlighter" parse
where parse o = PhraseSuggesterHighlighter
<$> o .: "pre_tag"
<*> o .: "post_tag"
data PhraseSuggesterCollate = PhraseSuggesterCollate
{ phraseSuggesterCollateTemplateQuery :: TemplateQueryInline
, phraseSuggesterCollatePrune :: Bool
} deriving (Eq, Show)
instance ToJSON PhraseSuggesterCollate where
toJSON PhraseSuggesterCollate{..} =
object [ "query" .= object
[ "inline" .= (inline phraseSuggesterCollateTemplateQuery)
]
, "params" .= (params phraseSuggesterCollateTemplateQuery)
, "prune" .= phraseSuggesterCollatePrune
]
instance FromJSON PhraseSuggesterCollate where
parseJSON (Object o) = do
query' <- o .: "query"
inline' <- query' .: "inline"
params' <- o .: "params"
prune' <- o .:? "prune" .!= False
return $ PhraseSuggesterCollate
(TemplateQueryInline inline' params') prune'
parseJSON x = typeMismatch "PhraseSuggesterCollate" x
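-- For orientation, collate round-trips through JSON shaped like the
-- following (the inline template and params values are hypothetical):
--
--   { "query": {"inline": "{\"match\": {\"{{field_name}}\": \"{{suggestion}}\"}}"}
--   , "params": {"field_name": "message"}
--   , "prune": false
--   }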
data SuggestOptions =
SuggestOptions { suggestOptionsText :: Text
, suggestOptionsScore :: Double
, suggestOptionsFreq :: Maybe Int
, suggestOptionsHighlighted :: Maybe Text
}
deriving (Eq, Read, Show)
instance FromJSON SuggestOptions where
parseJSON = withObject "SuggestOptions" parse
where
parse o = SuggestOptions
<$> o .: "text"
<*> o .: "score"
<*> o .:? "freq"
<*> o .:? "highlighted"
data SuggestResponse =
SuggestResponse { suggestResponseText :: Text
, suggestResponseOffset :: Int
, suggestResponseLength :: Int
, suggestResponseOptions :: [SuggestOptions]
}
deriving (Eq, Read, Show)
instance FromJSON SuggestResponse where
parseJSON = withObject "SuggestResponse" parse
where parse o = SuggestResponse
<$> o .: "text"
<*> o .: "offset"
<*> o .: "length"
<*> o .: "options"
data NamedSuggestionResponse = NamedSuggestionResponse
{ nsrName :: Text
, nsrResponses :: [SuggestResponse]
} deriving (Eq, Read, Show)
instance FromJSON NamedSuggestionResponse where
parseJSON (Object o) = do
suggestionName' <- case HM.toList o of
[(x, _)] -> return x
_ -> fail "error parsing NamedSuggestionResponse name"
suggestionResponses' <- o .: suggestionName'
return $ NamedSuggestionResponse suggestionName' suggestionResponses'
parseJSON x = typeMismatch "NamedSuggestionResponse" x
data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing
| DirectGeneratorSuggestModePopular
| DirectGeneratorSuggestModeAlways
deriving (Eq, Show)
instance ToJSON DirectGeneratorSuggestModeTypes where
toJSON DirectGeneratorSuggestModeMissing = "missing"
toJSON DirectGeneratorSuggestModePopular = "popular"
toJSON DirectGeneratorSuggestModeAlways = "always"
instance FromJSON DirectGeneratorSuggestModeTypes where
parseJSON = withText "DirectGeneratorSuggestModeTypes" parse
where
parse "missing" =
pure DirectGeneratorSuggestModeMissing
parse "popular" =
pure DirectGeneratorSuggestModePopular
parse "always" =
pure DirectGeneratorSuggestModeAlways
parse f =
fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f)
data DirectGenerators = DirectGenerators
{ directGeneratorsField :: FieldName
, directGeneratorsSize :: Maybe Int
, directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes
, directGeneratorMaxEdits :: Maybe Double
, directGeneratorPrefixLength :: Maybe Int
, directGeneratorMinWordLength :: Maybe Int
, directGeneratorMaxInspections :: Maybe Int
, directGeneratorMinDocFreq :: Maybe Double
, directGeneratorMaxTermFreq :: Maybe Double
, directGeneratorPreFilter :: Maybe Text
, directGeneratorPostFilter :: Maybe Text
} deriving (Eq, Show)
instance ToJSON DirectGenerators where
toJSON DirectGenerators{..} =
omitNulls [ "field" .= directGeneratorsField
, "size" .= directGeneratorsSize
, "suggest_mode" .= directGeneratorSuggestMode
, "max_edits" .= directGeneratorMaxEdits
, "prefix_length" .= directGeneratorPrefixLength
, "min_word_length" .= directGeneratorMinWordLength
, "max_inspections" .= directGeneratorMaxInspections
, "min_doc_freq" .= directGeneratorMinDocFreq
, "max_term_freq" .= directGeneratorMaxTermFreq
, "pre_filter" .= directGeneratorPreFilter
, "post_filter" .= directGeneratorPostFilter
]
instance FromJSON DirectGenerators where
parseJSON = withObject "DirectGenerators" parse
where parse o = DirectGenerators
<$> o .: "field"
<*> o .:? "size"
<*> o .: "suggest_mode"
<*> o .:? "max_edits"
<*> o .:? "prefix_length"
<*> o .:? "min_word_length"
<*> o .:? "max_inspections"
<*> o .:? "min_doc_freq"
<*> o .:? "max_term_freq"
<*> o .:? "pre_filter"
<*> o .:? "post_filter"
mkDirectGenerators :: FieldName -> DirectGenerators
mkDirectGenerators fn =
  DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing
    Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
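-- Usage sketch: attaching a candidate generator to a phrase suggester
-- (the field name is hypothetical):
suggesterWithGenerator :: PhraseSuggester
suggesterWithGenerator =
  (mkPhraseSuggester (FieldName "message"))
    { phraseSuggesterCandidateGenerators =
        [mkDirectGenerators (FieldName "message")] }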

File diff suppressed because it is too large


@ -1,57 +0,0 @@
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types.Internal
-- Copyright : (C) 2014 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
-- Portability : DeriveGeneric, RecordWildCards
--
-- Internal data types for Bloodhound. These types may change without
-- notice so import at your own risk.
-------------------------------------------------------------------------------
module Database.V5.Bloodhound.Types.Internal
( BHEnv(..)
, Server(..)
, MonadBH(..)
) where
import Control.Applicative as A
import Control.Monad.Reader
import Data.Aeson
import Data.Text (Text)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Network.HTTP.Client
{-| Common environment for Elasticsearch calls. Connections will be
pipelined according to the provided HTTP connection manager.
-}
data BHEnv = BHEnv { bhServer :: Server
, bhManager :: Manager
, bhRequestHook :: Request -> IO Request
-- ^ Low-level hook that is run before every request is sent. Used to implement custom authentication strategies. Defaults to 'return' with 'mkBHEnv'.
}
instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
getBHEnv = ask
{-| 'Server' is used with the client functions to point at the ES instance
-}
newtype Server = Server Text deriving (Eq, Show, Generic, Typeable, FromJSON)
{-| All API calls to Elasticsearch operate within MonadBH. The idea is
    that it can be easily embedded in your own monad transformer stack.
    A default instance for a ReaderT and alias 'BH' is provided for the
    simple case.
-}
class (Functor m, A.Applicative m, MonadIO m) => MonadBH m where
getBHEnv :: m BHEnv


@ -15,4 +15,7 @@ extra-deps:
- uri-bytestring-0.1.9
- temporary-resourcet-0.1.0.0
- transformers-compat-0.5.1.4
- pretty-simple-2.0.2.0
- quickcheck-arbitrary-template-0.2.0.0
resolver: lts-6.12


@ -19,4 +19,7 @@ extra-deps:
- void-0.7.1
- generics-sop-0.2.2.0
- unordered-containers-0.2.6.0
- pretty-simple-2.0.2.0
- quickcheck-arbitrary-template-0.2.0.0
resolver: lts-2.18


@ -7,5 +7,6 @@ packages:
- './examples'
extra-deps:
- quickcheck-properties-0.1
- quickcheck-arbitrary-template-0.2.0.0
resolver: lts-8.14


@ -10,5 +10,6 @@ extra-deps:
- quickcheck-properties-0.1
- http-types-0.12.1
- aeson-1.3.0.0
- quickcheck-arbitrary-template-0.2.0.0
resolver: lts-11.0


@ -0,0 +1,132 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Aggregation (spec) where
import Test.Common
import Test.Import
import Control.Error (fmapL, note)
import qualified Data.Map as M
import qualified Database.V1.Bloodhound
spec :: Spec
spec =
describe "Aggregation API" $ do
it "returns term aggregation results" $ withTestEnv $ do
_ <- insertData
let terms = TermsAgg $ mkTermsAggregation "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
searchExpectAggs search
searchValidBucketAgg search "users" toTerms
it "return sub-aggregation results" $ withTestEnv $ do
_ <- insertData
let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age"
agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs}
search = mkAggregateSearch Nothing $ mkAggregations "users" agg
reply <- searchByIndex testIndex search
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
usersAggResults = result >>= aggregations >>= toTerms "users"
subAggResults = usersAggResults >>= (listToMaybe . buckets) >>= termsAggs >>= toTerms "age_agg"
subAggResultsExist = isJust subAggResults
liftIO $ subAggResultsExist `shouldBe` True
it "returns cardinality aggregation results" $ withTestEnv $ do
_ <- insertData
let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality
let search' = search { Database.V1.Bloodhound.from = From 0, size = Size 0 }
searchExpectAggs search'
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search'
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1]))
it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do
_ <- insertData
let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst }
let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
searchExpectAggs search
searchValidBucketAgg search "users" toTerms
-- Note: one of the three execution hint cases below fails against ES 1.7.3.
it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [Map, Ordinals]
it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals]
it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map]
-- End of the execution hint cases noted above.
it "can execute value_count aggregations" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <>
mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus")))
let search = mkAggregateSearch Nothing ags
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2
, docCountPair "bogus_count" 0
]))
it "can execute date_range aggregations" $ withTestEnv $ do
let now = fromGregorian 2015 3 14
let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0
let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0
let oldDoc = exampleTweet { postDate = ltAMonthAgo }
let newDoc = exampleTweet { postDate = ltAWeekAgo }
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1")
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2")
_ <- refreshIndex testIndex
let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth])
let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek])
let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek])
let ags = mkAggregations "date_ranges" (DateRangeAgg agg)
let search = mkAggregateSearch Nothing ags
res <- searchTweets search
liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2
let bucks = do magrs <- fmapL show (aggregations <$> res)
agrs <- note "no aggregations returned" magrs
rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs
parseEither parseJSON rawBucks
let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0
let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0
liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*"
(Just fromMonthT)
(Just "2015-02-14T00:00:00.000Z")
Nothing
Nothing
2
Nothing
, DateRangeResult "2015-03-07T00:00:00.000Z-*"
(Just fromWeekT)
(Just "2015-03-07T00:00:00.000Z")
Nothing
Nothing
1
Nothing
]
it "returns date histogram aggregation results" $ withTestEnv $ do
_ <- insertData
let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute
let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram)
searchExpectAggs search
searchValidBucketAgg search "byDate" toDateHistogram
it "can execute missing aggregations" $ withTestEnv $ do
_ <- insertData
_ <- insertExtra
let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra"))
let search = mkAggregateSearch Nothing ags
let docCountPair k n = (k, object ["doc_count" .= Number n])
res <- searchTweets search
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1]))

63 tests/V1/Test/ApproxEq.hs Normal file

@ -0,0 +1,63 @@
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.ApproxEq where
import Database.V1.Bloodhound
import Test.Import
import qualified Data.List.NonEmpty as NE
-- | Typeclass for "equal where it matters". Use this to specify
-- less-strict equivalence for things such as lists that can wind up
-- in an unpredictable order
class ApproxEq a where
(=~) :: a -> a -> Bool
showApproxEq :: a -> String
default showApproxEq :: (Show a) => a -> String
showApproxEq = show
(==~) :: (ApproxEq a) => a -> a -> Property
a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b)
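-- Usage sketch: the JSON round-trip specs elsewhere in the suite use
-- (==~) roughly like this (the constraints are an assumption of the sketch):
--
-- prop_approxRoundTrip
--   :: (ApproxEq a, Show a, ToJSON a, FromJSON a) => a -> Property
-- prop_approxRoundTrip a = parseEither parseJSON (toJSON a) ==~ Right a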
instance ApproxEq NominalDiffTime where (=~) = (==)
instance ApproxEq Bool where (=~) = (==)
instance ApproxEq Int where (=~) = (==)
instance (Eq a, Show a) => ApproxEq (Maybe a) where (=~) = (==)
instance ApproxEq Char where
(=~) = (==)
instance ApproxEq NodeAttrFilter where (=~) = (==)
instance ApproxEq NodeAttrName where (=~) = (==)
instance (Eq a, Show a) => ApproxEq (NonEmpty a) where (=~) = (==)
instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where
Left a =~ Left b = a =~ b
Right a =~ Right b = a =~ b
_ =~ _ = False
showApproxEq (Left x) = "Left " <> showApproxEq x
showApproxEq (Right x) = "Right " <> showApproxEq x
instance (ApproxEq a, Show a) => ApproxEq [a] where
as =~ bs = and (zipWith (=~) as bs)
instance ApproxEq ReplicaCount where (=~) = (==)
instance ApproxEq ReplicaBounds where (=~) = (==)
instance ApproxEq Bytes where (=~) = (==)
instance ApproxEq AllocationPolicy where (=~) = (==)
instance ApproxEq InitialShardCount where (=~) = (==)
instance ApproxEq FSType where (=~) = (==)
-- | Due to the way NodeAttrFilters get serialized here, they may come
-- out in a different order, but they are morally equivalent.
instance ApproxEq UpdatableIndexSetting where
RoutingAllocationInclude a =~ RoutingAllocationInclude b =
NE.sort a =~ NE.sort b
RoutingAllocationExclude a =~ RoutingAllocationExclude b =
NE.sort a =~ NE.sort b
RoutingAllocationRequire a =~ RoutingAllocationRequire b =
NE.sort a =~ NE.sort b
a =~ b = a == b
showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs))
showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs))
showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs))
showApproxEq x = show x

77 tests/V1/Test/BulkAPI.hs Normal file

@ -0,0 +1,77 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.BulkAPI (spec) where
import Test.Common
import Test.Import
import qualified Data.Vector as V
import qualified Lens.Micro.Aeson as LMA
newtype BulkTest =
BulkTest Text
deriving (Eq, Show)
instance ToJSON BulkTest where
toJSON (BulkTest name') =
object ["name" .= name']
instance FromJSON BulkTest where
parseJSON = withObject "BulkTest" parse
where
parse o = do
t <- o .: "name"
BulkTest <$> parseJSON t
spec :: Spec
spec =
describe "Bulk API" $
it "inserts all documents we request" $ withTestEnv $ do
_ <- insertData
let firstTest = BulkTest "blah"
let secondTest = BulkTest "bloo"
let firstDoc = BulkIndex testIndex
testMapping (DocId "2") (toJSON firstTest)
let secondDoc = BulkCreate testIndex
testMapping (DocId "3") (toJSON secondTest)
let stream = V.fromList [firstDoc, secondDoc]
_ <- bulk stream
_ <- refreshIndex testIndex
fDoc <- getDocument testIndex testMapping (DocId "2")
sDoc <- getDocument testIndex testMapping (DocId "3")
-- both documents were created with explicit ids above, so we can
-- fetch them back directly and check their sources.
let maybeFirst =
eitherDecode
$ responseBody fDoc
:: Either String (EsResult BulkTest)
let maybeSecond =
eitherDecode
$ responseBody sDoc
:: Either String (EsResult BulkTest)
liftIO $ do
fmap getSource maybeFirst `shouldBe` Right (Just firstTest)
fmap getSource maybeSecond `shouldBe` Right (Just secondTest)
-- As a further check, both documents should also be found by a
-- match-all query.
let query = MatchAllQuery Nothing
let search = mkSearch (Just query) Nothing
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value))
case parsed of
Left e ->
liftIO $ expectationFailure ("Expected a successful search result but got: " <> show e)
(Right sr) -> do
liftIO $
hitsTotal (searchHits sr) `shouldBe` 3
let nameList :: [Text]
nameList =
hits (searchHits sr)
^.. traverse
. to hitSource
. _Just
. LMA.key "name"
. _String
liftIO $
nameList
`shouldBe` ["blah","bloo"]

289 tests/V1/Test/Common.hs Normal file

@ -0,0 +1,289 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Common where
import Test.Import
import qualified Data.Map as M
import qualified Data.Version as Vers
import qualified Network.HTTP.Types.Status as NHTS
testServer :: Server
testServer = Server "http://localhost:9200"
testIndex :: IndexName
testIndex = IndexName "bloodhound-tests-twitter-1"
testMapping :: MappingName
testMapping = MappingName "tweet"
withTestEnv :: BH IO a -> IO a
withTestEnv = withBH defaultManagerSettings testServer
data Location = Location { lat :: Double
, lon :: Double } deriving (Eq, Show)
data Tweet = Tweet { user :: Text
, postDate :: UTCTime
, message :: Text
, age :: Int
, location :: Location
, extra :: Maybe Text }
deriving (Eq, Show)
$(deriveJSON defaultOptions ''Location)
$(deriveJSON defaultOptions ''Tweet)
data ParentMapping = ParentMapping deriving (Eq, Show)
instance ToJSON ParentMapping where
toJSON ParentMapping =
object ["properties" .=
object [ "user" .= object ["type" .= ("string" :: Text)
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)]
]]
es13 :: Vers.Version
es13 = Vers.Version [1, 3, 0] []
es12 :: Vers.Version
es12 = Vers.Version [1, 2, 0] []
es11 :: Vers.Version
es11 = Vers.Version [1, 1, 0] []
es14 :: Vers.Version
es14 = Vers.Version [1, 4, 0] []
es15 :: Vers.Version
es15 = Vers.Version [1, 5, 0] []
es16 :: Vers.Version
es16 = Vers.Version [1, 6, 0] []
es20 :: Vers.Version
es20 = Vers.Version [2, 0, 0] []
es50 :: Vers.Version
es50 = Vers.Version [5, 0, 0] []
getServerVersion :: IO (Maybe Vers.Version)
getServerVersion = fmap extractVersion <$> withTestEnv getStatus
where
extractVersion = versionNumber . number . version
createExampleIndex :: (MonadBH m) => m Reply
createExampleIndex =
createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex
deleteExampleIndex :: (MonadBH m) => m Reply
deleteExampleIndex =
deleteIndex testIndex
validateStatus :: Show body => Response body -> Int -> Expectation
validateStatus resp expected =
if actual == expected
then return ()
else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body)
where
actual = NHTS.statusCode (responseStatus resp)
body = responseBody resp
data ChildMapping = ChildMapping deriving (Eq, Show)
instance ToJSON ChildMapping where
toJSON ChildMapping =
object ["_parent" .= object ["type" .= ("parent" :: Text)]
, "properties" .=
object [ "user" .= object ["type" .= ("string" :: Text)
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)]
]]
data TweetMapping = TweetMapping deriving (Eq, Show)
instance ToJSON TweetMapping where
toJSON TweetMapping =
object ["tweet" .=
object ["properties" .=
object [ "user" .= object [ "type" .= ("string" :: Text)
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)]
]]]
exampleTweet :: Tweet
exampleTweet = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 10)
, message = "Use haskell!"
, age = 10000
, location = Location 40.12 (-71.34)
, extra = Nothing }
tweetWithExtra :: Tweet
tweetWithExtra = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 10)
, message = "Use haskell!"
, age = 10000
, location = Location 40.12 (-71.34)
, extra = Just "blah blah" }
newAge :: Int
newAge = 31337
newUser :: Text
newUser = "someotherapp"
tweetPatch :: Value
tweetPatch =
object [ "age" .= newAge
, "user" .= newUser
]
patchedTweet :: Tweet
patchedTweet = exampleTweet{age = newAge, user = newUser}
otherTweet :: Tweet
otherTweet = Tweet { user = "notmyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 11)
, message = "Use haskell!"
, age = 1000
, location = Location 40.12 (-71.34)
, extra = Nothing }
resetIndex :: BH IO ()
resetIndex = do
_ <- deleteExampleIndex
_ <- createExampleIndex
_ <- putMapping testIndex testMapping TweetMapping
return ()
insertData :: BH IO Reply
insertData = do
resetIndex
insertData' defaultIndexDocumentSettings
insertData' :: IndexDocumentSettings -> BH IO Reply
insertData' ids = do
r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1")
_ <- refreshIndex testIndex
return r
updateData :: BH IO Reply
updateData = do
r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1")
_ <- refreshIndex testIndex
return r
insertOther :: BH IO ()
insertOther = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2")
_ <- refreshIndex testIndex
return ()
insertExtra :: BH IO ()
insertExtra = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4")
_ <- refreshIndex testIndex
return ()
insertWithSpaceInId :: BH IO ()
insertWithSpaceInId = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World")
_ <- refreshIndex testIndex
return ()
searchTweet :: Search -> BH IO (Either EsError Tweet)
searchTweet search = do
result <- searchTweets search
let myTweet :: Either EsError Tweet
myTweet = grabFirst result
return myTweet
searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet))
searchTweets search = parseEsResponse =<< searchByIndex testIndex search
searchExpectNoResults :: Search -> BH IO ()
searchExpectNoResults search = do
result <- searchTweets search
let emptyHits = fmap (hits . searchHits) result
liftIO $
emptyHits `shouldBe` Right []
searchExpectAggs :: Search -> BH IO ()
searchExpectAggs search = do
reply <- searchByIndex testIndex search
let isEmpty x = return (M.null x)
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
liftIO $
(result >>= aggregations >>= isEmpty) `shouldBe` Just False
searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) =>
Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO ()
searchValidBucketAgg search aggKey extractor = do
reply <- searchByIndex testIndex search
let bucketDocs = docCount . head . buckets
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x)
liftIO $
count `shouldBe` Just 1
searchTermsAggHint :: [ExecutionHint] -> BH IO ()
searchTermsAggHint hints = do
let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint }
let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint
forM_ hints $ searchExpectAggs . search
forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms)
searchTweetHighlight :: Search
-> BH IO (Either EsError (Maybe HitHighlight))
searchTweetHighlight search = do
result <- searchTweets search
let tweetHit :: Either EsError (Maybe (Hit Tweet))
tweetHit = fmap (headMay . hits . searchHits) result
myHighlight :: Either EsError (Maybe HitHighlight)
myHighlight = (join . fmap hitHighlight) <$> tweetHit
return myHighlight
searchExpectSource :: Source -> Either EsError Value -> BH IO ()
searchExpectSource src expected = do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell")
let search = (mkSearch (Just query) Nothing) { source = Just src }
reply <- searchByIndex testIndex search
result <- parseEsResponse reply
let value = grabFirst result
liftIO $
value `shouldBe` expected
atleast :: Vers.Version -> IO Bool
atleast v = getServerVersion >>= \x -> return $ x >= Just v
atmost :: Vers.Version -> IO Bool
atmost v = getServerVersion >>= \x -> return $ x <= Just v
is :: Vers.Version -> IO Bool
is v = getServerVersion >>= \x -> return $ x == Just v
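-- Usage sketch: gating a spec on server version (the test body here is
-- hypothetical):
--
-- it "only runs against ES >= 5.0" $ when' (atleast es50) $ withTestEnv $ do
--   resp <- createExampleIndex
--   liftIO (validateStatus resp 200)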


@ -0,0 +1,52 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Documents where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "document API" $ do
it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do
_ <- insertData
_ <- updateData
docInserted <- getDocument testIndex testMapping (DocId "1")
let newTweet = eitherDecode
(responseBody docInserted) :: Either String (EsResult Tweet)
liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet)
it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do
_ <- insertWithSpaceInId
docInserted <- getDocument testIndex testMapping (DocId "Hello World")
let newTweet = eitherDecode
(responseBody docInserted) :: Either String (EsResult Tweet)
liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet)
it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do
doc <- getDocument testIndex testMapping (DocId "bogus")
let noTweet = eitherDecode
(responseBody doc) :: Either String (EsResult Tweet)
liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing
it "can use optimistic concurrency control" $ withTestEnv $ do
let ev = ExternalDocVersion minBound
let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev }
resetIndex
res <- insertData' cfg
liftIO $ isCreated res `shouldBe` True
res' <- insertData' cfg
liftIO $ isVersionConflict res' `shouldBe` True
it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do
resetIndex
let validateStatus' stat = liftIO . flip validateStatus stat
_ <- validateStatus' 200 =<< putMapping testIndex (MappingName "child") ChildMapping
_ <- validateStatus' 200 =<< putMapping testIndex (MappingName "parent") ParentMapping
_ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1")
let parent = (Just . DocumentParent . DocId) "1"
ids = IndexDocumentSettings NoVersionControl parent
_ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2")
_ <- refreshIndex testIndex
exists <- documentExists testIndex (MappingName "child") parent (DocId "2")
liftIO $ exists `shouldBe` True

432 tests/V1/Test/Generators.hs Normal file

@ -0,0 +1,432 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Generators where
import Database.V1.Bloodhound
import Test.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Version as Vers
import Test.QuickCheck.TH.Generators
import Test.ApproxEq
instance Arbitrary NominalDiffTime where
arbitrary = fromInteger <$> arbitrary
#if !MIN_VERSION_QuickCheck(2,8,0)
instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where
arbitrary = M.fromList <$> arbitrary
#endif
instance Arbitrary Text where
arbitrary = T.pack <$> arbitrary
instance Arbitrary UTCTime where
arbitrary = UTCTime
<$> arbitrary
<*> (fromRational . toRational <$> choose (0::Double, 86400))
instance Arbitrary Day where
arbitrary =
ModifiedJulianDay . (2000 +) <$> arbitrary
shrink =
(ModifiedJulianDay <$>) . shrink . toModifiedJulianDay
#if !MIN_VERSION_QuickCheck(2,9,0)
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary = liftA2 (:|) arbitrary arbitrary
#endif
arbitraryScore :: Gen Score
arbitraryScore = fmap getPositive <$> arbitrary
instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where
arbitrary = Hit <$> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitraryScore
<*> arbitrary
<*> arbitrary
instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where
arbitrary = reduceSize $ do
tot <- getPositive <$> arbitrary
score <- arbitraryScore
hs <- arbitrary
return $ SearchHits tot score hs
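-- | Halve the generator's size parameter; keeps recursively generated
-- structures such as queries and filters from blowing up.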
reduceSize :: Gen a -> Gen a
reduceSize f = sized $ \n -> resize (n `div` 2) f
arbitraryAlphaNum :: Gen Char
arbitraryAlphaNum = oneof [ choose ('a', 'z')
                          , choose ('A', 'Z')
                          , choose ('0', '9')
                          ]
instance Arbitrary RoutingValue where
arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum
instance Arbitrary AliasRouting where
arbitrary = oneof [allAlias
,one
,theOther
,both']
where one = GranularAliasRouting
<$> (Just <$> arbitrary)
<*> pure Nothing
theOther = GranularAliasRouting Nothing
<$> (Just <$> arbitrary)
both' = GranularAliasRouting
<$> (Just <$> arbitrary)
<*> (Just <$> arbitrary)
allAlias = AllAliasRouting <$> arbitrary
instance Arbitrary FieldName where
arbitrary =
FieldName
. T.pack
<$> listOf1 arbitraryAlphaNum
#if MIN_VERSION_base(4,10,0)
-- Test.QuickCheck.Modifiers
qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a
qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs)
qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!"
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary =
qcNonEmptyToNonEmpty
<$> arbitrary
#endif
instance Arbitrary RegexpFlags where
arbitrary = oneof [ pure AllRegexpFlags
, pure NoRegexpFlags
, SomeRegexpFlags <$> genUniqueFlags
]
where genUniqueFlags =
NE.fromList . L.nub
<$> listOf1 arbitrary
instance Arbitrary IndexAliasCreate where
arbitrary =
IndexAliasCreate
<$> arbitrary
<*> reduceSize arbitrary
instance Arbitrary ReplicaBounds where
arbitrary = oneof [ replicasBounded
, replicasLowerBounded
, pure ReplicasUnbounded
]
where replicasBounded = do
Positive a <- arbitrary
Positive b <- arbitrary
return (ReplicasBounded a b)
replicasLowerBounded = do
Positive a <- arbitrary
return (ReplicasLowerBounded a)
instance Arbitrary NodeAttrName where
arbitrary =
NodeAttrName
. T.pack
<$> listOf1 arbitraryAlphaNum
instance Arbitrary NodeAttrFilter where
arbitrary = do
n <- arbitrary
s:ss <- listOf1 (listOf1 arbitraryAlphaNum)
let ts = T.pack <$> s :| ss
return (NodeAttrFilter n ts)
instance Arbitrary VersionNumber where
arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary
where
mk versions = VersionNumber (Vers.Version versions [])
instance Arbitrary TemplateQueryKeyValuePairs where
arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary
shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . shrink $ HM.toList x
makeArbitrary ''FilteredQuery
instance Arbitrary FilteredQuery where arbitrary = reduceSize arbitraryFilteredQuery
makeArbitrary ''Query
instance Arbitrary Query where arbitrary = reduceSize arbitraryQuery
makeArbitrary ''Filter
instance Arbitrary Filter where arbitrary = reduceSize arbitraryFilter
makeArbitrary ''IndexName
instance Arbitrary IndexName where arbitrary = arbitraryIndexName
makeArbitrary ''MappingName
instance Arbitrary MappingName where arbitrary = arbitraryMappingName
makeArbitrary ''DocId
instance Arbitrary DocId where arbitrary = arbitraryDocId
makeArbitrary ''Version
instance Arbitrary Version where arbitrary = arbitraryVersion
makeArbitrary ''BuildHash
instance Arbitrary BuildHash where arbitrary = arbitraryBuildHash
makeArbitrary ''IndexAliasRouting
instance Arbitrary IndexAliasRouting where arbitrary = arbitraryIndexAliasRouting
makeArbitrary ''ShardCount
instance Arbitrary ShardCount where arbitrary = arbitraryShardCount
makeArbitrary ''ReplicaCount
instance Arbitrary ReplicaCount where arbitrary = arbitraryReplicaCount
makeArbitrary ''TemplateName
instance Arbitrary TemplateName where arbitrary = arbitraryTemplateName
makeArbitrary ''TemplatePattern
instance Arbitrary TemplatePattern where arbitrary = arbitraryTemplatePattern
makeArbitrary ''QueryString
instance Arbitrary QueryString where arbitrary = arbitraryQueryString
makeArbitrary ''CacheName
instance Arbitrary CacheName where arbitrary = arbitraryCacheName
makeArbitrary ''CacheKey
instance Arbitrary CacheKey where arbitrary = arbitraryCacheKey
makeArbitrary ''Existence
instance Arbitrary Existence where arbitrary = arbitraryExistence
makeArbitrary ''CutoffFrequency
instance Arbitrary CutoffFrequency where arbitrary = arbitraryCutoffFrequency
makeArbitrary ''Analyzer
instance Arbitrary Analyzer where arbitrary = arbitraryAnalyzer
makeArbitrary ''MaxExpansions
instance Arbitrary MaxExpansions where arbitrary = arbitraryMaxExpansions
makeArbitrary ''Lenient
instance Arbitrary Lenient where arbitrary = arbitraryLenient
makeArbitrary ''Tiebreaker
instance Arbitrary Tiebreaker where arbitrary = arbitraryTiebreaker
makeArbitrary ''Boost
instance Arbitrary Boost where arbitrary = arbitraryBoost
makeArbitrary ''BoostTerms
instance Arbitrary BoostTerms where arbitrary = arbitraryBoostTerms
makeArbitrary ''MinimumMatch
instance Arbitrary MinimumMatch where arbitrary = arbitraryMinimumMatch
makeArbitrary ''DisableCoord
instance Arbitrary DisableCoord where arbitrary = arbitraryDisableCoord
makeArbitrary ''IgnoreTermFrequency
instance Arbitrary IgnoreTermFrequency where arbitrary = arbitraryIgnoreTermFrequency
makeArbitrary ''MinimumTermFrequency
instance Arbitrary MinimumTermFrequency where arbitrary = arbitraryMinimumTermFrequency
makeArbitrary ''MaxQueryTerms
instance Arbitrary MaxQueryTerms where arbitrary = arbitraryMaxQueryTerms
makeArbitrary ''Fuzziness
instance Arbitrary Fuzziness where arbitrary = arbitraryFuzziness
makeArbitrary ''PrefixLength
instance Arbitrary PrefixLength where arbitrary = arbitraryPrefixLength
makeArbitrary ''TypeName
instance Arbitrary TypeName where arbitrary = arbitraryTypeName
makeArbitrary ''PercentMatch
instance Arbitrary PercentMatch where arbitrary = arbitraryPercentMatch
makeArbitrary ''StopWord
instance Arbitrary StopWord where arbitrary = arbitraryStopWord
makeArbitrary ''QueryPath
instance Arbitrary QueryPath where arbitrary = arbitraryQueryPath
makeArbitrary ''AllowLeadingWildcard
instance Arbitrary AllowLeadingWildcard where arbitrary = arbitraryAllowLeadingWildcard
makeArbitrary ''LowercaseExpanded
instance Arbitrary LowercaseExpanded where arbitrary = arbitraryLowercaseExpanded
makeArbitrary ''EnablePositionIncrements
instance Arbitrary EnablePositionIncrements where arbitrary = arbitraryEnablePositionIncrements
makeArbitrary ''AnalyzeWildcard
instance Arbitrary AnalyzeWildcard where arbitrary = arbitraryAnalyzeWildcard
makeArbitrary ''GeneratePhraseQueries
instance Arbitrary GeneratePhraseQueries where arbitrary = arbitraryGeneratePhraseQueries
makeArbitrary ''Locale
instance Arbitrary Locale where arbitrary = arbitraryLocale
makeArbitrary ''MaxWordLength
instance Arbitrary MaxWordLength where arbitrary = arbitraryMaxWordLength
makeArbitrary ''MinWordLength
instance Arbitrary MinWordLength where arbitrary = arbitraryMinWordLength
makeArbitrary ''PhraseSlop
instance Arbitrary PhraseSlop where arbitrary = arbitraryPhraseSlop
makeArbitrary ''MinDocFrequency
instance Arbitrary MinDocFrequency where arbitrary = arbitraryMinDocFrequency
makeArbitrary ''MaxDocFrequency
instance Arbitrary MaxDocFrequency where arbitrary = arbitraryMaxDocFrequency
makeArbitrary ''Regexp
instance Arbitrary Regexp where arbitrary = arbitraryRegexp
makeArbitrary ''SimpleQueryStringQuery
instance Arbitrary SimpleQueryStringQuery where arbitrary = arbitrarySimpleQueryStringQuery
makeArbitrary ''FieldOrFields
instance Arbitrary FieldOrFields where arbitrary = arbitraryFieldOrFields
makeArbitrary ''SimpleQueryFlag
instance Arbitrary SimpleQueryFlag where arbitrary = arbitrarySimpleQueryFlag
makeArbitrary ''RegexpQuery
instance Arbitrary RegexpQuery where arbitrary = arbitraryRegexpQuery
makeArbitrary ''QueryStringQuery
instance Arbitrary QueryStringQuery where arbitrary = arbitraryQueryStringQuery
makeArbitrary ''RangeQuery
instance Arbitrary RangeQuery where arbitrary = arbitraryRangeQuery
makeArbitrary ''RangeValue
instance Arbitrary RangeValue where arbitrary = arbitraryRangeValue
makeArbitrary ''PrefixQuery
instance Arbitrary PrefixQuery where arbitrary = arbitraryPrefixQuery
makeArbitrary ''NestedQuery
instance Arbitrary NestedQuery where arbitrary = arbitraryNestedQuery
makeArbitrary ''MoreLikeThisFieldQuery
instance Arbitrary MoreLikeThisFieldQuery where arbitrary = arbitraryMoreLikeThisFieldQuery
makeArbitrary ''MoreLikeThisQuery
instance Arbitrary MoreLikeThisQuery where arbitrary = arbitraryMoreLikeThisQuery
makeArbitrary ''IndicesQuery
instance Arbitrary IndicesQuery where arbitrary = arbitraryIndicesQuery
makeArbitrary ''HasParentQuery
instance Arbitrary HasParentQuery where arbitrary = arbitraryHasParentQuery
makeArbitrary ''HasChildQuery
instance Arbitrary HasChildQuery where arbitrary = arbitraryHasChildQuery
makeArbitrary ''FuzzyQuery
instance Arbitrary FuzzyQuery where arbitrary = arbitraryFuzzyQuery
makeArbitrary ''FuzzyLikeFieldQuery
instance Arbitrary FuzzyLikeFieldQuery where arbitrary = arbitraryFuzzyLikeFieldQuery
makeArbitrary ''FuzzyLikeThisQuery
instance Arbitrary FuzzyLikeThisQuery where arbitrary = arbitraryFuzzyLikeThisQuery
makeArbitrary ''DisMaxQuery
instance Arbitrary DisMaxQuery where arbitrary = arbitraryDisMaxQuery
makeArbitrary ''CommonTermsQuery
instance Arbitrary CommonTermsQuery where arbitrary = arbitraryCommonTermsQuery
makeArbitrary ''DistanceRange
instance Arbitrary DistanceRange where arbitrary = arbitraryDistanceRange
makeArbitrary ''MultiMatchQuery
instance Arbitrary MultiMatchQuery where arbitrary = arbitraryMultiMatchQuery
makeArbitrary ''LessThanD
instance Arbitrary LessThanD where arbitrary = arbitraryLessThanD
makeArbitrary ''LessThanEqD
instance Arbitrary LessThanEqD where arbitrary = arbitraryLessThanEqD
makeArbitrary ''GreaterThanD
instance Arbitrary GreaterThanD where arbitrary = arbitraryGreaterThanD
makeArbitrary ''GreaterThanEqD
instance Arbitrary GreaterThanEqD where arbitrary = arbitraryGreaterThanEqD
makeArbitrary ''LessThan
instance Arbitrary LessThan where arbitrary = arbitraryLessThan
makeArbitrary ''LessThanEq
instance Arbitrary LessThanEq where arbitrary = arbitraryLessThanEq
makeArbitrary ''GreaterThan
instance Arbitrary GreaterThan where arbitrary = arbitraryGreaterThan
makeArbitrary ''GreaterThanEq
instance Arbitrary GreaterThanEq where arbitrary = arbitraryGreaterThanEq
makeArbitrary ''GeoPoint
instance Arbitrary GeoPoint where arbitrary = arbitraryGeoPoint
makeArbitrary ''NullValue
instance Arbitrary NullValue where arbitrary = arbitraryNullValue
makeArbitrary ''MinimumMatchHighLow
instance Arbitrary MinimumMatchHighLow where arbitrary = arbitraryMinimumMatchHighLow
makeArbitrary ''CommonMinimumMatch
instance Arbitrary CommonMinimumMatch where arbitrary = arbitraryCommonMinimumMatch
makeArbitrary ''BoostingQuery
instance Arbitrary BoostingQuery where arbitrary = arbitraryBoostingQuery
makeArbitrary ''BoolQuery
instance Arbitrary BoolQuery where arbitrary = arbitraryBoolQuery
makeArbitrary ''MatchQuery
instance Arbitrary MatchQuery where arbitrary = arbitraryMatchQuery
makeArbitrary ''MultiMatchQueryType
instance Arbitrary MultiMatchQueryType where arbitrary = arbitraryMultiMatchQueryType
makeArbitrary ''BooleanOperator
instance Arbitrary BooleanOperator where arbitrary = arbitraryBooleanOperator
makeArbitrary ''ZeroTermsQuery
instance Arbitrary ZeroTermsQuery where arbitrary = arbitraryZeroTermsQuery
makeArbitrary ''MatchQueryType
instance Arbitrary MatchQueryType where arbitrary = arbitraryMatchQueryType
makeArbitrary ''SearchAliasRouting
instance Arbitrary SearchAliasRouting where arbitrary = arbitrarySearchAliasRouting
makeArbitrary ''ScoreType
instance Arbitrary ScoreType where arbitrary = arbitraryScoreType
makeArbitrary ''Distance
instance Arbitrary Distance where arbitrary = arbitraryDistance
makeArbitrary ''DistanceUnit
instance Arbitrary DistanceUnit where arbitrary = arbitraryDistanceUnit
makeArbitrary ''DistanceType
instance Arbitrary DistanceType where arbitrary = arbitraryDistanceType
makeArbitrary ''OptimizeBbox
instance Arbitrary OptimizeBbox where arbitrary = arbitraryOptimizeBbox
makeArbitrary ''GeoBoundingBoxConstraint
instance Arbitrary GeoBoundingBoxConstraint where arbitrary = arbitraryGeoBoundingBoxConstraint
makeArbitrary ''GeoFilterType
instance Arbitrary GeoFilterType where arbitrary = arbitraryGeoFilterType
makeArbitrary ''GeoBoundingBox
instance Arbitrary GeoBoundingBox where arbitrary = arbitraryGeoBoundingBox
makeArbitrary ''LatLon
instance Arbitrary LatLon where arbitrary = arbitraryLatLon
makeArbitrary ''RangeExecution
instance Arbitrary RangeExecution where arbitrary = arbitraryRangeExecution
makeArbitrary ''RegexpFlag
instance Arbitrary RegexpFlag where arbitrary = arbitraryRegexpFlag
makeArbitrary ''BoolMatch
instance Arbitrary BoolMatch where arbitrary = arbitraryBoolMatch
makeArbitrary ''Term
instance Arbitrary Term where arbitrary = arbitraryTerm
makeArbitrary ''IndexSettings
instance Arbitrary IndexSettings where arbitrary = arbitraryIndexSettings
makeArbitrary ''UpdatableIndexSetting
instance Arbitrary UpdatableIndexSetting where
arbitrary = arbitraryUpdatableIndexSetting
makeArbitrary ''Bytes
instance Arbitrary Bytes where arbitrary = arbitraryBytes
makeArbitrary ''AllocationPolicy
instance Arbitrary AllocationPolicy where arbitrary = arbitraryAllocationPolicy
makeArbitrary ''InitialShardCount
instance Arbitrary InitialShardCount where arbitrary = arbitraryInitialShardCount
makeArbitrary ''FSType
instance Arbitrary FSType where arbitrary = arbitraryFSType
makeArbitrary ''CompoundFormat
instance Arbitrary CompoundFormat where arbitrary = arbitraryCompoundFormat
makeArbitrary ''FsSnapshotRepo
instance Arbitrary FsSnapshotRepo where arbitrary = arbitraryFsSnapshotRepo
makeArbitrary ''SnapshotRepoName
instance Arbitrary SnapshotRepoName where arbitrary = arbitrarySnapshotRepoName
makeArbitrary ''TemplateQueryInline
instance Arbitrary TemplateQueryInline where arbitrary = arbitraryTemplateQueryInline
makeArbitrary ''DirectGeneratorSuggestModeTypes
instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = arbitraryDirectGeneratorSuggestModeTypes
makeArbitrary ''DirectGenerators
instance Arbitrary DirectGenerators where arbitrary = arbitraryDirectGenerators
makeArbitrary ''PhraseSuggesterCollate
instance Arbitrary PhraseSuggesterCollate where arbitrary = arbitraryPhraseSuggesterCollate
makeArbitrary ''PhraseSuggesterHighlighter
instance Arbitrary PhraseSuggesterHighlighter where arbitrary = arbitraryPhraseSuggesterHighlighter
makeArbitrary ''Size
instance Arbitrary Size where arbitrary = arbitrarySize
makeArbitrary ''PhraseSuggester
instance Arbitrary PhraseSuggester where arbitrary = arbitraryPhraseSuggester
makeArbitrary ''SuggestType
instance Arbitrary SuggestType where arbitrary = arbitrarySuggestType
makeArbitrary ''Suggest
instance Arbitrary Suggest where arbitrary = arbitrarySuggest
makeArbitrary ''Script
instance Arbitrary Script where arbitrary = arbitraryScript
newtype UpdatableIndexSetting' =
UpdatableIndexSetting' UpdatableIndexSetting
deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable)
instance Arbitrary UpdatableIndexSetting' where
arbitrary = do
settings <- arbitrary
return $ UpdatableIndexSetting' $ case settings of
RoutingAllocationInclude xs ->
RoutingAllocationInclude (dropDuplicateAttrNames xs)
RoutingAllocationExclude xs ->
RoutingAllocationExclude (dropDuplicateAttrNames xs)
RoutingAllocationRequire xs ->
RoutingAllocationRequire (dropDuplicateAttrNames xs)
x -> x
where
dropDuplicateAttrNames =
NE.fromList . L.nubBy sameAttrName . NE.toList
sameAttrName a b =
nodeAttrFilterName a == nodeAttrFilterName b
-- shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (shrink x)

View File

@ -0,0 +1,32 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Highlights where
import Test.Common
import Test.Import
import qualified Data.Map as M
initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight))
initHighlights fieldName = do
_ <- insertData
_ <- insertOther
let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell")
let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing]
let search = mkHighlightSearch (Just query) testHighlight
searchTweetHighlight search
spec :: Spec
spec =
describe "Highlights API" $ do
it "returns highlight from query when there should be one" $ withTestEnv $ do
myHighlight <- initHighlights "message"
liftIO $
myHighlight `shouldBe`
Right (Just (M.fromList [("message", ["Use <em>haskell</em>!"])]))
it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do
myHighlight <- initHighlights "user"
liftIO $
myHighlight `shouldBe`
Right Nothing

58 tests/V1/Test/Import.hs Normal file

@ -0,0 +1,58 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Import
( module X
, module Test.Import
) where
import Control.Applicative as X
import Control.Exception as X (evaluate)
import Control.Monad as X
import Control.Monad.Catch as X
import Control.Monad.Reader as X
import Data.Aeson as X
import Data.Aeson.TH as X
import Data.Aeson.Types as X (parseEither)
import Data.Maybe as X
import Data.List.NonEmpty as X (NonEmpty(..))
import Data.Monoid as X
import Data.Ord as X (comparing)
import Data.Proxy as X
import Data.Text as X (Text)
import Data.Time.Calendar as X (Day(..), fromGregorian)
import Data.Time.Clock as X
import Data.Typeable as X
import Database.V1.Bloodhound as X hiding (key)
import Lens.Micro as X
import Lens.Micro.Aeson as X
import Network.HTTP.Client as X hiding (Proxy, fileSize)
import System.IO.Temp as X
import System.PosixCompat.Files as X
import Test.Hspec as X
import Test.Hspec.QuickCheck as X (prop)
import Test.QuickCheck as X hiding (Result, Success)
import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid)
import Text.Pretty.Simple as X (pPrint)
import qualified Data.List as L
noDuplicates :: Eq a => [a] -> Bool
noDuplicates xs = L.nub xs == xs
getSource :: EsResult a -> Maybe a
getSource = fmap _source . foundResult
grabFirst :: Either EsError (SearchResult a) -> Either EsError a
grabFirst r =
case fmap (hitSource . head . hits . searchHits) r of
(Left e) -> Left e
(Right Nothing) -> Left (EsError 500 "Source was missing")
(Right (Just x)) -> Right x
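-- | Run the action only when the monadic condition yields True; used to
-- gate version-specific specs.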
when' :: Monad m => m Bool -> m () -> m ()
when' b f = b >>= \x -> when x f
headMay :: [a] -> Maybe a
headMay (x : _) = Just x
headMay _ = Nothing

78 tests/V1/Test/Indices.hs Normal file

@ -0,0 +1,78 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Indices where
import Test.Common
import Test.Import
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
spec :: Spec
spec = do
describe "Index create/delete API" $ do
it "creates and then deletes the requested index" $ withTestEnv $ do
-- priming state.
_ <- deleteExampleIndex
resp <- createExampleIndex
deleteResp <- deleteExampleIndex
liftIO $ do
validateStatus resp 200
validateStatus deleteResp 200
describe "Index aliases" $ do
let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")
let alias = IndexAlias (testIndex) aname
let create = IndexAliasCreate Nothing Nothing
let action = AddAlias alias create
it "handles the simple case of aliasing an existing index" $ do
withTestEnv $ do
resetIndex
resp <- updateIndexAliases (action :| [])
liftIO $ validateStatus resp 200
let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
(do aliases <- withTestEnv getIndexAliases
let expected = IndexAliasSummary alias create
case aliases of
Right (IndexAliasesSummary summs) ->
L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
describe "Index Listing" $ do
it "returns a list of index names" $ withTestEnv $ do
_ <- createExampleIndex
ixns <- listIndices
liftIO (ixns `shouldContain` [testIndex])
describe "Index Settings" $ do
it "persists settings" $ withTestEnv $ do
_ <- deleteExampleIndex
_ <- createExampleIndex
let updates = BlocksWrite False :| []
updateResp <- updateIndexSettings updates testIndex
liftIO $ validateStatus updateResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
(NE.toList updates))
it "allows total fields to be set" $ when' (atleast es50) $ withTestEnv $ do
_ <- deleteExampleIndex
_ <- createExampleIndex
let updates = FailOnMergeFailure True :| []
updateResp <- updateIndexSettings updates testIndex
liftIO $ validateStatus updateResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
(NE.toList updates))
describe "Index Optimization" $ do
it "returns a successful response upon completion" $ withTestEnv $ do
_ <- createExampleIndex
resp <- optimizeIndex (IndexList (testIndex :| [])) defaultIndexOptimizationSettings
liftIO $ validateStatus resp 200

202 tests/V1/Test/JSON.hs Normal file

@ -0,0 +1,202 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.JSON (spec) where
import Test.Import
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import Test.ApproxEq
import Test.Generators
propJSON :: forall a
. ( Arbitrary a
, ToJSON a
, FromJSON a
, Show a
, Eq a
, Typeable a
)
=> Proxy a -> Spec
propJSON _ = prop testName $ \(a :: a) ->
let jsonStr = "via " <> BL8.unpack (encode a)
in counterexample jsonStr (parseEither parseJSON (toJSON a)
=== Right a)
where testName = show ty <> " FromJSON/ToJSON roundtrips"
ty = typeOf (undefined :: a)
propApproxJSON :: forall a
. ( Arbitrary a
, ToJSON a
, FromJSON a
, Show a
, ApproxEq a
, Typeable a
)
=> Proxy a -> Spec
propApproxJSON _ = prop testName $ \(a :: a) ->
let jsonStr = "via " <> BL8.unpack (encode a)
in counterexample jsonStr (parseEither parseJSON (toJSON a)
==~ Right a)
where testName = show ty <> " FromJSON/ToJSON roundtrips"
ty = typeOf (undefined :: a)
spec :: Spec
spec = do
describe "ToJSON RegexpFlags" $ do
it "generates the correct JSON for AllRegexpFlags" $
toJSON AllRegexpFlags `shouldBe` String "ALL"
it "generates the correct JSON for NoRegexpFlags" $
toJSON NoRegexpFlags `shouldBe` String "NONE"
it "generates the correct JSON for SomeRegexpFlags" $
let flags = AnyString :| [ Automaton
, Complement
, Empty
, Intersection
, Interval ]
in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL"
prop "removes duplicates from flags" $ \(flags :: RegexpFlags) ->
let String str = toJSON flags
flagStrs = T.splitOn "|" str
in noDuplicates flagStrs
describe "omitNulls" $ do
it "checks that omitNulls drops list elements when it should" $
let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int]))
, "test2" .= (toJSON ("some value" :: Text))]
in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
it "checks that omitNulls doesn't drop list elements when it shouldn't" $
let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int]))
, "test2" .= (toJSON ("some value" :: Text))]
in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0]))
, ("test2", String "some value")])
it "checks that omitNulls drops non list elements when it should" $
let dropped = omitNulls $ [ "test1" .= (toJSON Null)
, "test2" .= (toJSON ("some value" :: Text))]
in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
it "checks that omitNulls doesn't drop non list elements when it shouldn't" $
let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int))
, "test2" .= (toJSON ("some value" :: Text))]
in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0)
, ("test2", String "some value")])
describe "Exact isomorphism JSON instances" $ do
propJSON (Proxy :: Proxy Version)
propJSON (Proxy :: Proxy IndexName)
propJSON (Proxy :: Proxy MappingName)
propJSON (Proxy :: Proxy DocId)
propJSON (Proxy :: Proxy IndexAliasRouting)
propJSON (Proxy :: Proxy RoutingValue)
propJSON (Proxy :: Proxy ShardCount)
propJSON (Proxy :: Proxy ReplicaCount)
propJSON (Proxy :: Proxy TemplateName)
propJSON (Proxy :: Proxy TemplatePattern)
propJSON (Proxy :: Proxy QueryString)
propJSON (Proxy :: Proxy FieldName)
propJSON (Proxy :: Proxy CacheName)
propJSON (Proxy :: Proxy CacheKey)
propJSON (Proxy :: Proxy Existence)
propJSON (Proxy :: Proxy CutoffFrequency)
propJSON (Proxy :: Proxy Analyzer)
propJSON (Proxy :: Proxy MaxExpansions)
propJSON (Proxy :: Proxy Lenient)
propJSON (Proxy :: Proxy Tiebreaker)
propJSON (Proxy :: Proxy Boost)
propJSON (Proxy :: Proxy BoostTerms)
propJSON (Proxy :: Proxy MinimumMatch)
propJSON (Proxy :: Proxy DisableCoord)
propJSON (Proxy :: Proxy IgnoreTermFrequency)
propJSON (Proxy :: Proxy MinimumTermFrequency)
propJSON (Proxy :: Proxy MaxQueryTerms)
propJSON (Proxy :: Proxy Fuzziness)
propJSON (Proxy :: Proxy PrefixLength)
propJSON (Proxy :: Proxy TypeName)
propJSON (Proxy :: Proxy PercentMatch)
propJSON (Proxy :: Proxy StopWord)
propJSON (Proxy :: Proxy QueryPath)
propJSON (Proxy :: Proxy AllowLeadingWildcard)
propJSON (Proxy :: Proxy LowercaseExpanded)
propJSON (Proxy :: Proxy EnablePositionIncrements)
propJSON (Proxy :: Proxy AnalyzeWildcard)
propJSON (Proxy :: Proxy GeneratePhraseQueries)
propJSON (Proxy :: Proxy Locale)
propJSON (Proxy :: Proxy MaxWordLength)
propJSON (Proxy :: Proxy MinWordLength)
propJSON (Proxy :: Proxy PhraseSlop)
propJSON (Proxy :: Proxy MinDocFrequency)
propJSON (Proxy :: Proxy MaxDocFrequency)
propJSON (Proxy :: Proxy Filter)
propJSON (Proxy :: Proxy Query)
propJSON (Proxy :: Proxy SimpleQueryStringQuery)
propJSON (Proxy :: Proxy FieldOrFields)
propJSON (Proxy :: Proxy SimpleQueryFlag)
propJSON (Proxy :: Proxy RegexpQuery)
propJSON (Proxy :: Proxy QueryStringQuery)
propJSON (Proxy :: Proxy RangeQuery)
propJSON (Proxy :: Proxy PrefixQuery)
propJSON (Proxy :: Proxy NestedQuery)
propJSON (Proxy :: Proxy MoreLikeThisFieldQuery)
propJSON (Proxy :: Proxy MoreLikeThisQuery)
propJSON (Proxy :: Proxy IndicesQuery)
propJSON (Proxy :: Proxy HasParentQuery)
propJSON (Proxy :: Proxy HasChildQuery)
propJSON (Proxy :: Proxy FuzzyQuery)
propJSON (Proxy :: Proxy FuzzyLikeFieldQuery)
propJSON (Proxy :: Proxy FuzzyLikeThisQuery)
propJSON (Proxy :: Proxy DisMaxQuery)
propJSON (Proxy :: Proxy CommonTermsQuery)
propJSON (Proxy :: Proxy CommonMinimumMatch)
propJSON (Proxy :: Proxy BoostingQuery)
propJSON (Proxy :: Proxy BoolQuery)
propJSON (Proxy :: Proxy MatchQuery)
propJSON (Proxy :: Proxy MultiMatchQueryType)
propJSON (Proxy :: Proxy BooleanOperator)
propJSON (Proxy :: Proxy ZeroTermsQuery)
propJSON (Proxy :: Proxy MatchQueryType)
propJSON (Proxy :: Proxy AliasRouting)
propJSON (Proxy :: Proxy IndexAliasCreate)
propJSON (Proxy :: Proxy SearchAliasRouting)
propJSON (Proxy :: Proxy ScoreType)
propJSON (Proxy :: Proxy Distance)
propJSON (Proxy :: Proxy DistanceUnit)
propJSON (Proxy :: Proxy DistanceType)
propJSON (Proxy :: Proxy OptimizeBbox)
propJSON (Proxy :: Proxy GeoBoundingBoxConstraint)
propJSON (Proxy :: Proxy GeoFilterType)
propJSON (Proxy :: Proxy GeoBoundingBox)
propJSON (Proxy :: Proxy LatLon)
propJSON (Proxy :: Proxy RangeExecution)
prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs ->
let expected = case rfs of
SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (L.nub (NE.toList fs)))
x -> x
in parseEither parseJSON (toJSON rfs) === Right expected
propJSON (Proxy :: Proxy BoolMatch)
propJSON (Proxy :: Proxy Term)
propJSON (Proxy :: Proxy MultiMatchQuery)
propJSON (Proxy :: Proxy IndexSettings)
propJSON (Proxy :: Proxy CompoundFormat)
propJSON (Proxy :: Proxy TemplateQueryInline)
propJSON (Proxy :: Proxy Suggest)
propJSON (Proxy :: Proxy DirectGenerators)
propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes)
describe "Approximate isomorphism JSON instances" $ do
propApproxJSON (Proxy :: Proxy UpdatableIndexSetting')
propApproxJSON (Proxy :: Proxy ReplicaCount)
propApproxJSON (Proxy :: Proxy ReplicaBounds)
propApproxJSON (Proxy :: Proxy Bytes)
propApproxJSON (Proxy :: Proxy AllocationPolicy)
propApproxJSON (Proxy :: Proxy InitialShardCount)
propApproxJSON (Proxy :: Proxy FSType)
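Each propJSON line above contributes one QuickCheck property asserting that ToJSON and FromJSON are exact inverses for the named type. A minimal sketch of such a helper, assuming the usual aeson and hspec-quickcheck machinery (the suite's real propJSON also derives the property name from the type; this version is illustrative only):

{-# LANGUAGE ScopedTypeVariables #-}
import Data.Aeson (FromJSON, ToJSON, parseJSON, toJSON)
import Data.Aeson.Types (parseEither)
import Data.Proxy (Proxy)
import Test.Hspec (Spec)
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (Arbitrary, (===))

-- One roundtrip property per type: decode . encode must be Right id.
propJSONSketch
  :: forall a. (Arbitrary a, ToJSON a, FromJSON a, Eq a, Show a)
  => Proxy a -> Spec
propJSONSketch _ =
  prop "FromJSON/ToJSON roundtrips" $ \(x :: a) ->
    parseEither parseJSON (toJSON x) === Right x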

115
tests/V1/Test/Query.hs Normal file
View File

@@ -0,0 +1,115 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Query where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
spec :: Spec
spec =
describe "query API" $ do
it "returns document for term query and identity filter" $ withTestEnv $ do
_ <- insertData
let query = TermQuery (Term "user" "bitemyapp") Nothing
let filter' = IdentityFilter
let search = mkSearch (Just query) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "handles constant score queries" $ withTestEnv $ do
_ <- insertData
let query = TermsQuery "user" ("bitemyapp" :| [])
let cfQuery = ConstantScoreQuery query (Boost 1.0)
let filter' = IdentityFilter
let search = mkSearch (Just cfQuery) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for terms query and identity filter" $ withTestEnv $ do
_ <- insertData
let query = TermsQuery "user" ("bitemyapp" :| [])
let filter' = IdentityFilter
let search = mkSearch (Just query) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for match query" $ withTestEnv $ do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for multi-match query" $ withTestEnv $ do
_ <- insertData
let flds = [FieldName "user", FieldName "message"]
let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do
_ <- insertData
let tiebreaker = Just $ Tiebreaker 0.3
flds = [FieldName "user", FieldName "message"]
multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp")
query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker }
search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for bool query" $ withTestEnv $ do
_ <- insertData
let innerQuery = QueryMatchQuery $
mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let query = QueryBoolQuery $
mkBoolQuery [innerQuery] [] []
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for boosting query" $ withTestEnv $ do
_ <- insertData
let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp")
let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2)
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for common terms query" $ withTestEnv $ do
_ <- insertData
let query = QueryCommonTermsQuery $
CommonTermsQuery (FieldName "user")
(QueryString "bitemyapp")
(CutoffFrequency 0.0001)
Or Or Nothing Nothing Nothing Nothing
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for for inline template query" $ withTestEnv $ do
_ <- insertData
let innerQuery = QueryMatchQuery $
mkMatchQuery (FieldName "{{userKey}}")
(QueryString "{{bitemyappKey}}")
templateParams = TemplateQueryKeyValuePairs $ HM.fromList
[ ("userKey", "user")
, ("bitemyappKey", "bitemyapp")
]
templateQuery = QueryTemplateQueryInline $
TemplateQueryInline innerQuery templateParams
search = mkSearch (Just templateQuery) Nothing
myTweet <- searchTweet search
liftIO $ myTweet `shouldBe` Right exampleTweet
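For orientation outside the suite, the term query the first spec exercises can be run directly; a minimal sketch, assuming a node at localhost:9200 and the suite's test index (names are illustrative, not part of this diff):

{-# LANGUAGE OverloadedStrings #-}
import Control.Monad.IO.Class (liftIO)
import Database.V1.Bloodhound
import Network.HTTP.Client (defaultManagerSettings, responseBody)

-- Run a single term query and dump the raw reply body.
main :: IO ()
main = withBH defaultManagerSettings (Server "http://localhost:9200") $ do
  let query  = TermQuery (Term "user" "bitemyapp") Nothing
      search = mkSearch (Just query) Nothing
  reply <- searchByIndex (IndexName "bloodhound-tests-twitter-1") search
  liftIO (print (responseBody reply))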

201
tests/V1/Test/Snapshots.hs Normal file
View File

@@ -0,0 +1,201 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.Snapshots (spec) where
import Test.Common
import Test.Import
import Data.Maybe (fromMaybe)
import qualified Data.List as L
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Network.HTTP.Types.Method as NHTM
import qualified Network.URI as URI
import Test.Generators ()
spec :: Spec
spec = do
describe "FsSnapshotRepo" $
prop "SnapshotRepo laws" $ \fsr ->
fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
describe "Snapshot repos" $ do
it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do
res <- getSnapshotRepos AllSnapshotRepos
liftIO $ case res of
Left e -> expectationFailure ("Expected a right but got Left " <> show e)
Right _ -> return ()
it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
let r2n = SnapshotRepoName "bloodhound-repo2"
withSnapshotRepo r1n $ \r1 ->
withSnapshotRepo r2n $ \r2 -> do
repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n]))
liftIO $ case repos of
Right xs -> do
let srt = L.sortBy (comparing gSnapshotRepoName)
srt xs `shouldBe` srt [r1, r2]
Left e -> expectationFailure (show e)
it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \r1 -> do
let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1))
let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing
resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression
liftIO (validateStatus resp 200)
Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| []))
liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression)
-- repo verification was introduced in ES 1.4
it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
res <- verifySnapshotRepo r1n
liftIO $ case res of
Right (SnapshotVerification vs)
| null vs -> expectationFailure "Expected nonempty set of verifying nodes"
| otherwise -> return ()
Left e -> expectationFailure (show e)
describe "Snapshots" $ do
it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
res <- getSnapshots r1n AllSnapshots
liftIO $ case res of
Left e -> expectationFailure ("Expected a right but got Left " <> show e)
Right _ -> return ()
it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| []))
liftIO $ case res of
Right [snap]
| snapInfoState snap == SnapshotSuccess &&
snapInfoName snap == s1n -> return ()
| otherwise -> expectationFailure (show snap)
Right [] -> expectationFailure "There were no snapshots"
Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps))
Left e -> expectationFailure (show e)
describe "Snapshot restore" $ do
it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True }
-- have to close an index to restore it
resp1 <- closeIndex testIndex
liftIO (validateStatus resp1 200)
resp2 <- restoreSnapshot r1n s1n settings
liftIO (validateStatus resp2 200)
it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)"
let replace = RRTLit "restored-" :| [RRSubWholeMatch]
let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1"
oldEnoughForOverrides <- liftIO (atleast es15)
let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True
, snapRestoreRenamePattern = Just pat
, snapRestoreRenameReplacement = Just replace
, snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides
then Just overrides
else Nothing
}
-- have to close an index to restore it
let go = do
resp <- restoreSnapshot r1n s1n settings
liftIO (validateStatus resp 200)
exists <- indexExists expectedIndex
liftIO (exists `shouldBe` True)
go `finally` deleteIndex expectedIndex
-- | Get the configured repo paths for snapshotting. Note that path.repo
-- is not set by default, so on ES 1.6 and later we cannot test
-- snapshotting unless the node has it configured. This can and should
-- become proper client functionality rather than this ad-hoc helper.
getRepoPaths :: IO [FilePath]
getRepoPaths = withTestEnv $ do
bhe <- getBHEnv
let Server s = bhServer bhe
let tUrl = s <> "/" <> "_nodes"
initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl))
let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet }
Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe))
return $ fromMaybe mempty $ do
Object nodes <- HM.lookup "nodes" o
Object firstNode <- snd <$> headMay (HM.toList nodes)
Object settings <- HM.lookup "settings" firstNode
Object path <- HM.lookup "path" settings
Array repo <- HM.lookup "repo" path
return [ T.unpack t | String t <- V.toList repo]
-- | 1.5 and earlier don't care about repo paths
canSnapshot :: IO Bool
canSnapshot = do
caresAboutRepos <- atleast es16
repoPaths <- getRepoPaths
return (not caresAboutRepos || not (null repoPaths))
withSnapshotRepo
:: ( MonadMask m
, MonadBH m
)
=> SnapshotRepoName
-> (GenericSnapshotRepo -> m a)
-> m a
withSnapshotRepo srn@(SnapshotRepoName n) f = do
repoPaths <- liftIO getRepoPaths
-- We'll use the first repo path if available, otherwise the system
-- temp dir. Note that this will fail on ES 1.6 and later without a
-- configured repo path, so be sure you use @when' canSnapshot@.
case repoPaths of
(firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f
[] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f
where
alloc dir = do
liftIO (setFileMode dir mode)
let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing
resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo
liftIO (validateStatus resp 200)
return (toGSnapshotRepo repo)
mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes
free GenericSnapshotRepo {..} = do
resp <- deleteSnapshotRepo gSnapshotRepoName
liftIO (validateStatus resp 200)
withSnapshot
:: ( MonadMask m
, MonadBH m
)
=> SnapshotRepoName
-> SnapshotName
-> m a
-> m a
withSnapshot srn sn = bracket_ alloc free
where
alloc = do
resp <- createSnapshot srn sn createSettings
liftIO (validateStatus resp 200)
-- We'll make this synchronous for testing purposes
createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True
, snapIndices = Just (IndexList (testIndex :| []))
-- We don't actually need to back up any data
}
free =
deleteSnapshot srn sn
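The when' combinator that gates most of these specs lives in the shared test code, not in this file; it just runs a spec body only when an effectful predicate holds. A plausible sketch (an assumption, not the definition from this diff):

import Control.Monad (when)

-- Run the action only if the monadic predicate returns True.
when' :: Monad m => m Bool -> m () -> m ()
when' predicate action = predicate >>= \b -> when b action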

22
tests/V1/Test/Sorting.hs Normal file
View File

@@ -0,0 +1,22 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Sorting where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "sorting" $
it "returns documents in the right order" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending
let search = Search Nothing
Nothing (Just [sortSpec]) Nothing Nothing
False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing
Nothing
result <- searchTweets search
let myTweet = grabFirst result
liftIO $
myTweet `shouldBe` Right otherTweet
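The fully spelled-out Search record above can also be written with mkSearch plus a record update; a sketch under the assumption that mkSearch's defaults (From 0, Size 10, query-then-fetch) match the explicit fields:

{-# LANGUAGE OverloadedStrings #-}
import Database.V1.Bloodhound

-- Same search as above: match everything, sort by age ascending.
-- Assumes the sort field of Search is named sortBody.
ageAscending :: Search
ageAscending =
  (mkSearch Nothing Nothing)
    { sortBody = Just [DefaultSortSpec (mkSort (FieldName "age") Ascending)] }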

38
tests/V1/Test/SourceFiltering.hs Normal file
View File

@@ -0,0 +1,38 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.SourceFiltering where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
spec :: Spec
spec =
describe "Source filtering" $ do
it "doesn't include source when sources are disabled" $ withTestEnv $
searchExpectSource
NoSource
(Left (EsError 500 "Source was missing"))
it "includes a source" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPattern (Pattern "message")))
(Right (Object (HM.fromList [("message", String "Use haskell!")])))
it "includes sources" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPatterns [Pattern "user", Pattern "message"]))
(Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")])))
it "includes source patterns" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPattern (Pattern "*ge")))
(Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")])))
it "excludes source patterns" $ withTestEnv $
searchExpectSource
(SourceIncludeExclude (Include [])
(Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"]))
(Right (Object (HM.fromList [("user",String "bitemyapp")])))

22
tests/V1/Test/Suggest.hs Normal file
View File

@ -0,0 +1,22 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Suggest where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "Suggest" $
it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
_ <- insertData
let phraseSuggester = mkPhraseSuggester (FieldName "message")
namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
search' = mkSearch Nothing Nothing
search = search' { suggestBody = Just namedSuggester }
expectedText = Just "use haskell"
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
case parsed of
Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e)
Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText

26
tests/V1/Test/Templates.hs Normal file
View File

@@ -0,0 +1,26 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Templates where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "template API" $ do
it "can create a template" $ withTestEnv $ do
let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping]
resp <- putTemplate idxTpl (TemplateName "tweet-tpl")
liftIO $ validateStatus resp 200
it "can detect if a template exists" $ withTestEnv $ do
exists <- templateExists (TemplateName "tweet-tpl")
liftIO $ exists `shouldBe` True
it "can delete a template" $ withTestEnv $ do
resp <- deleteTemplate (TemplateName "tweet-tpl")
liftIO $ validateStatus resp 200
it "can detect if a template doesn't exist" $ withTestEnv $ do
exists <- templateExists (TemplateName "tweet-tpl")
liftIO $ exists `shouldBe` False

File diff suppressed because it is too large

147
tests/V5/Test/Aggregation.hs Normal file
View File

@@ -0,0 +1,147 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Aggregation (spec) where
import Test.Common
import Test.Import
import Control.Error (fmapL, note)
import qualified Data.Map as M
import qualified Database.V5.Bloodhound
spec :: Spec
spec =
describe "Aggregation API" $ do
it "returns term aggregation results" $ withTestEnv $ do
_ <- insertData
let terms = TermsAgg $ mkTermsAggregation "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
searchExpectAggs search
searchValidBucketAgg search "users" toTerms
it "return sub-aggregation results" $ withTestEnv $ do
_ <- insertData
let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age"
agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs}
search = mkAggregateSearch Nothing $ mkAggregations "users" agg
reply <- searchByIndex testIndex search
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
usersAggResults = result >>= aggregations >>= toTerms "users"
subAggResults = usersAggResults >>= (listToMaybe . buckets) >>= termsAggs >>= toTerms "age_agg"
subAggResultsExists = isJust subAggResults
liftIO $ subAggResultsExists `shouldBe` True
it "returns cardinality aggregation results" $ withTestEnv $ do
_ <- insertData
let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality
let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 }
searchExpectAggs search'
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search'
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1]))
it "returns stats aggregation results" $ withTestEnv $ do
_ <- insertData
let stats = StatsAgg $ mkStatsAggregation $ FieldName "age"
let search = mkAggregateSearch Nothing $ mkAggregations "users" stats
let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 }
searchExpectAggs search'
let statsAggRes k n = (k, object [ "max" .= Number n
, "avg" .= Number n
, "count" .= Number 1
, "min" .= Number n
, "sum" .= Number n])
res <- searchTweets search'
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ statsAggRes "users" 10000]))
it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do
_ <- insertData
let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst }
let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
searchExpectAggs search
searchValidBucketAgg search "users" toTerms
-- Note: one of the three execution hint specs below fails on ES 1.7.3
it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [Map, Ordinals]
it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals]
it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map]
-- One of the above.
it "can execute value_count aggregations" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <>
mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus")))
let search = mkAggregateSearch Nothing ags
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2
, docCountPair "bogus_count" 0
]))
it "can execute date_range aggregations" $ withTestEnv $ do
let now = fromGregorian 2015 3 14
let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0
let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0
let oldDoc = exampleTweet { postDate = ltAMonthAgo }
let newDoc = exampleTweet { postDate = ltAWeekAgo }
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1")
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2")
_ <- refreshIndex testIndex
let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth])
let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek])
let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek])
let ags = mkAggregations "date_ranges" (DateRangeAgg agg)
let search = mkAggregateSearch Nothing ags
res <- searchTweets search
liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2
let bucks = do magrs <- fmapL show (aggregations <$> res)
agrs <- note "no aggregations returned" magrs
rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs
parseEither parseJSON rawBucks
let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0
let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0
liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*"
(Just fromMonthT)
(Just "2015-02-14T00:00:00.000Z")
Nothing
Nothing
2
Nothing
, DateRangeResult "2015-03-07T00:00:00.000Z-*"
(Just fromWeekT)
(Just "2015-03-07T00:00:00.000Z")
Nothing
Nothing
1
Nothing
]
it "returns date histogram aggregation results" $ withTestEnv $ do
_ <- insertData
let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute
let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram)
searchExpectAggs search
searchValidBucketAgg search "byDate" toDateHistogram
it "can execute missing aggregations" $ withTestEnv $ do
_ <- insertData
_ <- insertExtra
let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra"))
let search = mkAggregateSearch Nothing ags
let docCountPair k n = (k, object ["doc_count" .= Number n])
res <- searchTweets search
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1]))
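A note on the (<>) used in the value_count spec above: Aggregations is a name-to-aggregation map, so mkAggregations builds singletons that the Monoid instance merges. A sketch building the same composite directly, assuming Aggregations is the usual Map Text Aggregation:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Map as M
import Database.V5.Bloodhound

-- The composite from the value_count spec, as a plain map.
userAndBogusCounts :: Aggregations
userAndBogusCounts = M.fromList
  [ ("user_count",  ValueCountAgg (FieldValueCount (FieldName "user")))
  , ("bogus_count", ValueCountAgg (FieldValueCount (FieldName "bogus")))
  ]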

63
tests/V5/Test/ApproxEq.hs Normal file
View File

@@ -0,0 +1,63 @@
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.ApproxEq where
import Database.V5.Bloodhound
import Test.Import
import qualified Data.List.NonEmpty as NE
-- | Typeclass for "equal where it matters". Use this to specify
-- less-strict equivalence for things such as lists that can wind up
-- in an unpredictable order
class ApproxEq a where
(=~) :: a -> a -> Bool
showApproxEq :: a -> String
default showApproxEq :: (Show a) => a -> String
showApproxEq = show
(==~) :: (ApproxEq a) => a -> a -> Property
a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b)
instance ApproxEq NominalDiffTime where (=~) = (==)
instance ApproxEq Bool where (=~) = (==)
instance ApproxEq Int where (=~) = (==)
instance (Eq a, Show a) => ApproxEq (Maybe a) where (=~) = (==)
instance ApproxEq Char where
(=~) = (==)
instance ApproxEq NodeAttrFilter where (=~) = (==)
instance ApproxEq NodeAttrName where (=~) = (==)
instance (Eq a, Show a) => ApproxEq (NonEmpty a) where (=~) = (==)
instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where
Left a =~ Left b = a =~ b
Right a =~ Right b = a =~ b
_ =~ _ = False
showApproxEq (Left x) = "Left " <> showApproxEq x
showApproxEq (Right x) = "Right " <> showApproxEq x
instance (ApproxEq a, Show a) => ApproxEq [a] where
as =~ bs = and (zipWith (=~) as bs)
instance ApproxEq ReplicaCount where (=~) = (==)
instance ApproxEq ReplicaBounds where (=~) = (==)
instance ApproxEq Bytes where (=~) = (==)
instance ApproxEq AllocationPolicy where (=~) = (==)
instance ApproxEq InitialShardCount where (=~) = (==)
instance ApproxEq FSType where (=~) = (==)
-- | Due to the way nodeattrfilters get serialized here, they may come
-- out in a different order, but they are morally equivalent
instance ApproxEq UpdatableIndexSetting where
RoutingAllocationInclude a =~ RoutingAllocationInclude b =
NE.sort a =~ NE.sort b
RoutingAllocationExclude a =~ RoutingAllocationExclude b =
NE.sort a =~ NE.sort b
RoutingAllocationRequire a =~ RoutingAllocationRequire b =
NE.sort a =~ NE.sort b
a =~ b = a == b
showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs))
showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs))
showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs))
showApproxEq x = show x

95
tests/V5/Test/BulkAPI.hs Normal file
View File

@@ -0,0 +1,95 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.BulkAPI (spec) where
import Test.Common
import Test.Import
import qualified Data.Vector as V
import qualified Lens.Micro.Aeson as LMA
newtype BulkTest =
BulkTest Text
deriving (Eq, Show)
instance ToJSON BulkTest where
toJSON (BulkTest name') =
object ["name" .= name']
instance FromJSON BulkTest where
parseJSON = withObject "BulkTest" parse
where
parse o = do
t <- o .: "name"
BulkTest <$> parseJSON t
spec :: Spec
spec =
describe "Bulk API" $
it "inserts all documents we request" $ withTestEnv $ do
_ <- insertData
let firstTest = BulkTest "blah"
let secondTest = BulkTest "bloo"
let thirdTest = BulkTest "graffle"
let fourthTest = BulkTest "garabadoo"
let fifthTest = BulkTest "serenity"
let firstDoc = BulkIndex testIndex
testMapping (DocId "2") (toJSON firstTest)
let secondDoc = BulkCreate testIndex
testMapping (DocId "3") (toJSON secondTest)
let thirdDoc = BulkCreateEncoding testIndex
testMapping (DocId "4") (toEncoding thirdTest)
let fourthDoc = BulkIndexAuto testIndex
testMapping (toJSON fourthTest)
let fifthDoc = BulkIndexEncodingAuto testIndex
testMapping (toEncoding fifthTest)
let stream = V.fromList [firstDoc, secondDoc, thirdDoc, fourthDoc, fifthDoc]
_ <- bulk stream
-- liftIO $ pPrint bulkResp
_ <- refreshIndex testIndex
-- liftIO $ pPrint refreshResp
fDoc <- getDocument testIndex testMapping (DocId "2")
sDoc <- getDocument testIndex testMapping (DocId "3")
tDoc <- getDocument testIndex testMapping (DocId "4")
-- note that we cannot query for fourthDoc and fifthDoc since we
-- do not know their autogenerated ids.
let maybeFirst =
eitherDecode
$ responseBody fDoc
:: Either String (EsResult BulkTest)
let maybeSecond =
eitherDecode
$ responseBody sDoc
:: Either String (EsResult BulkTest)
let maybeThird =
eitherDecode
$ responseBody tDoc
:: Either String (EsResult BulkTest)
-- liftIO $ pPrint [maybeFirst, maybeSecond, maybeThird]
liftIO $ do
fmap getSource maybeFirst `shouldBe` Right (Just firstTest)
fmap getSource maybeSecond `shouldBe` Right (Just secondTest)
fmap getSource maybeThird `shouldBe` Right (Just thirdTest)
-- Since we can't get the docs by doc id, we check for their existence in
-- a match all query.
let query = MatchAllQuery Nothing
let search = mkSearch (Just query) Nothing
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value))
case parsed of
Left e ->
liftIO $ expectationFailure ("Expected a script-transformed result but got: " <> show e)
(Right sr) -> do
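-- Six hits: the five bulk documents above plus the one from insertData.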
liftIO $
hitsTotal (searchHits sr) `shouldBe` 6
let nameList :: [Text]
nameList =
hits (searchHits sr)
^.. traverse
. to hitSource
. _Just
. LMA.key "name"
. _String
liftIO $
nameList
`shouldBe` ["blah","bloo","graffle","garabadoo","serenity"]

292
tests/V5/Test/Common.hs Normal file
View File

@@ -0,0 +1,292 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Common where
import Test.Import
import qualified Data.Map as M
import qualified Data.Version as Vers
import qualified Network.HTTP.Types.Status as NHTS
testServer :: Server
testServer = Server "http://localhost:9200"
testIndex :: IndexName
testIndex = IndexName "bloodhound-tests-twitter-1"
testMapping :: MappingName
testMapping = MappingName "tweet"
withTestEnv :: BH IO a -> IO a
withTestEnv = withBH defaultManagerSettings testServer
data Location = Location { lat :: Double
, lon :: Double } deriving (Eq, Show)
data Tweet = Tweet { user :: Text
, postDate :: UTCTime
, message :: Text
, age :: Int
, location :: Location
, extra :: Maybe Text }
deriving (Eq, Show)
$(deriveJSON defaultOptions ''Location)
$(deriveJSON defaultOptions ''Tweet)
data ParentMapping = ParentMapping deriving (Eq, Show)
instance ToJSON ParentMapping where
toJSON ParentMapping =
object ["properties" .=
object [ "user" .= object ["type" .= ("string" :: Text)
, "fielddata" .= True
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("keyword" :: Text)]
]]
es13 :: Vers.Version
es13 = Vers.Version [1, 3, 0] []
es12 :: Vers.Version
es12 = Vers.Version [1, 2, 0] []
es11 :: Vers.Version
es11 = Vers.Version [1, 1, 0] []
es14 :: Vers.Version
es14 = Vers.Version [1, 4, 0] []
es15 :: Vers.Version
es15 = Vers.Version [1, 5, 0] []
es16 :: Vers.Version
es16 = Vers.Version [1, 6, 0] []
es20 :: Vers.Version
es20 = Vers.Version [2, 0, 0] []
es50 :: Vers.Version
es50 = Vers.Version [5, 0, 0] []
getServerVersion :: IO (Maybe Vers.Version)
getServerVersion = fmap extractVersion <$> withTestEnv getStatus
where
extractVersion = versionNumber . number . version
createExampleIndex :: (MonadBH m) => m Reply
createExampleIndex =
createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex
deleteExampleIndex :: (MonadBH m) => m Reply
deleteExampleIndex =
deleteIndex testIndex
validateStatus :: Show body => Response body -> Int -> Expectation
validateStatus resp expected =
if actual == expected
then return ()
else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body)
where
actual = NHTS.statusCode (responseStatus resp)
body = responseBody resp
data ChildMapping = ChildMapping deriving (Eq, Show)
instance ToJSON ChildMapping where
toJSON ChildMapping =
object ["_parent" .= object ["type" .= ("parent" :: Text)]
, "properties" .=
object [ "user" .= object ["type" .= ("string" :: Text)
, "fielddata" .= True
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("keyword" :: Text)]
]]
data TweetMapping = TweetMapping deriving (Eq, Show)
instance ToJSON TweetMapping where
toJSON TweetMapping =
object ["tweet" .=
object ["properties" .=
object [ "user" .= object [ "type" .= ("string" :: Text)
, "fielddata" .= True
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("keyword" :: Text)]
]]]
exampleTweet :: Tweet
exampleTweet = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 10)
, message = "Use haskell!"
, age = 10000
, location = Location 40.12 (-71.34)
, extra = Nothing }
tweetWithExtra :: Tweet
tweetWithExtra = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 10)
, message = "Use haskell!"
, age = 10000
, location = Location 40.12 (-71.34)
, extra = Just "blah blah" }
newAge :: Int
newAge = 31337
newUser :: Text
newUser = "someotherapp"
tweetPatch :: Value
tweetPatch =
object [ "age" .= newAge
, "user" .= newUser
]
patchedTweet :: Tweet
patchedTweet = exampleTweet{age = newAge, user = newUser}
otherTweet :: Tweet
otherTweet = Tweet { user = "notmyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 11)
, message = "Use haskell!"
, age = 1000
, location = Location 40.12 (-71.34)
, extra = Nothing }
resetIndex :: BH IO ()
resetIndex = do
_ <- deleteExampleIndex
_ <- createExampleIndex
_ <- putMapping testIndex testMapping TweetMapping
return ()
insertData :: BH IO Reply
insertData = do
resetIndex
insertData' defaultIndexDocumentSettings
insertData' :: IndexDocumentSettings -> BH IO Reply
insertData' ids = do
r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1")
_ <- refreshIndex testIndex
return r
updateData :: BH IO Reply
updateData = do
r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1")
_ <- refreshIndex testIndex
return r
insertOther :: BH IO ()
insertOther = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2")
_ <- refreshIndex testIndex
return ()
insertExtra :: BH IO ()
insertExtra = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4")
_ <- refreshIndex testIndex
return ()
insertWithSpaceInId :: BH IO ()
insertWithSpaceInId = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World")
_ <- refreshIndex testIndex
return ()
searchTweet :: Search -> BH IO (Either EsError Tweet)
searchTweet search = do
result <- searchTweets search
let myTweet :: Either EsError Tweet
myTweet = grabFirst result
return myTweet
searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet))
searchTweets search = parseEsResponse =<< searchByIndex testIndex search
searchExpectNoResults :: Search -> BH IO ()
searchExpectNoResults search = do
result <- searchTweets search
let emptyHits = fmap (hits . searchHits) result
liftIO $
emptyHits `shouldBe` Right []
searchExpectAggs :: Search -> BH IO ()
searchExpectAggs search = do
reply <- searchByIndex testIndex search
let isEmpty x = return (M.null x)
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
liftIO $
(result >>= aggregations >>= isEmpty) `shouldBe` Just False
searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) =>
Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO ()
searchValidBucketAgg search aggKey extractor = do
reply <- searchByIndex testIndex search
let bucketDocs = docCount . head . buckets
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x)
liftIO $
count `shouldBe` Just 1
searchTermsAggHint :: [ExecutionHint] -> BH IO ()
searchTermsAggHint hints = do
let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint }
let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint
forM_ hints $ searchExpectAggs . search
forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms)
searchTweetHighlight :: Search
-> BH IO (Either EsError (Maybe HitHighlight))
searchTweetHighlight search = do
result <- searchTweets search
let tweetHit :: Either EsError (Maybe (Hit Tweet))
tweetHit = fmap (headMay . hits . searchHits) result
myHighlight :: Either EsError (Maybe HitHighlight)
myHighlight = (join . fmap hitHighlight) <$> tweetHit
return myHighlight
searchExpectSource :: Source -> Either EsError Value -> BH IO ()
searchExpectSource src expected = do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell")
let search = (mkSearch (Just query) Nothing) { source = Just src }
reply <- searchByIndex testIndex search
result <- parseEsResponse reply
let value = grabFirst result
liftIO $
value `shouldBe` expected
atleast :: Vers.Version -> IO Bool
atleast v = getServerVersion >>= \x -> return $ x >= Just v
atmost :: Vers.Version -> IO Bool
atmost v = getServerVersion >>= \x -> return $ x <= Just v
is :: Vers.Version -> IO Bool
is v = getServerVersion >>= \x -> return $ x == Just v

51
tests/V5/Test/Documents.hs Normal file
View File

@@ -0,0 +1,51 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Documents where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "document API" $ do
it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do
_ <- insertData
_ <- updateData
docInserted <- getDocument testIndex testMapping (DocId "1")
let newTweet = eitherDecode
(responseBody docInserted) :: Either String (EsResult Tweet)
liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet)
it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do
_ <- insertWithSpaceInId
docInserted <- getDocument testIndex testMapping (DocId "Hello World")
let newTweet = eitherDecode
(responseBody docInserted) :: Either String (EsResult Tweet)
liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet)
it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do
doc <- getDocument testIndex testMapping (DocId "bogus")
let noTweet = eitherDecode
(responseBody doc) :: Either String (EsResult Tweet)
liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing
it "can use optimistic concurrency control" $ withTestEnv $ do
let ev = ExternalDocVersion minBound
let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev }
resetIndex
res <- insertData' cfg
liftIO $ isCreated res `shouldBe` True
res' <- insertData' cfg
liftIO $ isVersionConflict res' `shouldBe` True
it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do
resetIndex
_ <- putMapping testIndex (MappingName "child") ChildMapping
_ <- putMapping testIndex (MappingName "parent") ParentMapping
_ <- indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1")
let parent = (Just . DocumentParent . DocId) "1"
ids = IndexDocumentSettings NoVersionControl parent
_ <- indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2")
_ <- refreshIndex testIndex
exists <- documentExists testIndex (MappingName "child") parent (DocId "2")
liftIO $ exists `shouldBe` True

536
tests/V5/Test/Generators.hs Normal file
View File

@@ -0,0 +1,536 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Generators where
import Database.V5.Bloodhound
import Test.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Version as Vers
import Test.QuickCheck.TH.Generators
import Test.ApproxEq
instance Arbitrary NominalDiffTime where
arbitrary = fromInteger <$> arbitrary
#if !MIN_VERSION_QuickCheck(2,8,0)
instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where
arbitrary = M.fromList <$> arbitrary
#endif
instance Arbitrary Text where
arbitrary = T.pack <$> arbitrary
instance Arbitrary UTCTime where
arbitrary = UTCTime
<$> arbitrary
<*> (fromRational . toRational <$> choose (0::Double, 86400))
instance Arbitrary Day where
arbitrary =
ModifiedJulianDay . (2000 +) <$> arbitrary
shrink =
(ModifiedJulianDay <$>) . shrink . toModifiedJulianDay
#if !MIN_VERSION_QuickCheck(2,9,0)
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary = liftA2 (:|) arbitrary arbitrary
#endif
arbitraryScore :: Gen Score
arbitraryScore = fmap getPositive <$> arbitrary
instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where
arbitrary = Hit <$> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitraryScore
<*> arbitrary
<*> arbitrary
<*> arbitrary
instance Arbitrary HitFields where
arbitrary = pure (HitFields M.empty)
shrink = const []
instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where
arbitrary = reduceSize $ do
tot <- getPositive <$> arbitrary
score <- arbitraryScore
hs <- arbitrary
return $ SearchHits tot score hs
reduceSize :: Gen a -> Gen a
reduceSize f = sized $ \n -> resize (n `div` 2) f
arbitraryAlphaNum :: Gen Char
arbitraryAlphaNum = oneof [choose ('a', 'z')
,choose ('A','Z')
, choose ('0', '9')]
instance Arbitrary RoutingValue where
arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum
instance Arbitrary AliasRouting where
arbitrary = oneof [allAlias
,one
,theOther
,both']
where one = GranularAliasRouting
<$> (Just <$> arbitrary)
<*> pure Nothing
theOther = GranularAliasRouting Nothing
<$> (Just <$> arbitrary)
both' = GranularAliasRouting
<$> (Just <$> arbitrary)
<*> (Just <$> arbitrary)
allAlias = AllAliasRouting <$> arbitrary
instance Arbitrary FieldName where
arbitrary =
FieldName
. T.pack
<$> listOf1 arbitraryAlphaNum
#if MIN_VERSION_base(4,10,0)
-- Test.QuickCheck.Modifiers
qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a
qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs)
qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!"
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary =
qcNonEmptyToNonEmpty
<$> arbitrary
#endif
instance Arbitrary ScriptFields where
arbitrary =
pure $ ScriptFields $
HM.fromList []
shrink = const []
instance Arbitrary ScriptParams where
arbitrary =
pure $ ScriptParams $
HM.fromList [ ("a", Number 42)
, ("b", String "forty two")
]
shrink = const []
instance Arbitrary RegexpFlags where
arbitrary = oneof [ pure AllRegexpFlags
, pure NoRegexpFlags
, SomeRegexpFlags <$> genUniqueFlags
]
where genUniqueFlags =
NE.fromList . L.nub
<$> listOf1 arbitrary
instance Arbitrary IndexAliasCreate where
arbitrary =
IndexAliasCreate
<$> arbitrary
<*> reduceSize arbitrary
instance Arbitrary Query where
arbitrary =
reduceSize
$ oneof [ TermQuery <$> arbitrary <*> arbitrary
, TermsQuery <$> arbitrary <*> arbitrary
, QueryMatchQuery <$> arbitrary
, QueryMultiMatchQuery <$> arbitrary
, QueryBoolQuery <$> arbitrary
, QueryBoostingQuery <$> arbitrary
, QueryCommonTermsQuery <$> arbitrary
, ConstantScoreQuery <$> arbitrary <*> arbitrary
, QueryDisMaxQuery <$> arbitrary
, QueryFuzzyLikeThisQuery <$> arbitrary
, QueryFuzzyLikeFieldQuery <$> arbitrary
, QueryFuzzyQuery <$> arbitrary
, QueryHasChildQuery <$> arbitrary
, QueryHasParentQuery <$> arbitrary
, IdsQuery <$> arbitrary <*> arbitrary
, QueryIndicesQuery <$> arbitrary
, MatchAllQuery <$> arbitrary
, QueryMoreLikeThisQuery <$> arbitrary
, QueryMoreLikeThisFieldQuery <$> arbitrary
, QueryNestedQuery <$> arbitrary
, QueryPrefixQuery <$> arbitrary
, QueryQueryStringQuery <$> arbitrary
, QuerySimpleQueryStringQuery <$> arbitrary
, QueryRangeQuery <$> arbitrary
, QueryRegexpQuery <$> arbitrary
, QueryTemplateQueryInline <$> arbitrary
]
-- TODO: Implement shrink
-- shrink = genericShrink
instance Arbitrary Filter where
arbitrary =
Filter <$> arbitrary
shrink (Filter q) =
Filter <$> shrink q
instance Arbitrary ReplicaBounds where
arbitrary = oneof [ replicasBounded
, replicasLowerBounded
, pure ReplicasUnbounded
]
where replicasBounded = do
Positive a <- arbitrary
Positive b <- arbitrary
return (ReplicasBounded a b)
replicasLowerBounded = do
Positive a <- arbitrary
return (ReplicasLowerBounded a)
instance Arbitrary NodeAttrName where
arbitrary =
NodeAttrName
. T.pack
<$> listOf1 arbitraryAlphaNum
instance Arbitrary NodeAttrFilter where
arbitrary = do
n <- arbitrary
s:ss <- listOf1 (listOf1 arbitraryAlphaNum)
let ts = T.pack <$> s :| ss
return (NodeAttrFilter n ts)
instance Arbitrary VersionNumber where
arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary
where
mk versions = VersionNumber (Vers.Version versions [])
instance Arbitrary TemplateQueryKeyValuePairs where
arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary
shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . shrink $ HM.toList x
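-- Each 'makeArbitrary ''T' splice below (from Test.QuickCheck.TH.Generators)
-- derives a generator named arbitraryT from T's constructors, which the
-- adjacent instance then installs. Roughly, for a simple newtype, the
-- expansion amounts to (hypothetical; real TH output may differ):
--
--   arbitraryBoost :: Gen Boost
--   arbitraryBoost = Boost <$> arbitrary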
makeArbitrary ''IndexName
instance Arbitrary IndexName where arbitrary = arbitraryIndexName
makeArbitrary ''MappingName
instance Arbitrary MappingName where arbitrary = arbitraryMappingName
makeArbitrary ''DocId
instance Arbitrary DocId where arbitrary = arbitraryDocId
makeArbitrary ''Version
instance Arbitrary Version where arbitrary = arbitraryVersion
makeArbitrary ''BuildHash
instance Arbitrary BuildHash where arbitrary = arbitraryBuildHash
makeArbitrary ''IndexAliasRouting
instance Arbitrary IndexAliasRouting where arbitrary = arbitraryIndexAliasRouting
makeArbitrary ''ShardCount
instance Arbitrary ShardCount where arbitrary = arbitraryShardCount
makeArbitrary ''ReplicaCount
instance Arbitrary ReplicaCount where arbitrary = arbitraryReplicaCount
makeArbitrary ''TemplateName
instance Arbitrary TemplateName where arbitrary = arbitraryTemplateName
makeArbitrary ''TemplatePattern
instance Arbitrary TemplatePattern where arbitrary = arbitraryTemplatePattern
makeArbitrary ''QueryString
instance Arbitrary QueryString where arbitrary = arbitraryQueryString
makeArbitrary ''CacheName
instance Arbitrary CacheName where arbitrary = arbitraryCacheName
makeArbitrary ''CacheKey
instance Arbitrary CacheKey where arbitrary = arbitraryCacheKey
makeArbitrary ''Existence
instance Arbitrary Existence where arbitrary = arbitraryExistence
makeArbitrary ''CutoffFrequency
instance Arbitrary CutoffFrequency where arbitrary = arbitraryCutoffFrequency
makeArbitrary ''Analyzer
instance Arbitrary Analyzer where arbitrary = arbitraryAnalyzer
makeArbitrary ''MaxExpansions
instance Arbitrary MaxExpansions where arbitrary = arbitraryMaxExpansions
makeArbitrary ''Lenient
instance Arbitrary Lenient where arbitrary = arbitraryLenient
makeArbitrary ''Tiebreaker
instance Arbitrary Tiebreaker where arbitrary = arbitraryTiebreaker
makeArbitrary ''Boost
instance Arbitrary Boost where arbitrary = arbitraryBoost
makeArbitrary ''BoostTerms
instance Arbitrary BoostTerms where arbitrary = arbitraryBoostTerms
makeArbitrary ''MinimumMatch
instance Arbitrary MinimumMatch where arbitrary = arbitraryMinimumMatch
makeArbitrary ''DisableCoord
instance Arbitrary DisableCoord where arbitrary = arbitraryDisableCoord
makeArbitrary ''IgnoreTermFrequency
instance Arbitrary IgnoreTermFrequency where arbitrary = arbitraryIgnoreTermFrequency
makeArbitrary ''MinimumTermFrequency
instance Arbitrary MinimumTermFrequency where arbitrary = arbitraryMinimumTermFrequency
makeArbitrary ''MaxQueryTerms
instance Arbitrary MaxQueryTerms where arbitrary = arbitraryMaxQueryTerms
makeArbitrary ''Fuzziness
instance Arbitrary Fuzziness where arbitrary = arbitraryFuzziness
makeArbitrary ''PrefixLength
instance Arbitrary PrefixLength where arbitrary = arbitraryPrefixLength
makeArbitrary ''TypeName
instance Arbitrary TypeName where arbitrary = arbitraryTypeName
makeArbitrary ''PercentMatch
instance Arbitrary PercentMatch where arbitrary = arbitraryPercentMatch
makeArbitrary ''StopWord
instance Arbitrary StopWord where arbitrary = arbitraryStopWord
makeArbitrary ''QueryPath
instance Arbitrary QueryPath where arbitrary = arbitraryQueryPath
makeArbitrary ''AllowLeadingWildcard
instance Arbitrary AllowLeadingWildcard where arbitrary = arbitraryAllowLeadingWildcard
makeArbitrary ''LowercaseExpanded
instance Arbitrary LowercaseExpanded where arbitrary = arbitraryLowercaseExpanded
makeArbitrary ''EnablePositionIncrements
instance Arbitrary EnablePositionIncrements where arbitrary = arbitraryEnablePositionIncrements
makeArbitrary ''AnalyzeWildcard
instance Arbitrary AnalyzeWildcard where arbitrary = arbitraryAnalyzeWildcard
makeArbitrary ''GeneratePhraseQueries
instance Arbitrary GeneratePhraseQueries where arbitrary = arbitraryGeneratePhraseQueries
makeArbitrary ''Locale
instance Arbitrary Locale where arbitrary = arbitraryLocale
makeArbitrary ''MaxWordLength
instance Arbitrary MaxWordLength where arbitrary = arbitraryMaxWordLength
makeArbitrary ''MinWordLength
instance Arbitrary MinWordLength where arbitrary = arbitraryMinWordLength
makeArbitrary ''PhraseSlop
instance Arbitrary PhraseSlop where arbitrary = arbitraryPhraseSlop
makeArbitrary ''MinDocFrequency
instance Arbitrary MinDocFrequency where arbitrary = arbitraryMinDocFrequency
makeArbitrary ''MaxDocFrequency
instance Arbitrary MaxDocFrequency where arbitrary = arbitraryMaxDocFrequency
makeArbitrary ''Regexp
instance Arbitrary Regexp where arbitrary = arbitraryRegexp
makeArbitrary ''SimpleQueryStringQuery
instance Arbitrary SimpleQueryStringQuery where arbitrary = arbitrarySimpleQueryStringQuery
makeArbitrary ''FieldOrFields
instance Arbitrary FieldOrFields where arbitrary = arbitraryFieldOrFields
makeArbitrary ''SimpleQueryFlag
instance Arbitrary SimpleQueryFlag where arbitrary = arbitrarySimpleQueryFlag
makeArbitrary ''RegexpQuery
instance Arbitrary RegexpQuery where arbitrary = arbitraryRegexpQuery
makeArbitrary ''QueryStringQuery
instance Arbitrary QueryStringQuery where arbitrary = arbitraryQueryStringQuery
makeArbitrary ''RangeQuery
instance Arbitrary RangeQuery where arbitrary = arbitraryRangeQuery
makeArbitrary ''RangeValue
instance Arbitrary RangeValue where arbitrary = arbitraryRangeValue
makeArbitrary ''PrefixQuery
instance Arbitrary PrefixQuery where arbitrary = arbitraryPrefixQuery
makeArbitrary ''NestedQuery
instance Arbitrary NestedQuery where arbitrary = arbitraryNestedQuery
makeArbitrary ''MoreLikeThisFieldQuery
instance Arbitrary MoreLikeThisFieldQuery where arbitrary = arbitraryMoreLikeThisFieldQuery
makeArbitrary ''MoreLikeThisQuery
instance Arbitrary MoreLikeThisQuery where arbitrary = arbitraryMoreLikeThisQuery
makeArbitrary ''IndicesQuery
instance Arbitrary IndicesQuery where arbitrary = arbitraryIndicesQuery
makeArbitrary ''HasParentQuery
instance Arbitrary HasParentQuery where arbitrary = arbitraryHasParentQuery
makeArbitrary ''HasChildQuery
instance Arbitrary HasChildQuery where arbitrary = arbitraryHasChildQuery
makeArbitrary ''FuzzyQuery
instance Arbitrary FuzzyQuery where arbitrary = arbitraryFuzzyQuery
makeArbitrary ''FuzzyLikeFieldQuery
instance Arbitrary FuzzyLikeFieldQuery where arbitrary = arbitraryFuzzyLikeFieldQuery
makeArbitrary ''FuzzyLikeThisQuery
instance Arbitrary FuzzyLikeThisQuery where arbitrary = arbitraryFuzzyLikeThisQuery
makeArbitrary ''DisMaxQuery
instance Arbitrary DisMaxQuery where arbitrary = arbitraryDisMaxQuery
makeArbitrary ''CommonTermsQuery
instance Arbitrary CommonTermsQuery where arbitrary = arbitraryCommonTermsQuery
makeArbitrary ''DistanceRange
instance Arbitrary DistanceRange where arbitrary = arbitraryDistanceRange
makeArbitrary ''MultiMatchQuery
instance Arbitrary MultiMatchQuery where arbitrary = arbitraryMultiMatchQuery
makeArbitrary ''LessThanD
instance Arbitrary LessThanD where arbitrary = arbitraryLessThanD
makeArbitrary ''LessThanEqD
instance Arbitrary LessThanEqD where arbitrary = arbitraryLessThanEqD
makeArbitrary ''GreaterThanD
instance Arbitrary GreaterThanD where arbitrary = arbitraryGreaterThanD
makeArbitrary ''GreaterThanEqD
instance Arbitrary GreaterThanEqD where arbitrary = arbitraryGreaterThanEqD
makeArbitrary ''LessThan
instance Arbitrary LessThan where arbitrary = arbitraryLessThan
makeArbitrary ''LessThanEq
instance Arbitrary LessThanEq where arbitrary = arbitraryLessThanEq
makeArbitrary ''GreaterThan
instance Arbitrary GreaterThan where arbitrary = arbitraryGreaterThan
makeArbitrary ''GreaterThanEq
instance Arbitrary GreaterThanEq where arbitrary = arbitraryGreaterThanEq
makeArbitrary ''GeoPoint
instance Arbitrary GeoPoint where arbitrary = arbitraryGeoPoint
makeArbitrary ''NullValue
instance Arbitrary NullValue where arbitrary = arbitraryNullValue
makeArbitrary ''MinimumMatchHighLow
instance Arbitrary MinimumMatchHighLow where arbitrary = arbitraryMinimumMatchHighLow
makeArbitrary ''CommonMinimumMatch
instance Arbitrary CommonMinimumMatch where arbitrary = arbitraryCommonMinimumMatch
makeArbitrary ''BoostingQuery
instance Arbitrary BoostingQuery where arbitrary = arbitraryBoostingQuery
makeArbitrary ''BoolQuery
instance Arbitrary BoolQuery where arbitrary = arbitraryBoolQuery
makeArbitrary ''MatchQuery
instance Arbitrary MatchQuery where arbitrary = arbitraryMatchQuery
makeArbitrary ''MultiMatchQueryType
instance Arbitrary MultiMatchQueryType where arbitrary = arbitraryMultiMatchQueryType
makeArbitrary ''BooleanOperator
instance Arbitrary BooleanOperator where arbitrary = arbitraryBooleanOperator
makeArbitrary ''ZeroTermsQuery
instance Arbitrary ZeroTermsQuery where arbitrary = arbitraryZeroTermsQuery
makeArbitrary ''MatchQueryType
instance Arbitrary MatchQueryType where arbitrary = arbitraryMatchQueryType
makeArbitrary ''SearchAliasRouting
instance Arbitrary SearchAliasRouting where arbitrary = arbitrarySearchAliasRouting
makeArbitrary ''ScoreType
instance Arbitrary ScoreType where arbitrary = arbitraryScoreType
makeArbitrary ''Distance
instance Arbitrary Distance where arbitrary = arbitraryDistance
makeArbitrary ''DistanceUnit
instance Arbitrary DistanceUnit where arbitrary = arbitraryDistanceUnit
makeArbitrary ''DistanceType
instance Arbitrary DistanceType where arbitrary = arbitraryDistanceType
makeArbitrary ''OptimizeBbox
instance Arbitrary OptimizeBbox where arbitrary = arbitraryOptimizeBbox
makeArbitrary ''GeoBoundingBoxConstraint
instance Arbitrary GeoBoundingBoxConstraint where arbitrary = arbitraryGeoBoundingBoxConstraint
makeArbitrary ''GeoFilterType
instance Arbitrary GeoFilterType where arbitrary = arbitraryGeoFilterType
makeArbitrary ''GeoBoundingBox
instance Arbitrary GeoBoundingBox where arbitrary = arbitraryGeoBoundingBox
makeArbitrary ''LatLon
instance Arbitrary LatLon where arbitrary = arbitraryLatLon
makeArbitrary ''RangeExecution
instance Arbitrary RangeExecution where arbitrary = arbitraryRangeExecution
makeArbitrary ''RegexpFlag
instance Arbitrary RegexpFlag where arbitrary = arbitraryRegexpFlag
makeArbitrary ''BoolMatch
instance Arbitrary BoolMatch where arbitrary = arbitraryBoolMatch
makeArbitrary ''Term
instance Arbitrary Term where arbitrary = arbitraryTerm
makeArbitrary ''IndexSettings
instance Arbitrary IndexSettings where arbitrary = arbitraryIndexSettings
makeArbitrary ''TokenChar
instance Arbitrary TokenChar where arbitrary = arbitraryTokenChar
makeArbitrary ''Ngram
instance Arbitrary Ngram where arbitrary = arbitraryNgram
makeArbitrary ''TokenizerDefinition
instance Arbitrary TokenizerDefinition where arbitrary = arbitraryTokenizerDefinition
makeArbitrary ''TokenFilter
instance Arbitrary TokenFilter where arbitrary = arbitraryTokenFilter
makeArbitrary ''TokenFilterDefinition
instance Arbitrary TokenFilterDefinition where arbitrary = arbitraryTokenFilterDefinition
makeArbitrary ''Language
instance Arbitrary Language where arbitrary = arbitraryLanguage
makeArbitrary ''Shingle
instance Arbitrary Shingle where arbitrary = arbitraryShingle
makeArbitrary ''AnalyzerDefinition
instance Arbitrary AnalyzerDefinition where arbitrary = arbitraryAnalyzerDefinition
makeArbitrary ''Analysis
instance Arbitrary Analysis where arbitrary = arbitraryAnalysis
makeArbitrary ''Tokenizer
instance Arbitrary Tokenizer where arbitrary = arbitraryTokenizer
makeArbitrary ''UpdatableIndexSetting
instance Arbitrary UpdatableIndexSetting where
arbitrary = arbitraryUpdatableIndexSetting
makeArbitrary ''Compression
instance Arbitrary Compression where arbitrary = arbitraryCompression
makeArbitrary ''Bytes
instance Arbitrary Bytes where arbitrary = arbitraryBytes
makeArbitrary ''AllocationPolicy
instance Arbitrary AllocationPolicy where arbitrary = arbitraryAllocationPolicy
makeArbitrary ''InitialShardCount
instance Arbitrary InitialShardCount where arbitrary = arbitraryInitialShardCount
makeArbitrary ''FSType
instance Arbitrary FSType where arbitrary = arbitraryFSType
makeArbitrary ''CompoundFormat
instance Arbitrary CompoundFormat where arbitrary = arbitraryCompoundFormat
makeArbitrary ''FsSnapshotRepo
instance Arbitrary FsSnapshotRepo where arbitrary = arbitraryFsSnapshotRepo
makeArbitrary ''SnapshotRepoName
instance Arbitrary SnapshotRepoName where arbitrary = arbitrarySnapshotRepoName
makeArbitrary ''TemplateQueryInline
instance Arbitrary TemplateQueryInline where arbitrary = arbitraryTemplateQueryInline
makeArbitrary ''DirectGeneratorSuggestModeTypes
instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = arbitraryDirectGeneratorSuggestModeTypes
makeArbitrary ''DirectGenerators
instance Arbitrary DirectGenerators where arbitrary = arbitraryDirectGenerators
makeArbitrary ''PhraseSuggesterCollate
instance Arbitrary PhraseSuggesterCollate where arbitrary = arbitraryPhraseSuggesterCollate
makeArbitrary ''PhraseSuggesterHighlighter
instance Arbitrary PhraseSuggesterHighlighter where arbitrary = arbitraryPhraseSuggesterHighlighter
makeArbitrary ''Size
instance Arbitrary Size where arbitrary = arbitrarySize
makeArbitrary ''PhraseSuggester
instance Arbitrary PhraseSuggester where arbitrary = arbitraryPhraseSuggester
makeArbitrary ''SuggestType
instance Arbitrary SuggestType where arbitrary = arbitrarySuggestType
makeArbitrary ''Suggest
instance Arbitrary Suggest where arbitrary = arbitrarySuggest
makeArbitrary ''FunctionScoreQuery
instance Arbitrary FunctionScoreQuery where arbitrary = arbitraryFunctionScoreQuery
makeArbitrary ''FunctionScoreFunction
instance Arbitrary FunctionScoreFunction where arbitrary = arbitraryFunctionScoreFunction
makeArbitrary ''FunctionScoreFunctions
instance Arbitrary FunctionScoreFunctions where arbitrary = arbitraryFunctionScoreFunctions
makeArbitrary ''ComponentFunctionScoreFunction
instance Arbitrary ComponentFunctionScoreFunction where arbitrary = arbitraryComponentFunctionScoreFunction
makeArbitrary ''Script
instance Arbitrary Script where arbitrary = arbitraryScript
makeArbitrary ''ScriptLanguage
instance Arbitrary ScriptLanguage where arbitrary = arbitraryScriptLanguage
makeArbitrary ''ScriptInline
instance Arbitrary ScriptInline where arbitrary = arbitraryScriptInline
makeArbitrary ''ScriptId
instance Arbitrary ScriptId where arbitrary = arbitraryScriptId
makeArbitrary ''ScoreMode
instance Arbitrary ScoreMode where arbitrary = arbitraryScoreMode
makeArbitrary ''BoostMode
instance Arbitrary BoostMode where arbitrary = arbitraryBoostMode
makeArbitrary ''Seed
instance Arbitrary Seed where arbitrary = arbitrarySeed
makeArbitrary ''FieldValueFactor
instance Arbitrary FieldValueFactor where arbitrary = arbitraryFieldValueFactor
makeArbitrary ''Weight
instance Arbitrary Weight where arbitrary = arbitraryWeight
makeArbitrary ''Factor
instance Arbitrary Factor where arbitrary = arbitraryFactor
makeArbitrary ''FactorMissingFieldValue
instance Arbitrary FactorMissingFieldValue where arbitrary = arbitraryFactorMissingFieldValue
makeArbitrary ''FactorModifier
instance Arbitrary FactorModifier where arbitrary = arbitraryFactorModifier
newtype UpdatableIndexSetting' =
UpdatableIndexSetting' UpdatableIndexSetting
deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable)
instance Arbitrary UpdatableIndexSetting' where
arbitrary = do
settings <- arbitrary
return $ UpdatableIndexSetting' $ case settings of
RoutingAllocationInclude xs ->
RoutingAllocationInclude (dropDuplicateAttrNames xs)
RoutingAllocationExclude xs ->
RoutingAllocationExclude (dropDuplicateAttrNames xs)
RoutingAllocationRequire xs ->
RoutingAllocationRequire (dropDuplicateAttrNames xs)
x -> x
where
dropDuplicateAttrNames =
NE.fromList . L.nubBy sameAttrName . NE.toList
sameAttrName a b =
nodeAttrFilterName a == nodeAttrFilterName b
-- shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (shrink x)
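For orientation: a minimal sketch of the splice-plus-instance pattern repeated above, assuming makeArbitrary is the Template Haskell splice from the quickcheck-arbitrary-template package (Test.QuickCheck.TH.Generators); the Color type is hypothetical.
{-# LANGUAGE TemplateHaskell #-}
module Example.Generators where
import Test.QuickCheck (Arbitrary (..))
import Test.QuickCheck.TH.Generators (makeArbitrary)
-- Hypothetical stand-in for the Bloodhound types above.
data Color = Red | Green | Blue deriving (Show, Eq)
-- Splices in 'arbitraryColor :: Gen Color', which picks among the
-- constructors and generates any fields recursively.
makeArbitrary ''Color
instance Arbitrary Color where arbitrary = arbitraryColor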

32
tests/V5/Test/Highlights.hs Normal file
View File

@ -0,0 +1,32 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Highlights where
import Test.Common
import Test.Import
import qualified Data.Map as M
initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight))
initHighlights fieldName = do
_ <- insertData
_ <- insertOther
let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell")
let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing]
let search = mkHighlightSearch (Just query) testHighlight
searchTweetHighlight search
spec :: Spec
spec =
describe "Highlights API" $ do
it "returns highlight from query when there should be one" $ withTestEnv $ do
myHighlight <- initHighlights "message"
liftIO $
myHighlight `shouldBe`
Right (Just (M.fromList [("message", ["Use <em>haskell</em>!"])]))
it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do
myHighlight <- initHighlights "user"
liftIO $
myHighlight `shouldBe`
Right Nothing
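A condensed sketch of the request that initHighlights builds, inlined and monomorphic, using only names that already appear in this file (and assuming mkHighlightSearch returns a Search, as its use above suggests):
messageHighlightSearch :: Search
messageHighlightSearch =
  let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell")
      hl = Highlights Nothing [FieldHighlight (FieldName "message") Nothing]
  in mkHighlightSearch (Just query) hl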

58
tests/V5/Test/Import.hs Normal file
View File

@ -0,0 +1,58 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Import
( module X
, module Test.Import
) where
import Control.Applicative as X
import Control.Exception as X (evaluate)
import Control.Monad as X
import Control.Monad.Catch as X
import Control.Monad.Reader as X
import Data.Aeson as X
import Data.Aeson.TH as X
import Data.Aeson.Types as X (parseEither)
import Data.Maybe as X
import Data.List.NonEmpty as X (NonEmpty(..))
import Data.Monoid as X
import Data.Ord as X (comparing)
import Data.Proxy as X
import Data.Text as X (Text)
import Data.Time.Calendar as X (Day(..), fromGregorian)
import Data.Time.Clock as X
import Data.Typeable as X
import Database.V5.Bloodhound as X hiding (key)
import Lens.Micro as X
import Lens.Micro.Aeson as X
import Network.HTTP.Client as X hiding (Proxy, fileSize)
import System.IO.Temp as X
import System.PosixCompat.Files as X
import Test.Hspec as X
import Test.Hspec.QuickCheck as X (prop)
import Test.QuickCheck as X hiding (Result, Success)
import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid)
import Text.Pretty.Simple as X (pPrint)
import qualified Data.List as L
noDuplicates :: Eq a => [a] -> Bool
noDuplicates xs = L.nub xs == xs
getSource :: EsResult a -> Maybe a
getSource = fmap _source . foundResult
grabFirst :: Either EsError (SearchResult a) -> Either EsError a
grabFirst r =
case fmap (hitSource . head . hits . searchHits) r of
(Left e) -> Left e
(Right Nothing) -> Left (EsError 500 "Source was missing")
(Right (Just x)) -> Right x
when' :: Monad m => m Bool -> m () -> m ()
when' b f = b >>= \x -> when x f
headMay :: [a] -> Maybe a
headMay (x : _) = Just x
headMay _ = Nothing
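A small usage sketch of the helpers above; everything here is pure or plain IO, so it runs outside BH:
helpersDemo :: IO ()
helpersDemo = do
  print (headMay ([] :: [Int]))    -- Nothing
  print (headMay [1, 2, 3 :: Int]) -- Just 1
  print (noDuplicates "abc")       -- True
  print (noDuplicates "abca")      -- False
  -- when' runs the action only when the monadic condition yields True.
  when' (pure True) (putStrLn "condition held")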

152
tests/V5/Test/Indices.hs Normal file
View File

@ -0,0 +1,152 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Indices where
import Test.Common
import Test.Import
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
spec :: Spec
spec = do
describe "Index create/delete API" $ do
it "creates and then deletes the requested index" $ withTestEnv $ do
-- priming state.
_ <- deleteExampleIndex
resp <- createExampleIndex
deleteResp <- deleteExampleIndex
liftIO $ do
validateStatus resp 200
validateStatus deleteResp 200
describe "Index aliases" $ do
let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")
let alias = IndexAlias testIndex aname
let create = IndexAliasCreate Nothing Nothing
let action = AddAlias alias create
it "handles the simple case of aliasing an existing index" $ do
withTestEnv $ do
resetIndex
resp <- updateIndexAliases (action :| [])
liftIO $ validateStatus resp 200
let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
(do aliases <- withTestEnv getIndexAliases
let expected = IndexAliasSummary alias create
case aliases of
Right (IndexAliasesSummary summs) ->
L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
it "allows alias deletion" $ do
aliases <- withTestEnv $ do
resetIndex
resp <- updateIndexAliases (action :| [])
liftIO $ validateStatus resp 200
_ <- deleteIndexAlias aname
getIndexAliases
-- let expected = IndexAliasSummary alias create
case aliases of
Right (IndexAliasesSummary summs) ->
L.find ( (== aname)
. indexAlias
. indexAliasSummaryAlias
) summs
`shouldBe` Nothing
Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)
describe "Index Listing" $ do
it "returns a list of index names" $ withTestEnv $ do
_ <- createExampleIndex
ixns <- listIndices
liftIO (ixns `shouldContain` [testIndex])
describe "Index Settings" $ do
it "persists settings" $ withTestEnv $ do
_ <- deleteExampleIndex
_ <- createExampleIndex
let updates = BlocksWrite False :| []
updateResp <- updateIndexSettings updates testIndex
liftIO $ validateStatus updateResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
(NE.toList updates))
it "allows total fields to be set" $ when' (atleast es50) $ withTestEnv $ do
_ <- deleteExampleIndex
_ <- createExampleIndex
let updates = MappingTotalFieldsLimit 2500 :| []
updateResp <- updateIndexSettings updates testIndex
liftIO $ validateStatus updateResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
(NE.toList updates))
it "accepts customer analyzers" $ when' (atleast es50) $ withTestEnv $ do
_ <- deleteExampleIndex
let analysis = Analysis
(M.singleton "ex_analyzer"
( AnalyzerDefinition
(Just (Tokenizer "ex_tokenizer"))
(map TokenFilter
[ "ex_filter_lowercase","ex_filter_uppercase","ex_filter_apostrophe"
, "ex_filter_reverse","ex_filter_snowball"
, "ex_filter_shingle"
]
)
)
)
(M.singleton "ex_tokenizer"
( TokenizerDefinitionNgram
( Ngram 3 4 [TokenLetter,TokenDigit])
)
)
(M.fromList
[ ("ex_filter_lowercase",TokenFilterDefinitionLowercase (Just Greek))
, ("ex_filter_uppercase",TokenFilterDefinitionUppercase Nothing)
, ("ex_filter_apostrophe",TokenFilterDefinitionApostrophe)
, ("ex_filter_reverse",TokenFilterDefinitionReverse)
, ("ex_filter_snowball",TokenFilterDefinitionSnowball English)
, ("ex_filter_shingle",TokenFilterDefinitionShingle (Shingle 3 3 True False " " "_"))
]
)
updates = [AnalysisSetting analysis]
createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
liftIO $ validateStatus createResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
updates
)
it "accepts default compression codec" $ when' (atleast es50) $ withTestEnv $ do
_ <- deleteExampleIndex
let updates = [CompressionSetting CompressionDefault]
createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
liftIO $ validateStatus createResp 200
getResp <- getIndexSettings testIndex
liftIO $ getResp `shouldBe` Right
(IndexSettingsSummary testIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) updates)
it "accepts best compression codec" $ when' (atleast es50) $ withTestEnv $ do
_ <- deleteExampleIndex
let updates = [CompressionSetting CompressionBest]
createResp <- createIndexWith (updates ++ [NumberOfReplicas (ReplicaCount 0)]) 1 testIndex
liftIO $ validateStatus createResp 200
getResp <- getIndexSettings testIndex
liftIO $ getResp `shouldBe` Right
(IndexSettingsSummary testIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) updates)
describe "Index Optimization" $ do
it "returns a successful response upon completion" $ withTestEnv $ do
_ <- createExampleIndex
resp <- forceMergeIndex (IndexList (testIndex :| [])) defaultForceMergeIndexSettings
liftIO $ validateStatus resp 200
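Referring back to the custom-analyzer test above: a minimal sketch of the same Analysis construction, stripped down to one analyzer and one tokenizer. Constructor shapes are taken from that test; the analyzer and tokenizer names are illustrative.
minimalAnalysis :: Analysis
minimalAnalysis = Analysis
  (M.singleton "my_analyzer"
    (AnalyzerDefinition (Just (Tokenizer "my_ngram")) []))
  (M.singleton "my_ngram"
    (TokenizerDefinitionNgram (Ngram 2 3 [TokenLetter, TokenDigit])))
  M.empty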

213
tests/V5/Test/JSON.hs Normal file
View File

@ -0,0 +1,213 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.JSON (spec) where
import Test.Import
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import Test.ApproxEq
import Test.Generators
propJSON :: forall a
. ( Arbitrary a
, ToJSON a
, FromJSON a
, Show a
, Eq a
, Typeable a
)
=> Proxy a -> Spec
propJSON _ = prop testName $ \(a :: a) ->
let jsonStr = "via " <> BL8.unpack (encode a)
in counterexample jsonStr (parseEither parseJSON (toJSON a)
=== Right a)
where testName = show ty <> " FromJSON/ToJSON roundtrips"
ty = typeOf (undefined :: a)
propApproxJSON :: forall a
. ( Arbitrary a
, ToJSON a
, FromJSON a
, Show a
, ApproxEq a
, Typeable a
)
=> Proxy a -> Spec
propApproxJSON _ = prop testName $ \(a :: a) ->
let jsonStr = "via " <> BL8.unpack (encode a)
in counterexample jsonStr (parseEither parseJSON (toJSON a)
==~ Right a)
where testName = show ty <> " FromJSON/ToJSON roundtrips"
ty = typeOf (undefined :: a)
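A sketch of plugging a new type into propJSON; the Color type is hypothetical and needs exactly the constraints listed above (aeson instances derived via Generic here, which requires DeriveGeneric):
{-# LANGUAGE DeriveGeneric #-}
import GHC.Generics (Generic)
data Color = Red | Green | Blue
  deriving (Show, Eq, Typeable, Generic)
instance ToJSON Color
instance FromJSON Color
instance Arbitrary Color where
  arbitrary = elements [Red, Green, Blue]
colorSpec :: Spec
colorSpec = propJSON (Proxy :: Proxy Color)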
spec :: Spec
spec = do
describe "ToJSON RegexpFlags" $ do
it "generates the correct JSON for AllRegexpFlags" $
toJSON AllRegexpFlags `shouldBe` String "ALL"
it "generates the correct JSON for NoRegexpFlags" $
toJSON NoRegexpFlags `shouldBe` String "NONE"
it "generates the correct JSON for SomeRegexpFlags" $
let flags = AnyString :| [ Automaton
, Complement
, Empty
, Intersection
, Interval ]
in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL"
prop "removes duplicates from flags" $ \(flags :: RegexpFlags) ->
let String str = toJSON flags
flagStrs = T.splitOn "|" str
in noDuplicates flagStrs
describe "omitNulls" $ do
it "checks that omitNulls drops list elements when it should" $
let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int]))
, "test2" .= (toJSON ("some value" :: Text))]
in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
it "checks that omitNulls doesn't drop list elements when it shouldn't" $
let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int]))
, "test2" .= (toJSON ("some value" :: Text))]
in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0]))
, ("test2", String "some value")])
it "checks that omitNulls drops non list elements when it should" $
let dropped = omitNulls $ [ "test1" .= (toJSON Null)
, "test2" .= (toJSON ("some value" :: Text))]
in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
it "checks that omitNulls doesn't drop non list elements when it shouldn't" $
let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int))
, "test2" .= (toJSON ("some value" :: Text))]
in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0)
, ("test2", String "some value")])
describe "Exact isomorphism JSON instances" $ do
propJSON (Proxy :: Proxy Version)
propJSON (Proxy :: Proxy IndexName)
propJSON (Proxy :: Proxy MappingName)
propJSON (Proxy :: Proxy DocId)
propJSON (Proxy :: Proxy IndexAliasRouting)
propJSON (Proxy :: Proxy RoutingValue)
propJSON (Proxy :: Proxy ShardCount)
propJSON (Proxy :: Proxy ReplicaCount)
propJSON (Proxy :: Proxy TemplateName)
propJSON (Proxy :: Proxy TemplatePattern)
propJSON (Proxy :: Proxy QueryString)
propJSON (Proxy :: Proxy FieldName)
propJSON (Proxy :: Proxy Script)
propJSON (Proxy :: Proxy ScriptLanguage)
propJSON (Proxy :: Proxy ScriptInline)
propJSON (Proxy :: Proxy ScriptId)
propJSON (Proxy :: Proxy ScriptParams)
propJSON (Proxy :: Proxy CacheName)
propJSON (Proxy :: Proxy CacheKey)
propJSON (Proxy :: Proxy Existence)
propJSON (Proxy :: Proxy CutoffFrequency)
propJSON (Proxy :: Proxy Analyzer)
propJSON (Proxy :: Proxy MaxExpansions)
propJSON (Proxy :: Proxy Lenient)
propJSON (Proxy :: Proxy Tiebreaker)
propJSON (Proxy :: Proxy Boost)
propJSON (Proxy :: Proxy BoostTerms)
propJSON (Proxy :: Proxy MinimumMatch)
propJSON (Proxy :: Proxy DisableCoord)
propJSON (Proxy :: Proxy IgnoreTermFrequency)
propJSON (Proxy :: Proxy MinimumTermFrequency)
propJSON (Proxy :: Proxy MaxQueryTerms)
propJSON (Proxy :: Proxy Fuzziness)
propJSON (Proxy :: Proxy PrefixLength)
propJSON (Proxy :: Proxy TypeName)
propJSON (Proxy :: Proxy PercentMatch)
propJSON (Proxy :: Proxy StopWord)
propJSON (Proxy :: Proxy QueryPath)
propJSON (Proxy :: Proxy AllowLeadingWildcard)
propJSON (Proxy :: Proxy LowercaseExpanded)
propJSON (Proxy :: Proxy EnablePositionIncrements)
propJSON (Proxy :: Proxy AnalyzeWildcard)
propJSON (Proxy :: Proxy GeneratePhraseQueries)
propJSON (Proxy :: Proxy Locale)
propJSON (Proxy :: Proxy MaxWordLength)
propJSON (Proxy :: Proxy MinWordLength)
propJSON (Proxy :: Proxy PhraseSlop)
propJSON (Proxy :: Proxy MinDocFrequency)
propJSON (Proxy :: Proxy MaxDocFrequency)
propJSON (Proxy :: Proxy Filter)
propJSON (Proxy :: Proxy Query)
propJSON (Proxy :: Proxy SimpleQueryStringQuery)
propJSON (Proxy :: Proxy FieldOrFields)
propJSON (Proxy :: Proxy SimpleQueryFlag)
propJSON (Proxy :: Proxy RegexpQuery)
propJSON (Proxy :: Proxy QueryStringQuery)
propJSON (Proxy :: Proxy RangeQuery)
propJSON (Proxy :: Proxy PrefixQuery)
propJSON (Proxy :: Proxy NestedQuery)
propJSON (Proxy :: Proxy MoreLikeThisFieldQuery)
propJSON (Proxy :: Proxy MoreLikeThisQuery)
propJSON (Proxy :: Proxy IndicesQuery)
propJSON (Proxy :: Proxy HasParentQuery)
propJSON (Proxy :: Proxy HasChildQuery)
propJSON (Proxy :: Proxy FuzzyQuery)
propJSON (Proxy :: Proxy FuzzyLikeFieldQuery)
propJSON (Proxy :: Proxy FuzzyLikeThisQuery)
propJSON (Proxy :: Proxy FunctionScoreQuery)
propJSON (Proxy :: Proxy BoostMode)
propJSON (Proxy :: Proxy ScoreMode)
propJSON (Proxy :: Proxy ComponentFunctionScoreFunction)
propJSON (Proxy :: Proxy FieldValueFactor)
propJSON (Proxy :: Proxy FactorModifier)
propJSON (Proxy :: Proxy DisMaxQuery)
propJSON (Proxy :: Proxy CommonTermsQuery)
propJSON (Proxy :: Proxy CommonMinimumMatch)
propJSON (Proxy :: Proxy BoostingQuery)
propJSON (Proxy :: Proxy BoolQuery)
propJSON (Proxy :: Proxy MatchQuery)
propJSON (Proxy :: Proxy MultiMatchQueryType)
propJSON (Proxy :: Proxy BooleanOperator)
propJSON (Proxy :: Proxy ZeroTermsQuery)
propJSON (Proxy :: Proxy MatchQueryType)
propJSON (Proxy :: Proxy AliasRouting)
propJSON (Proxy :: Proxy IndexAliasCreate)
propJSON (Proxy :: Proxy SearchAliasRouting)
propJSON (Proxy :: Proxy ScoreType)
propJSON (Proxy :: Proxy Distance)
propJSON (Proxy :: Proxy DistanceUnit)
propJSON (Proxy :: Proxy DistanceType)
propJSON (Proxy :: Proxy OptimizeBbox)
propJSON (Proxy :: Proxy GeoBoundingBoxConstraint)
propJSON (Proxy :: Proxy GeoFilterType)
propJSON (Proxy :: Proxy GeoBoundingBox)
propJSON (Proxy :: Proxy LatLon)
propJSON (Proxy :: Proxy RangeExecution)
prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs ->
let expected = case rfs of
SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (L.nub (NE.toList fs)))
x -> x
in parseEither parseJSON (toJSON rfs) === Right expected
propJSON (Proxy :: Proxy BoolMatch)
propJSON (Proxy :: Proxy Term)
propJSON (Proxy :: Proxy MultiMatchQuery)
propJSON (Proxy :: Proxy IndexSettings)
propJSON (Proxy :: Proxy CompoundFormat)
propJSON (Proxy :: Proxy TemplateQueryInline)
propJSON (Proxy :: Proxy Suggest)
propJSON (Proxy :: Proxy DirectGenerators)
propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes)
describe "Approximate isomorphism JSON instances" $ do
propApproxJSON (Proxy :: Proxy UpdatableIndexSetting')
propApproxJSON (Proxy :: Proxy ReplicaCount)
propApproxJSON (Proxy :: Proxy ReplicaBounds)
propApproxJSON (Proxy :: Proxy Bytes)
propApproxJSON (Proxy :: Proxy AllocationPolicy)
propApproxJSON (Proxy :: Proxy InitialShardCount)
propApproxJSON (Proxy :: Proxy FSType)

115
tests/V5/Test/Query.hs Normal file
View File

@ -0,0 +1,115 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Query where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
spec :: Spec
spec =
describe "query API" $ do
it "returns document for term query and identity filter" $ withTestEnv $ do
_ <- insertData
let query = TermQuery (Term "user" "bitemyapp") Nothing
let filter' = Filter $ MatchAllQuery Nothing
let search = mkSearch (Just query) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "handles constant score queries" $ withTestEnv $ do
_ <- insertData
let query = TermsQuery "user" ("bitemyapp" :| [])
let cfQuery = ConstantScoreQuery query (Boost 1.0)
let filter' = Filter $ MatchAllQuery Nothing
let search = mkSearch (Just cfQuery) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for terms query and identity filter" $ withTestEnv $ do
_ <- insertData
let query = TermsQuery "user" ("bitemyapp" :| [])
let filter' = Filter $ MatchAllQuery Nothing
let search = mkSearch (Just query) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for match query" $ withTestEnv $ do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for multi-match query" $ withTestEnv $ do
_ <- insertData
let flds = [FieldName "user", FieldName "message"]
let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do
_ <- insertData
let tiebreaker = Just $ Tiebreaker 0.3
flds = [FieldName "user", FieldName "message"]
multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp")
query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker }
search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for bool query" $ withTestEnv $ do
_ <- insertData
let innerQuery = QueryMatchQuery $
mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let query = QueryBoolQuery $
mkBoolQuery [innerQuery] [] [] []
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for boosting query" $ withTestEnv $ do
_ <- insertData
let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp")
let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2)
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for common terms query" $ withTestEnv $ do
_ <- insertData
let query = QueryCommonTermsQuery $
CommonTermsQuery (FieldName "user")
(QueryString "bitemyapp")
(CutoffFrequency 0.0001)
Or Or Nothing Nothing Nothing Nothing
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for for inline template query" $ withTestEnv $ do
_ <- insertData
let innerQuery = QueryMatchQuery $
mkMatchQuery (FieldName "{{userKey}}")
(QueryString "{{bitemyappKey}}")
templateParams = TemplateQueryKeyValuePairs $ HM.fromList
[ ("userKey", "user")
, ("bitemyappKey", "bitemyapp")
]
templateQuery = QueryTemplateQueryInline $
TemplateQueryInline innerQuery templateParams
search = mkSearch (Just templateQuery) Nothing
myTweet <- searchTweet search
liftIO $ myTweet `shouldBe` Right exampleTweet
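For comparison with the first test, a sketch of the same term query with an explicit boost instead of Nothing (assuming the query's second field is the optional boost, with Boost used as in the constant-score test above):
boostedTermSearch :: Search
boostedTermSearch =
  let query = TermQuery (Term "user" "bitemyapp") (Just (Boost 2.0))
  in mkSearch (Just query) Nothing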

36
tests/V5/Test/Script.hs Normal file
View File

@ -0,0 +1,36 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Script where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.Map as M
spec :: Spec
spec =
describe "Script" $
it "returns a transformed document based on the script field" $ withTestEnv $ do
_ <- insertData
let query = MatchAllQuery Nothing
sfv = toJSON $
Script
(Just (ScriptLanguage "painless"))
(Just (ScriptInline "doc['age'].value * 2"))
Nothing
Nothing
sf = ScriptFields $
HM.fromList [("test1", sfv)]
search' = mkSearch (Just query) Nothing
search = search' { scriptFields = Just sf }
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value))
case parsed of
Left e ->
liftIO $ expectationFailure ("Expected a script-transformed result but got: " <> show e)
Right sr -> do
let Just results =
hitFields (head (hits (searchHits sr)))
liftIO $
results `shouldBe` HitFields (M.fromList [("test1", [Number 20000.0])])

201
tests/V5/Test/Snapshots.hs Normal file
View File

@ -0,0 +1,201 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.Snapshots (spec) where
import Test.Common
import Test.Import
import Data.Maybe (fromMaybe)
import qualified Data.List as L
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Network.HTTP.Types.Method as NHTM
import qualified Network.URI as URI
import Test.Generators ()
spec :: Spec
spec = do
describe "FsSnapshotRepo" $
prop "SnapshotRepo laws" $ \fsr ->
fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
describe "Snapshot repos" $ do
it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do
res <- getSnapshotRepos AllSnapshotRepos
liftIO $ case res of
Left e -> expectationFailure ("Expected a right but got Left " <> show e)
Right _ -> return ()
it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
let r2n = SnapshotRepoName "bloodhound-repo2"
withSnapshotRepo r1n $ \r1 ->
withSnapshotRepo r2n $ \r2 -> do
repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n]))
liftIO $ case repos of
Right xs -> do
let srt = L.sortBy (comparing gSnapshotRepoName)
srt xs `shouldBe` srt [r1, r2]
Left e -> expectationFailure (show e)
it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \r1 -> do
let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1))
let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing
resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression
liftIO (validateStatus resp 200)
Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| []))
liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression)
-- The verify API appears to have been introduced in ES 1.4.
it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
res <- verifySnapshotRepo r1n
liftIO $ case res of
Right (SnapshotVerification vs)
| null vs -> expectationFailure "Expected nonempty set of verifying nodes"
| otherwise -> return ()
Left e -> expectationFailure (show e)
describe "Snapshots" $ do
it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
res <- getSnapshots r1n AllSnapshots
liftIO $ case res of
Left e -> expectationFailure ("Expected a right but got Left " <> show e)
Right _ -> return ()
it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| []))
liftIO $ case res of
Right [snap]
| snapInfoState snap == SnapshotSuccess &&
snapInfoName snap == s1n -> return ()
| otherwise -> expectationFailure (show snap)
Right [] -> expectationFailure "There were no snapshots"
Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps))
Left e -> expectationFailure (show e)
describe "Snapshot restore" $ do
it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True }
-- have to close an index to restore it
resp1 <- closeIndex testIndex
liftIO (validateStatus resp1 200)
resp2 <- restoreSnapshot r1n s1n settings
liftIO (validateStatus resp2 200)
it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)"
let replace = RRTLit "restored-" :| [RRSubWholeMatch]
let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1"
oldEnoughForOverrides <- liftIO (atleast es15)
let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True
, snapRestoreRenamePattern = Just pat
, snapRestoreRenameReplacement = Just replace
, snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides
then Just overrides
else Nothing
}
-- have to close an index to restore it
let go = do
resp <- restoreSnapshot r1n s1n settings
liftIO (validateStatus resp 200)
exists <- indexExists expectedIndex
liftIO (exists `shouldBe` True)
go `finally` deleteIndex expectedIndex
-- | Get the configured repo paths for snapshotting. Repo paths are not
-- configured by default, and on ES newer than 1.5 we cannot test
-- snapshotting without them. This can and should become part of the
-- client's functionality in a much less ad-hoc incarnation.
getRepoPaths :: IO [FilePath]
getRepoPaths = withTestEnv $ do
bhe <- getBHEnv
let Server s = bhServer bhe
let tUrl = s <> "/" <> "_nodes"
initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl))
let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet }
Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe))
return $ fromMaybe mempty $ do
Object nodes <- HM.lookup "nodes" o
Object firstNode <- snd <$> headMay (HM.toList nodes)
Object settings <- HM.lookup "settings" firstNode
Object path <- HM.lookup "path" settings
Array repo <- HM.lookup "repo" path
return [ T.unpack t | String t <- V.toList repo]
-- | 1.5 and earlier don't care about repo paths
canSnapshot :: IO Bool
canSnapshot = do
caresAboutRepos <- atleast es16
repoPaths <- getRepoPaths
return (not caresAboutRepos || not (null repoPaths))
withSnapshotRepo
:: ( MonadMask m
, MonadBH m
)
=> SnapshotRepoName
-> (GenericSnapshotRepo -> m a)
-> m a
withSnapshotRepo srn@(SnapshotRepoName n) f = do
repoPaths <- liftIO getRepoPaths
-- We'll use the first repo path if available, otherwise the system
-- temp dir. Note that the temp-dir fallback will fail on ES >= 1.6,
-- which requires repo paths to be configured, so be sure you gate
-- tests with @when' canSnapshot@.
case repoPaths of
(firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f
[] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f
where
alloc dir = do
liftIO (setFileMode dir mode)
let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing
resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo
liftIO (validateStatus resp 200)
return (toGSnapshotRepo repo)
mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes
free GenericSnapshotRepo {..} = do
resp <- deleteSnapshotRepo gSnapshotRepoName
liftIO (validateStatus resp 200)
withSnapshot
:: ( MonadMask m
, MonadBH m
)
=> SnapshotRepoName
-> SnapshotName
-> m a
-> m a
withSnapshot srn sn = bracket_ alloc free
where
alloc = do
resp <- createSnapshot srn sn createSettings
liftIO (validateStatus resp 200)
-- We'll make this synchronous for testing purposes
createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True
, snapIndices = Just (IndexList (testIndex :| []))
-- We don't actually need to back up any data
}
free =
deleteSnapshot srn sn
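A usage sketch combining the two bracket helpers above; the repo and snapshot names are illustrative:
demoSnapshotRoundtrip :: (MonadMask m, MonadBH m) => m ()
demoSnapshotRoundtrip = do
  let repoName = SnapshotRepoName "demo-repo"
      snapName = SnapshotName "demo-snapshot"
  withSnapshotRepo repoName $ \_repo ->
    withSnapshot repoName snapName $ do
      res <- getSnapshots repoName (SnapshotList (ExactSnap snapName :| []))
      liftIO $ case res of
        Right snaps -> print (length snaps)
        Left e -> print e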

22
tests/V5/Test/Sorting.hs Normal file
View File

@ -0,0 +1,22 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Sorting where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "sorting" $
it "returns documents in the right order" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending
let search = Search Nothing
Nothing (Just [sortSpec]) Nothing Nothing
False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing
Nothing Nothing
result <- searchTweets search
let myTweet = grabFirst result
liftIO $
myTweet `shouldBe` Right otherTweet
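For contrast, a sketch of the same search sorted descending (assuming Descending is the counterpart to Ascending; every other name comes from the test above):
descendingSearch :: Search
descendingSearch =
  let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Descending
  in Search Nothing Nothing (Just [sortSpec]) Nothing Nothing
       False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing
       Nothing Nothing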

38
tests/V5/Test/SourceFiltering.hs Normal file
View File

@ -0,0 +1,38 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.SourceFiltering where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
spec :: Spec
spec =
describe "Source filtering" $ do
it "doesn't include source when sources are disabled" $ withTestEnv $
searchExpectSource
NoSource
(Left (EsError 500 "Source was missing"))
it "includes a source" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPattern (Pattern "message")))
(Right (Object (HM.fromList [("message", String "Use haskell!")])))
it "includes sources" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPatterns [Pattern "user", Pattern "message"]))
(Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")])))
it "includes source patterns" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPattern (Pattern "*ge")))
(Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")])))
it "excludes source patterns" $ withTestEnv $
searchExpectSource
(SourceIncludeExclude (Include [])
(Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"]))
(Right (Object (HM.fromList [("user",String "bitemyapp")])))
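A sketch of the complementary include-only case, assuming the selector type these constructors belong to is named Source (the pattern itself is illustrative):
includeOnlyUser :: Source
includeOnlyUser = SourceIncludeExclude (Include [Pattern "user"]) (Exclude [])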

23
tests/V5/Test/Suggest.hs Normal file
View File

@ -0,0 +1,23 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Suggest where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "Suggest" $
it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
_ <- insertData
let query = QueryMatchNoneQuery
phraseSuggester = mkPhraseSuggester (FieldName "message")
namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
search' = mkSearch (Just query) Nothing
search = search' { suggestBody = Just namedSuggester }
expectedText = Just "use haskell"
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
case parsed of
Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e)
Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText

26
tests/V5/Test/Templates.hs Normal file
View File

@ -0,0 +1,26 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Templates where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "template API" $ do
it "can create a template" $ withTestEnv $ do
let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping]
resp <- putTemplate idxTpl (TemplateName "tweet-tpl")
liftIO $ validateStatus resp 200
it "can detect if a template exists" $ withTestEnv $ do
exists <- templateExists (TemplateName "tweet-tpl")
liftIO $ exists `shouldBe` True
it "can delete a template" $ withTestEnv $ do
resp <- deleteTemplate (TemplateName "tweet-tpl")
liftIO $ validateStatus resp 200
it "can detect if a template doesn't exist" $ withTestEnv $ do
exists <- templateExists (TemplateName "tweet-tpl")
liftIO $ exists `shouldBe` False
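A sketch of the same template without explicit index settings, letting the cluster defaults apply (constructor shape taken from the first test above):
bareTemplate :: IndexTemplate
bareTemplate = IndexTemplate (TemplatePattern "tweet-*") Nothing [toJSON TweetMapping]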

File diff suppressed because it is too large