Housecleaning (#258)

House cleaning, nuked V1
Chris Allen 2019-08-02 15:36:04 -05:00 committed by GitHub
parent 9537ed2539
commit c5603ff4e1
78 changed files with 366 additions and 8809 deletions

.gitignore
View File

@@ -19,3 +19,4 @@ bloodhound.iml
examples/bloodhound-examples.cabal
/.ghc.environment.*
.hspec-failures
bloodhound.cabal

.travis.yml
View File

@@ -1,28 +1,22 @@
sudo: false
language: generic
jdk:
- oraclejdk11
addons:
apt:
packages:
- libgmp-dev
- oracle-java8-installer
env:
global:
- JAVA_HOME=/usr/lib/jvm/java-8-oracle
matrix:
- GHCVER=8.0 ESVER=1.7.6 STACK_YAML=stack-8.0.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.0 ESVER=5.0.2 STACK_YAML=stack-8.0.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.2 ESVER=1.7.6 STACK_YAML=stack.yaml ESFLAG=ES1 DLINK=https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch
- GHCVER=8.2 ESVER=5.0.2 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.2 ESVER=5.5.0 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
# allow_failures:
# - GHCVER=8.2 ESVER=6.1.3 STACK_YAML=stack.yaml ESFLAG=ES5 DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
- GHCVER=8.6 ESVER=5.6.0 STACK_YAML=stack.yaml DLINK=https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch ES_JAVA_OPTS="-Xms500m -Xmx750m"
install:
# stack
- mkdir -p ~/.local/bin
- travis_retry curl -L https://github.com/commercialhaskell/stack/releases/download/v1.7.1/stack-1.7.1-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
- travis_retry curl -L https://github.com/commercialhaskell/stack/releases/download/v2.1.3/stack-2.1.3-linux-x86_64.tar.gz | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
- export PATH=~/.local/bin:$PATH
- stack --no-terminal --version
# elasticsearch
@@ -36,11 +30,11 @@ script:
- stack setup --no-terminal
- stack update --no-terminal
- stack build -j2 --fast --no-terminal
- travis_wait 45 sleep 1800 &
- stack test --fast --no-terminal bloodhound:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:$ESFLAG
- travis_wait 60 sleep 1800 &
- stack test --fast --no-terminal bloodhound:bloodhound-tests --test-arguments="--qc-max-success 500"
cache:
timeout: 2000
timeout: 1500
directories:
- $HOME/.cabal
- $HOME/.ghc

Makefile
View File

@@ -1,85 +1,96 @@
.PHONY : build build-validate ghci test test-rerun test-ghci ghcid ghcid-validate \
weeder hlint hlint-watch mod-build
.DEFAULT_GOAL = help
stack = STACK_YAML='stack.yaml' stack
ghc_perf_options = --ghc-options '+RTS -A128M -RTS'
build = build $(ghc_perf_options)
ghci = ghci $(ghc_perf_options)
test = test $(ghc_perf_options)
stack-8.0 = STACK_YAML="stack-8.0.yaml" stack
stack-8.2 = STACK_YAML="stack-8.2.yaml" stack
stack-8.4 = STACK_YAML="stack-8.4.yaml" stack
stack-8.6 = STACK_YAML="stack-8.6.yaml" stack
elasticsearch_directory = elasticsearch
# stack build --ghc-options '+RTS -A128M -RTS'
## run build
build:
stack build
$(stack) $(build)
## build with validation options (Wall, Werror)
build-validate:
stack build --fast --ghc-options '-Wall -Werror'
$(stack) build --fast --ghc-options '-Wall -Werror +RTS -A128M -RTS'
## run ghci
ghci:
stack ghci
$(stack) $(ghci)
## run tests
test: echo-warn
stack test
$(stack) $(test)
## run tests with forced re-run via "-r"
test-rerun: echo-warn
stack test --test-arguments "-r"
$(stack) $(test) --test-arguments "-r"
## run ghci with test stanza
test-ghci:
stack ghci bloodhound:test:bloodhound-tests
$(stack) $(ghci) bloodhound:test:bloodhound-tests
## run ghcid
ghcid:
ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
## run ghcid with validate options (Werror, etc.)
ghcid-validate:
ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-Werror -fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
## run weeder
weeder:
weeder . --build
# hlint --default > .hlint.yaml
## run hlint
hlint:
hlint .
## hlint watch with `sos`
hlint-watch:
sos src/ -c "hlint ." -p "src/(.*)\.hs"
mod-build:
stack build --ghc-options '+RTS -A128M -RTS'
# mod-build:
# stack build --ghc-options '+RTS -A128M -RTS'
echo-warn:
@echo "Make certain you have an elasticsearch instance on localhost:9200 !"
7.8-build:
STACK_YAML="stack-7.8.yaml" stack build
## Test with GHC 8.0 and ES 5.x
test-8.0:
STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500"
7.8-test: echo-warn
STACK_YAML="stack-7.8.yaml" stack test
## Test with GHC 8.2 and ES 5.x
test-8.2:
STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500"
7.10-build:
STACK_YAML="stack-7.10.yaml" stack build
## Build with the GHC 8.0 Stack YAML
build-8.0:
$(stack-8.0) $(build)
7.10-test: echo-warn
STACK_YAML="stack-7.10.yaml" stack test
## Build with the GHC 8.2 Stack YAML
build-8.2:
$(stack-8.2) $(build)
7.10-test-ES1:
STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
## Build with the GHC 8.4 Stack YAML
build-8.4:
$(stack-8.4) $(build)
7.10-test-ES5:
STACK_YAML="stack-7.10.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
8.0-test-ES1:
STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
8.0-test-ES5:
STACK_YAML="stack-8.0.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
8.2-test-ES1:
STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES1
8.2-test-ES5:
STACK_YAML="stack.yaml" stack test --fast bloodhound:test:bloodhound-tests --test-arguments="--qc-max-success 500" --flag bloodhound:ES5
8.0-build:
STACK_YAML="stack-8.0.yaml" stack build
8.2-build:
STACK_YAML="stack-8.2.yaml" stack build
module-touch:
touch src/Database/V1/Bloodhound/Types.hs
touch src/Database/V5/Bloodhound/Types.hs
## Build with the GHC 8.6 Stack YAML
build-8.6:
$(stack-8.6) $(build)
## Upload the package to Hackage
upload:
stack upload --no-signature .
@@ -87,20 +98,32 @@ upload:
## Run test environment
compose-ES5:
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f tests/ES5/docker-compose.yml --project-directory tests/ES5/ up
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f docker-compose.yml --project-directory $(elasticsearch_directory) up
## Run test environment in detach mode
compose-ES5-detach-up:
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f tests/ES5/docker-compose.yml --project-directory tests/ES5/ up -d
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f docker-compose.yml --project-directory $(elasticsearch_directory) up -d
## Close test environment if run on detach mode
compose-ES5-detach-down:
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f tests/ES5/docker-compose.yml --project-directory tests/ES5/ down
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f docker-compose.yml --project-directory $(elasticsearch_directory) down
## build the docker compose images
compose-build:
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f tests/ES5/docker-compose.yml --project-directory tests/ES5/ build
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f docker-compose.yml --project-directory $(elasticsearch_directory) build
## Spawn bash shell in ES5 test container
ES5-shell:
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f tests/ES5/docker-compose.yml --project-directory tests/ES5/ exec elasticsearch1 bash
@LOCAL_USER_ID=${LOCAL_USER_ID} docker-compose -f docker-compose.yml --project-directory $(elasticsearch_directory) exec elasticsearch1 bash
help:
@echo "Please use \`make <target>' where <target> is one of\n\n"
@awk '/^[a-zA-Z\-\_0-9]+:/ { \
helpMessage = match(lastLine, /^## (.*)/); \
if (helpMessage) { \
helpCommand = substr($$1, 0, index($$1, ":")); \
helpMessage = substr(lastLine, RSTART + 3, RLENGTH); \
printf "%-30s %s\n", helpCommand, helpMessage; \
} \
} \
{ lastLine = $$0 }' $(MAKEFILE_LIST)

bloodhound.cabal
View File

@@ -1,137 +0,0 @@
name: bloodhound
version: 0.16.0.0
synopsis: Elasticsearch client library for Haskell
description: Elasticsearch made awesome for Haskell hackers
homepage: https://github.com/bitemyapp/bloodhound
license: BSD3
license-file: LICENSE
author: Chris Allen
maintainer: cma@bitemyapp.com
copyright: 2018 Chris Allen
category: Database, Search
build-type: Simple
cabal-version: >=1.10
extra-source-files:
README.md
changelog.md
-- We do this to avoid package check errors when uploading to hackage
tests/V1/tests.hs
tests/V5/tests.hs
source-repository head
type: git
location: https://github.com/bitemyapp/bloodhound.git
Flag ES1
Description: Run the test suite against an Elasticsearch 1.x server
Default: False
Flag ES5
Description: Run the test suite against an Elasticsearch 5.x server
Default: True
library
ghc-options: -Wall
exposed-modules: Database.V5.Bloodhound
Database.V5.Bloodhound.Client
Database.V5.Bloodhound.Types
Database.V5.Bloodhound.Internal.Aggregation
Database.V5.Bloodhound.Internal.Analysis
Database.V5.Bloodhound.Internal.Client
Database.V5.Bloodhound.Internal.Highlight
Database.V5.Bloodhound.Internal.Newtypes
Database.V5.Bloodhound.Internal.Query
Database.V5.Bloodhound.Internal.Sort
Database.V5.Bloodhound.Internal.StringlyTyped
Database.V5.Bloodhound.Internal.Suggest
Database.V1.Bloodhound
Database.V1.Bloodhound.Client
Database.V1.Bloodhound.Types
Database.V1.Bloodhound.Types.Class
Database.V1.Bloodhound.Internal.Aggregation
Database.V1.Bloodhound.Internal.Client
Database.V1.Bloodhound.Internal.Highlight
Database.V1.Bloodhound.Internal.Newtypes
Database.V1.Bloodhound.Internal.Query
Database.V1.Bloodhound.Internal.Sort
Database.V1.Bloodhound.Internal.StringlyTyped
Database.V1.Bloodhound.Internal.Suggest
other-modules: Bloodhound.Import
Database.Bloodhound.Common.Script
hs-source-dirs: src
build-depends: base >= 4.3 && <5,
aeson >= 0.11.1,
blaze-builder,
bytestring >= 0.10.0 && <0.11,
containers >= 0.5.0.0 && <0.6,
exceptions,
hashable,
http-client >= 0.4.30 && <0.7,
http-types >= 0.8 && <0.13,
mtl >= 1.0 && <2.3,
network-uri >= 2.6 && <2.7,
scientific >= 0.3.0.0 && <0.4.0.0,
semigroups >= 0.15 && <0.19,
semver,
text >= 0.11 && <1.3,
time >= 1.4 && <1.9,
transformers >= 0.2 && <0.6,
unordered-containers,
vector >= 0.10.9 && <0.13
default-language: Haskell2010
test-suite bloodhound-tests
ghc-options: -Wall -fno-warn-orphans
type: exitcode-stdio-1.0
main-is: tests.hs
if flag(ES1)
hs-source-dirs: tests/V1
else
hs-source-dirs: tests/V5
other-modules: Test.Script
other-modules: Test.Aggregation
Test.ApproxEq
Test.BulkAPI
Test.Common
Test.Documents
Test.Generators
Test.Highlights
Test.Import
Test.Indices
Test.JSON
Test.Query
Test.Snapshots
Test.Sorting
Test.SourceFiltering
Test.Suggest
Test.Templates
build-depends: base,
QuickCheck,
aeson,
bloodhound,
bytestring,
containers,
errors,
exceptions,
hspec >= 1.8,
http-client,
http-types,
microlens,
microlens-aeson,
mtl,
network-uri,
pretty-simple,
quickcheck-arbitrary-template,
quickcheck-properties,
semigroups,
semver,
temporary,
text,
time,
unix-compat,
unordered-containers >= 0.2.5.0 && <0.3,
vector
default-language: Haskell2010

changelog.md
View File

@@ -1,3 +1,12 @@
0.17.0.0
========
- @bitemyapp
- Removed V1 modules and support.
- Removed the cabal file, replacing it with an Hpack `package.yaml`.
- @JoseD92
- Removed the disused `Seminearring` class.
- Added Docker Compose support.
0.16.0.0
========
- @bitemyapp

View File

@@ -14,7 +14,7 @@ import Data.Text (Text)
import Data.Time.Calendar (Day (..))
import Data.Time.Clock (UTCTime (..), secondsToDiffTime)
import qualified Data.Vector as V
import Database.V5.Bloodhound
import Database.Bloodhound
import GHC.Generics (Generic)
import Network.HTTP.Client (defaultManagerSettings)

package.yaml
View File

@@ -0,0 +1,74 @@
name: bloodhound
version: '0.17.0.0'
synopsis: Elasticsearch client library for Haskell
description: Elasticsearch made awesome for Haskell hackers
category: Database, Search
author: Chris Allen
maintainer: cma@bitemyapp.com
copyright: 2018 Chris Allen
license: BSD3
github: bitemyapp/bloodhound.git
extra-source-files:
- README.md
- changelog.md
- tests/tests.hs
dependencies:
- aeson >=0.11.1
- base >=4.3 && <5
- blaze-builder
- bytestring >=0.10.0
- containers >=0.5.0.0
- exceptions
- hashable
- http-client >=0.4.30
- http-types >=0.8
- mtl >=1.0
- network-uri >=2.6
- scientific >=0.3.0.0
- semigroups >=0.15
- semver
- text >=0.11
- time >=1.4
- transformers >=0.2
- unordered-containers
- vector >=0.10.9
library:
source-dirs: src
ghc-options: -Wall
exposed-modules:
- Database.Bloodhound
- Database.Bloodhound.Client
- Database.Bloodhound.Types
- Database.Bloodhound.Internal.Aggregation
- Database.Bloodhound.Internal.Analysis
- Database.Bloodhound.Internal.Client
- Database.Bloodhound.Internal.Highlight
- Database.Bloodhound.Internal.Newtypes
- Database.Bloodhound.Internal.Query
- Database.Bloodhound.Internal.Sort
- Database.Bloodhound.Internal.StringlyTyped
- Database.Bloodhound.Internal.Suggest
tests:
bloodhound-tests:
source-dirs: tests
main: tests.hs
ghc-options:
- -Wall
- -fno-warn-orphans
dependencies:
- QuickCheck
- base
- bloodhound
- errors
- hspec >=1.8
- microlens
- microlens-aeson
- pretty-simple
- quickcheck-arbitrary-template
- quickcheck-properties
- temporary
- unix-compat

src/Database/Bloodhound.hs
View File

@@ -0,0 +1,10 @@
module Database.Bloodhound
( -- module Data.Aeson.Types
-- ,
module Database.Bloodhound.Client
, module Database.Bloodhound.Types
) where
-- import Data.Aeson.Types
import Database.Bloodhound.Client
import Database.Bloodhound.Types
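-- A hedged sketch, not part of this commit: with V1 gone, this single
-- unversioned module is the entire public surface. The server URL and
-- index name below are illustrative.
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Database.Bloodhound
-- > import Network.HTTP.Client (defaultManagerSettings, responseStatus)
-- >
-- > main :: IO ()
-- > main = do
-- >   reply <- withBH defaultManagerSettings (Server "http://localhost:9200") $
-- >     createIndex defaultIndexSettings (IndexName "events")
-- >   print (responseStatus reply)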

src/Database/Bloodhound/Client.hs
View File

@@ -15,7 +15,7 @@
--
-------------------------------------------------------------------------------
module Database.V5.Bloodhound.Client
module Database.Bloodhound.Client
( -- * Bloodhound client functions
-- | The examples in this module assume the following code has been run.
-- The :{ and :} will only work in GHCi. You'll only need the data types
@@ -125,12 +125,12 @@ import qualified Network.HTTP.Types.URI as NHTU
import qualified Network.URI as URI
import Prelude hiding (filter, head)
import Database.V5.Bloodhound.Types
import Database.Bloodhound.Types
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> :set -XDeriveGeneric
-- >>> import Database.V5.Bloodhound
-- >>> import Database.Bloodhound
-- >>> import Network.HTTP.Client
-- >>> let testServer = (Server "http://localhost:9200")
-- >>> let runBH' = withBH defaultManagerSettings testServer
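-- A hedged usage sketch building on the setup above (assumes a reachable
-- server at testServer; the index name is illustrative):
--
-- > runBH' $ do
-- >   _ <- createIndex defaultIndexSettings (IndexName "tweets")
-- >   refreshIndex (IndexName "tweets")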

src/Database/Bloodhound/Common/Script.hs
View File

@ -7,7 +7,7 @@ import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import Database.V5.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Newtypes
newtype ScriptFields =
ScriptFields (HM.HashMap ScriptFieldName ScriptFieldValue)

src/Database/Bloodhound/Internal/Aggregation.hs
View File

@@ -2,7 +2,7 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V5.Bloodhound.Internal.Aggregation where
module Database.Bloodhound.Internal.Aggregation where
import Bloodhound.Import
@@ -11,11 +11,11 @@ import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Database.V5.Bloodhound.Internal.Client
import Database.V5.Bloodhound.Internal.Highlight (HitHighlight)
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
import Database.V5.Bloodhound.Internal.Sort
import Database.Bloodhound.Internal.Client
import Database.Bloodhound.Internal.Highlight (HitHighlight)
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query
import Database.Bloodhound.Internal.Sort
type Aggregations = M.Map Text Aggregation

src/Database/Bloodhound/Internal/Analysis.hs
View File

@@ -1,16 +1,15 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Analysis where
module Database.Bloodhound.Internal.Analysis where
import Bloodhound.Import
import qualified Data.Map.Strict as M
import Data.String
import qualified Data.Text as T
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.StringlyTyped
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.StringlyTyped
data Analysis = Analysis
{ analysisAnalyzer :: M.Map Text AnalyzerDefinition

src/Database/Bloodhound/Internal/Client.hs
View File

@@ -1,3 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
@@ -5,10 +6,15 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
module Database.V5.Bloodhound.Internal.Client where
module Database.Bloodhound.Internal.Client where
import Bloodhound.Import
#if defined(MIN_VERSION_GLASGOW_HASKELL)
#if MIN_VERSION_GLASGOW_HASKELL(8,6,0,0)
import Control.Monad.Fail (MonadFail)
#endif
#endif
import qualified Data.Text as T
import qualified Data.Traversable as DT
import qualified Data.HashMap.Strict as HM
@@ -19,10 +25,10 @@ import Network.HTTP.Client
import Text.Read (Read(..))
import qualified Text.Read as TR
import Database.V5.Bloodhound.Internal.Analysis
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
import Database.V5.Bloodhound.Internal.StringlyTyped
import Database.Bloodhound.Internal.Analysis
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query
import Database.Bloodhound.Internal.StringlyTyped
{-| Common environment for Elasticsearch calls. Connections will be
pipelined according to the provided HTTP connection manager.
@@ -71,6 +77,11 @@ newtype BH m a = BH {
, MonadFix
, MonadThrow
, MonadCatch
#if defined(MIN_VERSION_GLASGOW_HASKELL)
#if MIN_VERSION_GLASGOW_HASKELL(8,6,0,0)
, MonadFail
#endif
#endif
, MonadMask)
instance MonadTrans BH where
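-- Context for the CPP above, as a hedged sketch (not in this commit):
-- GHC 8.6 turns on MonadFailDesugaring by default, so a partial pattern
-- bind in a BH do-block desugars to MonadFail.fail and needs the derived
-- instance above. The helper below is hypothetical:
firstIndex :: BH IO IndexName
firstIndex = do
  (ix : _) <- listIndices  -- partial pattern: requires MonadFail (BH IO)
  return ix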

src/Database/Bloodhound/Internal/Highlight.hs
View File

@@ -1,14 +1,14 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Highlight where
module Database.Bloodhound.Internal.Highlight where
import Bloodhound.Import
import qualified Data.Map.Strict as M
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query
type HitHighlight = M.Map Text [Text]

src/Database/Bloodhound/Internal/Newtypes.hs
View File

@@ -1,7 +1,7 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Newtypes where
module Database.Bloodhound.Internal.Newtypes where
import Bloodhound.Import

src/Database/Bloodhound/Internal/Query.hs
View File

@@ -2,9 +2,9 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V5.Bloodhound.Internal.Query
module Database.Bloodhound.Internal.Query
( module X
, module Database.V5.Bloodhound.Internal.Query
, module Database.Bloodhound.Internal.Query
) where
import Bloodhound.Import
@@ -15,7 +15,7 @@ import Data.List (nub)
import qualified Data.Text as T
import Database.Bloodhound.Common.Script as X
import Database.V5.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Newtypes
data Query =
TermQuery Term (Maybe Boost)

src/Database/Bloodhound/Internal/Sort.hs
View File

@@ -2,12 +2,12 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V5.Bloodhound.Internal.Sort where
module Database.Bloodhound.Internal.Sort where
import Bloodhound.Import
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.

src/Database/Bloodhound/Internal/StringlyTyped.hs
View File

@@ -1,6 +1,6 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.StringlyTyped where
module Database.Bloodhound.Internal.StringlyTyped where
import Bloodhound.Import

src/Database/Bloodhound/Internal/Suggest.hs
View File

@@ -2,14 +2,14 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V5.Bloodhound.Internal.Suggest where
module Database.Bloodhound.Internal.Suggest where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query (TemplateQueryInline(..), params)
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query (TemplateQueryInline(..), params)
data Suggest = Suggest
{ suggestText :: Text

src/Database/Bloodhound/Types.hs
View File

@@ -26,7 +26,7 @@
module Database.V5.Bloodhound.Types
module Database.Bloodhound.Types
( defaultCache
, defaultIndexSettings
, defaultIndexDocumentSettings
@@ -417,14 +417,14 @@ module Database.V5.Bloodhound.Types
import Bloodhound.Import
import Database.V5.Bloodhound.Internal.Aggregation
import Database.V5.Bloodhound.Internal.Analysis
import Database.V5.Bloodhound.Internal.Client
import Database.V5.Bloodhound.Internal.Highlight
import Database.V5.Bloodhound.Internal.Newtypes
import Database.V5.Bloodhound.Internal.Query
import Database.V5.Bloodhound.Internal.Sort
import Database.V5.Bloodhound.Internal.Suggest
import Database.Bloodhound.Internal.Aggregation
import Database.Bloodhound.Internal.Analysis
import Database.Bloodhound.Internal.Client
import Database.Bloodhound.Internal.Highlight
import Database.Bloodhound.Internal.Newtypes
import Database.Bloodhound.Internal.Query
import Database.Bloodhound.Internal.Sort
import Database.Bloodhound.Internal.Suggest
{-| 'unpackId' is a silly convenience function that gets used once.
-}

src/Database/V1/Bloodhound.hs
View File

@@ -1,10 +0,0 @@
module Database.V1.Bloodhound
( -- module Data.Aeson.Types
-- ,
module Database.V1.Bloodhound.Client
, module Database.V1.Bloodhound.Types
) where
-- import Data.Aeson.Types
import Database.V1.Bloodhound.Client
import Database.V1.Bloodhound.Types

src/Database/V1/Bloodhound/Client.hs
View File

@@ -1,927 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Client
-- Copyright : (C) 2014 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
-- Portability : OverloadedStrings
--
-- Client side functions for talking to Elasticsearch servers.
--
-------------------------------------------------------------------------------
module Database.V1.Bloodhound.Client
( -- * Bloodhound client functions
-- | The examples in this module assume the following code has been run.
-- The :{ and :} will only work in GHCi. You'll only need the data types
-- and typeclass instances for the functions that make use of them.
-- $setup
withBH
-- ** Indices
, createIndex
, deleteIndex
, updateIndexSettings
, getIndexSettings
, optimizeIndex
, indexExists
, openIndex
, closeIndex
, listIndices
, waitForYellowIndex
-- *** Index Aliases
, updateIndexAliases
, getIndexAliases
-- *** Index Templates
, putTemplate
, templateExists
, deleteTemplate
-- ** Mapping
, putMapping
, deleteMapping
-- ** Documents
, indexDocument
, updateDocument
, getDocument
, documentExists
, deleteDocument
-- ** Searching
, searchAll
, searchByIndex
, searchByType
, scanSearch
, getInitialScroll
, advanceScroll
, refreshIndex
, mkSearch
, mkAggregateSearch
, mkHighlightSearch
, bulk
, pageSearch
, mkShardCount
, mkReplicaCount
, getStatus
-- ** Snapshot/Restore
-- *** Snapshot Repos
, getSnapshotRepos
, updateSnapshotRepo
, verifySnapshotRepo
, deleteSnapshotRepo
-- *** Snapshots
, createSnapshot
, getSnapshots
, deleteSnapshot
-- *** Restoring Snapshots
, restoreSnapshot
-- ** Nodes
, getNodesInfo
, getNodesStats
-- ** Request Utilities
, encodeBulkOperations
, encodeBulkOperation
-- * Authentication
, basicAuthHook
-- * Reply-handling tools
, isVersionConflict
, isSuccess
, isCreated
, parseEsResponse
)
where
import qualified Blaze.ByteString.Builder as BB
import Control.Applicative as A
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.Aeson
import Data.ByteString.Lazy.Builder
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Foldable (toList)
import qualified Data.HashMap.Strict as HM
import Data.Ix
import qualified Data.List as LS (filter, foldl')
import Data.List.NonEmpty (NonEmpty (..))
import Data.Maybe (catMaybes, fromMaybe, isJust)
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time.Clock
import qualified Data.Vector as V
import Network.HTTP.Client
import qualified Network.HTTP.Types.Method as NHTM
import qualified Network.HTTP.Types.Status as NHTS
import qualified Network.HTTP.Types.URI as NHTU
import qualified Network.URI as URI
import Prelude hiding (filter, head)
import Database.V1.Bloodhound.Types
-- | 'mkShardCount' is a straight-forward smart constructor for 'ShardCount'
-- which rejects 'Int' values below 1 and above 1000.
mkShardCount :: Int -> Maybe ShardCount
mkShardCount n
| n < 1 = Nothing
| n > 1000 = Nothing
| otherwise = Just (ShardCount n)
-- | 'mkReplicaCount' is a straight-forward smart constructor for 'ReplicaCount'
-- which rejects 'Int' values below 0 and above 1000.
mkReplicaCount :: Int -> Maybe ReplicaCount
mkReplicaCount n
| n < 0 = Nothing
| n > 1000 = Nothing
| otherwise = Just (ReplicaCount n)
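-- Illustrative use of the smart constructors (not from this commit):
-- invalid counts come back as Nothing rather than failing at request time.
customSettings :: Maybe IndexSettings
customSettings = IndexSettings <$> mkShardCount 3 <*> mkReplicaCount 2
-- mkShardCount 0 == Nothing; mkReplicaCount 5000 == Nothing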
emptyBody :: L.ByteString
emptyBody = L.pack ""
dispatch :: MonadBH m
=> Method
-> Text
-> Maybe L.ByteString
-> m Reply
dispatch dMethod url body = do
initReq <- liftIO $ parseUrl' url
reqHook <- bhRequestHook A.<$> getBHEnv
let reqBody = RequestBodyLBS $ fromMaybe emptyBody body
req <- liftIO
$ reqHook
$ setRequestIgnoreStatus
$ initReq { method = dMethod
, requestBody = reqBody }
mgr <- bhManager <$> getBHEnv
liftIO $ httpLbs req mgr
joinPath' :: [Text] -> Text
joinPath' = T.intercalate "/"
joinPath :: MonadBH m => [Text] -> m Text
joinPath ps = do
Server s <- bhServer <$> getBHEnv
return $ joinPath' (s:ps)
appendSearchTypeParam :: Text -> SearchType -> Text
appendSearchTypeParam originalUrl st = addQuery params originalUrl
where stText = "search_type"
params
| st == SearchTypeDfsQueryThenFetch = [(stText, Just "dfs_query_then_fetch")]
| st == SearchTypeCount = [(stText, Just "count")]
| st == SearchTypeScan = [(stText, Just "scan"), ("scroll", Just "1m")]
| st == SearchTypeQueryAndFetch = [(stText, Just "query_and_fetch")]
| st == SearchTypeDfsQueryAndFetch = [(stText, Just "dfs_query_and_fetch")]
-- used to catch 'SearchTypeQueryThenFetch', which is also the default
| otherwise = [(stText, Just "query_then_fetch")]
-- | Severely dumbed down query renderer. Assumes your data doesn't
-- need any encoding
addQuery :: [(Text, Maybe Text)] -> Text -> Text
addQuery q u = u <> rendered
where
rendered =
T.decodeUtf8 $ BB.toByteString $ NHTU.renderQueryText prependQuestionMark q
prependQuestionMark = True
bindM2 :: (Applicative m, Monad m) => (a -> b -> m c) -> m a -> m b -> m c
bindM2 f ma mb = join (f <$> ma <*> mb)
-- | Convenience function that sets up a manager and BHEnv and runs
-- the given set of bloodhound operations. Connections will be
-- pipelined automatically in accordance with the given manager
-- settings in IO. If you've got your own monad transformer stack, you
-- should use 'runBH' directly.
withBH :: ManagerSettings -> Server -> BH IO a -> IO a
withBH ms s f = do
mgr <- newManager ms
let env = mkBHEnv s mgr
runBH env f
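-- A minimal sketch, assuming an Elasticsearch node on localhost:9200
-- (the index name is hypothetical):
demo :: IO Reply
demo = withBH defaultManagerSettings (Server "http://localhost:9200") $
  createIndex defaultIndexSettings (IndexName "demo-index")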
-- Shortcut functions for HTTP methods
delete :: MonadBH m => Text -> m Reply
delete = flip (dispatch NHTM.methodDelete) Nothing
get :: MonadBH m => Text -> m Reply
get = flip (dispatch NHTM.methodGet) Nothing
head :: MonadBH m => Text -> m Reply
head = flip (dispatch NHTM.methodHead) Nothing
put :: MonadBH m => Text -> Maybe L.ByteString -> m Reply
put = dispatch NHTM.methodPut
post :: MonadBH m => Text -> Maybe L.ByteString -> m Reply
post = dispatch NHTM.methodPost
-- indexDocument s ix name doc = put (root </> s </> ix </> name </> doc) (Just encode doc)
-- http://hackage.haskell.org/package/http-client-lens-0.1.0/docs/Network-HTTP-Client-Lens.html
-- https://github.com/supki/libjenkins/blob/master/src/Jenkins/Rest/Internal.hs
-- | 'getStatus' fetches the 'Status' of a 'Server'
getStatus :: MonadBH m => m (Maybe Status)
getStatus = do
response <- get =<< url
return $ decode (responseBody response)
where
url = joinPath []
-- | 'getSnapshotRepos' gets the definitions of a subset of the
-- defined snapshot repos.
getSnapshotRepos
:: ( MonadBH m
, MonadThrow m
)
=> SnapshotRepoSelection
-> m (Either EsError [GenericSnapshotRepo])
getSnapshotRepos sel = fmap (fmap unGSRs) . parseEsResponse =<< get =<< url
where
url = joinPath ["_snapshot", selectorSeg]
selectorSeg = case sel of
AllSnapshotRepos -> "_all"
SnapshotRepoList (p :| ps) -> T.intercalate "," (renderPat <$> (p:ps))
renderPat (RepoPattern t) = t
renderPat (ExactRepo (SnapshotRepoName t)) = t
-- | Wrapper to extract the list of 'GenericSnapshotRepo' in the
-- format they're returned in
newtype GSRs = GSRs { unGSRs :: [GenericSnapshotRepo] }
instance FromJSON GSRs where
parseJSON = withObject "Collection of GenericSnapshotRepo" parse
where
parse = fmap GSRs . mapM (uncurry go) . HM.toList
go rawName = withObject "GenericSnapshotRepo" $ \o ->
GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type"
<*> o .: "settings"
-- | Create or update a snapshot repo
updateSnapshotRepo
:: ( MonadBH m
, SnapshotRepo repo
)
=> SnapshotRepoUpdateSettings
-- ^ Use 'defaultSnapshotRepoUpdateSettings' if unsure
-> repo
-> m Reply
updateSnapshotRepo SnapshotRepoUpdateSettings {..} repo =
bindM2 put url (return (Just body))
where
url = addQuery params <$> joinPath ["_snapshot", snapshotRepoName gSnapshotRepoName]
params
| repoUpdateVerify = []
| otherwise = [("verify", Just "false")]
body = encode $ object [ "type" .= gSnapshotRepoType
, "settings" .= gSnapshotRepoSettings
]
GenericSnapshotRepo {..} = toGSnapshotRepo repo
-- | Verify if a snapshot repo is working. __NOTE:__ this API did not
-- make it into Elasticsearch until 1.4. If you use an older version,
-- you will get an error here.
verifySnapshotRepo
:: ( MonadBH m
, MonadThrow m
)
=> SnapshotRepoName
-> m (Either EsError SnapshotVerification)
verifySnapshotRepo (SnapshotRepoName n) =
parseEsResponse =<< bindM2 post url (return Nothing)
where
url = joinPath ["_snapshot", n, "_verify"]
deleteSnapshotRepo :: MonadBH m => SnapshotRepoName -> m Reply
deleteSnapshotRepo (SnapshotRepoName n) = delete =<< url
where
url = joinPath ["_snapshot", n]
-- | Create and start a snapshot
createSnapshot
:: (MonadBH m)
=> SnapshotRepoName
-> SnapshotName
-> SnapshotCreateSettings
-> m Reply
createSnapshot (SnapshotRepoName repoName)
(SnapshotName snapName)
SnapshotCreateSettings {..} =
bindM2 put url (return (Just body))
where
url = addQuery params <$> joinPath ["_snapshot", repoName, snapName]
params = [("wait_for_completion", Just (boolQP snapWaitForCompletion))]
body = encode $ object prs
prs = catMaybes [ ("indices" .=) . indexSelectionName <$> snapIndices
, Just ("ignore_unavailable" .= snapIgnoreUnavailable)
, Just ("ignore_global_state" .= snapIncludeGlobalState)
, Just ("partial" .= snapPartial)
]
indexSelectionName :: IndexSelection -> Text
indexSelectionName AllIndexes = "_all"
indexSelectionName (IndexList (i :| is)) = T.intercalate "," (renderIndex <$> (i:is))
where
renderIndex (IndexName n) = n
-- | Get info about known snapshots given a pattern and repo name.
getSnapshots
:: ( MonadBH m
, MonadThrow m
)
=> SnapshotRepoName
-> SnapshotSelection
-> m (Either EsError [SnapshotInfo])
getSnapshots (SnapshotRepoName repoName) sel =
fmap (fmap unSIs) . parseEsResponse =<< get =<< url
where
url = joinPath ["_snapshot", repoName, snapPath]
snapPath = case sel of
AllSnapshots -> "_all"
SnapshotList (s :| ss) -> T.intercalate "," (renderPath <$> (s:ss))
renderPath (SnapPattern t) = t
renderPath (ExactSnap (SnapshotName t)) = t
newtype SIs = SIs { unSIs :: [SnapshotInfo] }
instance FromJSON SIs where
parseJSON = withObject "Collection of SnapshotInfo" parse
where
parse o = SIs <$> o .: "snapshots"
-- | Delete a snapshot. Cancels if it is running.
deleteSnapshot :: MonadBH m => SnapshotRepoName -> SnapshotName -> m Reply
deleteSnapshot (SnapshotRepoName repoName) (SnapshotName snapName) =
delete =<< url
where
url = joinPath ["_snapshot", repoName, snapName]
-- | Restore a snapshot to the cluster. See
-- <https://www.elastic.co/guide/en/elasticsearch/reference/1.7/modules-snapshots.html#_restore>
-- for more details.
restoreSnapshot
:: MonadBH m
=> SnapshotRepoName
-> SnapshotName
-> SnapshotRestoreSettings
-- ^ Start with 'defaultSnapshotRestoreSettings' and customize
-- from there for reasonable defaults.
-> m Reply
restoreSnapshot (SnapshotRepoName repoName)
(SnapshotName snapName)
SnapshotRestoreSettings {..} = bindM2 post url (return (Just body))
where
url = addQuery params <$> joinPath ["_snapshot", repoName, snapName, "_restore"]
params = [("wait_for_completion", Just (boolQP snapRestoreWaitForCompletion))]
body = encode (object prs)
prs = catMaybes [ ("indices" .=) . indexSelectionName <$> snapRestoreIndices
, Just ("ignore_unavailable" .= snapRestoreIgnoreUnavailable)
, Just ("include_global_state" .= snapRestoreIncludeGlobalState)
, ("rename_pattern" .=) <$> snapRestoreRenamePattern
, ("rename_replacement" .=) . renderTokens <$> snapRestoreRenameReplacement
, Just ("include_aliases" .= snapRestoreIncludeAliases)
, ("index_settings" .= ) <$> snapRestoreIndexSettingsOverrides
, ("ignore_index_settings" .= ) <$> snapRestoreIgnoreIndexSettings
]
renderTokens (t :| ts) = mconcat (renderToken <$> (t:ts))
renderToken (RRTLit t) = t
renderToken RRSubWholeMatch = "$0"
renderToken (RRSubGroup g) = T.pack (show (rrGroupRefNum g))
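-- A hypothetical restore that prefixes every restored index name,
-- assuming 'defaultSnapshotRestoreSettings'; the repo and snapshot names
-- are illustrative:
restoreRenamed :: BH IO Reply
restoreRenamed =
  restoreSnapshot (SnapshotRepoName "backups") (SnapshotName "snap-1")
    defaultSnapshotRestoreSettings
      { snapRestoreRenamePattern = Just (RestoreRenamePattern "(.+)")
      , snapRestoreRenameReplacement =
          Just (RRTLit "restored-" :| [RRSubWholeMatch])
      }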
getNodesInfo
:: ( MonadBH m
, MonadThrow m
)
=> NodeSelection
-> m (Either EsError NodesInfo)
getNodesInfo sel = parseEsResponse =<< get =<< url
where
url = joinPath ["_nodes", selectionSeg]
selectionSeg = case sel of
LocalNode -> "_local"
NodeList (l :| ls) -> T.intercalate "," (selToSeg <$> (l:ls))
AllNodes -> "_all"
selToSeg (NodeByName (NodeName n)) = n
selToSeg (NodeByFullNodeId (FullNodeId i)) = i
selToSeg (NodeByHost (Server s)) = s
selToSeg (NodeByAttribute (NodeAttrName a) v) = a <> ":" <> v
getNodesStats
:: ( MonadBH m
, MonadThrow m
)
=> NodeSelection
-> m (Either EsError NodesStats)
getNodesStats sel = parseEsResponse =<< get =<< url
where
url = joinPath ["_nodes", selectionSeg, "stats"]
selectionSeg = case sel of
LocalNode -> "_local"
NodeList (l :| ls) -> T.intercalate "," (selToSeg <$> (l:ls))
AllNodes -> "_all"
selToSeg (NodeByName (NodeName n)) = n
selToSeg (NodeByFullNodeId (FullNodeId i)) = i
selToSeg (NodeByHost (Server s)) = s
selToSeg (NodeByAttribute (NodeAttrName a) v) = a <> ":" <> v
-- | 'createIndex' will create an index given a 'Server', 'IndexSettings', and an 'IndexName'.
createIndex :: MonadBH m => IndexSettings -> IndexName -> m Reply
createIndex indexSettings (IndexName indexName) =
bindM2 put url (return body)
where url = joinPath [indexName]
body = Just $ encode indexSettings
-- | 'deleteIndex' will delete an index given a 'Server', and an 'IndexName'.
deleteIndex :: MonadBH m => IndexName -> m Reply
deleteIndex (IndexName indexName) =
delete =<< joinPath [indexName]
-- | 'updateIndexSettings' will apply a non-empty list of setting updates to an index
updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply
updateIndexSettings updates (IndexName indexName) =
bindM2 put url (return body)
where
url = joinPath [indexName, "_settings"]
body = Just (encode jsonBody)
jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
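-- An illustrative call (index name hypothetical): merge a replica-count
-- bump into the live settings of one index.
bumpReplicas :: BH IO Reply
bumpReplicas =
  updateIndexSettings (NumberOfReplicas (ReplicaCount 2) :| [])
                      (IndexName "twitter")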
getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName
-> m (Either EsError IndexSettingsSummary)
getIndexSettings (IndexName indexName) =
parseEsResponse =<< get =<< url
where
url = joinPath [indexName, "_settings"]
-- | 'optimizeIndex' will optimize a single index, list of indexes or
-- all indexes. Note that this call will block until finishing but
-- will continue even if the request times out. Concurrent requests to
-- optimize an index while another is performing will block until the
-- previous one finishes. For more information see
-- <https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-optimize.html>. Nothing
-- worthwhile comes back in the reply body, so matching on the status
-- should suffice.
--
-- 'optimizeIndex' with a maxNumSegments of 1 and onlyExpungeDeletes
-- to True is the main way to release disk space back to the OS being
-- held by deleted documents.
--
-- Note that this API was deprecated in Elasticsearch 2.1 for the
-- almost completely identical forcemerge API. Adding support to that
-- API would be trivial but due to the significant breaking changes,
-- this library cannot currently be used with >= 2.0, so that feature was omitted.
optimizeIndex :: MonadBH m => IndexSelection -> IndexOptimizationSettings -> m Reply
optimizeIndex ixs IndexOptimizationSettings {..} =
bindM2 post url (return body)
where url = addQuery params <$> joinPath [indexName, "_optimize"]
params = catMaybes [ ("max_num_segments",) . Just . showText <$> maxNumSegments
, Just ("only_expunge_deletes", Just (boolQP onlyExpungeDeletes))
, Just ("flush", Just (boolQP flushAfterOptimize))
]
indexName = indexSelectionName ixs
body = Nothing
deepMerge :: [Object] -> Object
deepMerge = LS.foldl' go mempty
where go acc = LS.foldl' go' acc . HM.toList
go' acc (k, v) = HM.insertWith merge k v acc
merge (Object a) (Object b) = Object (deepMerge [a, b])
merge _ b = b
statusCodeIs :: (Int, Int) -> Reply -> Bool
statusCodeIs r resp = inRange r $ NHTS.statusCode (responseStatus resp)
respIsTwoHunna :: Reply -> Bool
respIsTwoHunna = statusCodeIs (200, 299)
existentialQuery :: MonadBH m => Text -> m (Reply, Bool)
existentialQuery url = do
reply <- head url
return (reply, respIsTwoHunna reply)
-- | Tries to parse a response body as the expected type @a@ and
-- failing that tries to parse it as an EsError. All well-formed, JSON
-- responses from elasticsearch should fall into these two
-- categories. If they don't, an 'EsProtocolException' will be
-- thrown. If you encounter this, please report the full body it
-- reports along with your Elasticsearch version.
parseEsResponse :: (MonadThrow m, FromJSON a) => Reply
-> m (Either EsError a)
parseEsResponse reply
| respIsTwoHunna reply = case eitherDecode body of
Right a -> return (Right a)
Left _ -> tryParseError
| otherwise = tryParseError
where body = responseBody reply
tryParseError = case eitherDecode body of
Right e -> return (Left e)
-- this case should not be possible
Left _ -> explode
explode = throwM (EsProtocolException body)
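-- An illustrative caller (not from this commit): fetch a document and
-- parse it into aeson's Value via 'EsResult'; all names are hypothetical.
fetchRaw :: BH IO (Either EsError (EsResult Value))
fetchRaw = do
  reply <- getDocument (IndexName "twitter") (MappingName "tweet") (DocId "1")
  parseEsResponse reply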
-- | 'indexExists' enables you to check if an index exists. Returns 'Bool'
-- in IO
indexExists :: MonadBH m => IndexName -> m Bool
indexExists (IndexName indexName) = do
(_, exists) <- existentialQuery =<< joinPath [indexName]
return exists
-- | 'refreshIndex' will force a refresh on an index. You must
-- do this if you want to read what you wrote.
refreshIndex :: MonadBH m => IndexName -> m Reply
refreshIndex (IndexName indexName) =
bindM2 post url (return Nothing)
where url = joinPath [indexName, "_refresh"]
-- | Block until the index becomes available for indexing
-- documents. This is useful for integration tests in which
-- indices are rapidly created and deleted.
waitForYellowIndex :: MonadBH m => IndexName -> m Reply
waitForYellowIndex (IndexName indexName) = get =<< url
where url = addQuery q <$> joinPath ["_cluster","health",indexName]
q = [("wait_for_status",Just "yellow"),("timeout",Just "10s")]
stringifyOCIndex :: OpenCloseIndex -> Text
stringifyOCIndex oci = case oci of
OpenIndex -> "_open"
CloseIndex -> "_close"
openOrCloseIndexes :: MonadBH m => OpenCloseIndex -> IndexName -> m Reply
openOrCloseIndexes oci (IndexName indexName) =
bindM2 post url (return Nothing)
where ociString = stringifyOCIndex oci
url = joinPath [indexName, ociString]
-- | 'openIndex' opens an index given a 'Server' and an 'IndexName'. Explained in further detail at
-- <http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-open-close.html>
openIndex :: MonadBH m => IndexName -> m Reply
openIndex = openOrCloseIndexes OpenIndex
-- | 'closeIndex' closes an index given a 'Server' and an 'IndexName'. Explained in further detail at
-- <http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-open-close.html>
closeIndex :: MonadBH m => IndexName -> m Reply
closeIndex = openOrCloseIndexes CloseIndex
-- | 'listIndices' returns a list of all index names on a given 'Server'
listIndices :: (MonadThrow m, MonadBH m) => m [IndexName]
listIndices =
parse . responseBody =<< get =<< url
where
url = joinPath ["_cat/indices?format=json"]
parse body = maybe (throwM (EsProtocolException body)) return $ do
vals <- decode body
forM vals $ \val ->
case val of
Object obj -> do
indexVal <- HM.lookup "index" obj
case indexVal of
String txt -> Just (IndexName txt)
_ -> Nothing
_ -> Nothing
-- | 'updateIndexAliases' updates the server's index alias
-- table. Operations are atomic. Explained in further detail at
-- <https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>
updateIndexAliases :: MonadBH m => NonEmpty IndexAliasAction -> m Reply
updateIndexAliases actions = bindM2 post url (return body)
where url = joinPath ["_aliases"]
body = Just (encode bodyJSON)
bodyJSON = object [ "actions" .= toList actions]
-- | Get all aliases configured on the server.
getIndexAliases :: (MonadBH m, MonadThrow m)
=> m (Either EsError IndexAliasesSummary)
getIndexAliases = parseEsResponse =<< get =<< url
where url = joinPath ["_aliases"]
-- | 'putTemplate' creates a template given an 'IndexTemplate' and a 'TemplateName'.
-- Explained in further detail at
-- <https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html>
putTemplate :: MonadBH m => IndexTemplate -> TemplateName -> m Reply
putTemplate indexTemplate (TemplateName templateName) =
bindM2 put url (return body)
where url = joinPath ["_template", templateName]
body = Just $ encode indexTemplate
-- | 'templateExists' checks to see if a template exists.
templateExists :: MonadBH m => TemplateName -> m Bool
templateExists (TemplateName templateName) = do
(_, exists) <- existentialQuery =<< joinPath ["_template", templateName]
return exists
-- | 'deleteTemplate' is an HTTP DELETE and deletes a template.
deleteTemplate :: MonadBH m => TemplateName -> m Reply
deleteTemplate (TemplateName templateName) =
delete =<< joinPath ["_template", templateName]
-- | 'putMapping' is an HTTP PUT and has upsert semantics. Mappings are schemas
-- for documents in indexes.
putMapping :: (MonadBH m, ToJSON a) => IndexName
-> MappingName -> a -> m Reply
putMapping (IndexName indexName) (MappingName mappingName) mapping =
bindM2 put url (return body)
where url = joinPath [indexName, "_mapping", mappingName]
-- "_mapping" and mappingName above were originally transposed
-- erroneously. The correct API call is: "/INDEX/_mapping/MAPPING_NAME"
body = Just $ encode mapping
-- | 'deleteMapping' is an HTTP DELETE and deletes a mapping for a given index.
-- Mappings are schemas for documents in indexes.
deleteMapping :: MonadBH m => IndexName -> MappingName -> m Reply
deleteMapping (IndexName indexName)
(MappingName mappingName) =
-- "_mapping" and mappingName below were originally transposed
-- erroneously. The correct API call is: "/INDEX/_mapping/MAPPING_NAME"
delete =<< joinPath [indexName, "_mapping", mappingName]
versionCtlParams :: IndexDocumentSettings -> [(Text, Maybe Text)]
versionCtlParams cfg =
case idsVersionControl cfg of
NoVersionControl -> []
InternalVersion v -> versionParams v "internal"
ExternalGT (ExternalDocVersion v) -> versionParams v "external_gt"
ExternalGTE (ExternalDocVersion v) -> versionParams v "external_gte"
ForceVersion (ExternalDocVersion v) -> versionParams v "force"
where
vt = showText . docVersionNumber
versionParams v t = [ ("version", Just $ vt v)
, ("version_type", Just t)
]
-- | 'indexDocument' is the primary way to save a single document in
-- Elasticsearch. The document itself is simply something we can
-- convert into a JSON 'Value'. The 'DocId' will function as the
-- primary key for the document.
indexDocument :: (ToJSON doc, MonadBH m) => IndexName -> MappingName
-> IndexDocumentSettings -> doc -> DocId -> m Reply
indexDocument (IndexName indexName)
(MappingName mappingName) cfg document (DocId docId) =
bindM2 put url (return body)
where url = addQuery params <$> joinPath [indexName, mappingName, docId]
parentParams = case idsParent cfg of
Nothing -> []
Just (DocumentParent (DocId p)) -> [ ("parent", Just p) ]
params = versionCtlParams cfg ++ parentParams
body = Just (encode document)
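-- A hedged sketch of the usual write path; the index, mapping, and id
-- are hypothetical, and any type with a ToJSON instance will do:
indexTweet :: ToJSON doc => doc -> BH IO Reply
indexTweet doc =
  indexDocument (IndexName "twitter") (MappingName "tweet")
    defaultIndexDocumentSettings doc (DocId "1")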
-- | 'updateDocument' provides a way to perform a partial update of
-- an already indexed document.
updateDocument :: (ToJSON patch, MonadBH m) => IndexName -> MappingName
-> IndexDocumentSettings -> patch -> DocId -> m Reply
updateDocument (IndexName indexName)
(MappingName mappingName) cfg patch (DocId docId) =
bindM2 post url (return body)
where url = addQuery (versionCtlParams cfg) <$>
joinPath [indexName, mappingName, docId, "_update"]
body = Just (encode $ object ["doc" .= toJSON patch])
-- | 'deleteDocument' is the primary way to delete a single document.
deleteDocument :: MonadBH m => IndexName -> MappingName
-> DocId -> m Reply
deleteDocument (IndexName indexName)
(MappingName mappingName) (DocId docId) =
delete =<< joinPath [indexName, mappingName, docId]
-- | 'bulk' uses
-- <http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html Elasticsearch's bulk API>
-- to perform bulk operations. The 'BulkOperation' data type encodes the
-- index\/update\/delete\/create operations. You pass a 'V.Vector' of 'BulkOperation's
-- and a 'Server' to 'bulk' in order to send those operations up to your Elasticsearch
-- server to be performed. I changed from [BulkOperation] to a Vector due to memory overhead.
bulk :: MonadBH m => V.Vector BulkOperation -> m Reply
bulk bulkOps = bindM2 post url (return body)
where url = joinPath ["_bulk"]
body = Just $ encodeBulkOperations bulkOps
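-- A sketch of batching two operations into one round-trip; the payloads
-- are illustrative aeson values:
bulkDemo :: BH IO Reply
bulkDemo = bulk (V.fromList ops)
  where
    ops = [ BulkIndex (IndexName "twitter") (MappingName "tweet") (DocId "1")
                      (object ["user" .= ("bitemyapp" :: Text)])
          , BulkDelete (IndexName "twitter") (MappingName "tweet") (DocId "2")
          ]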
-- | 'encodeBulkOperations' is a convenience function for dumping a vector of 'BulkOperation'
-- into an 'L.ByteString'
encodeBulkOperations :: V.Vector BulkOperation -> L.ByteString
encodeBulkOperations stream = collapsed where
blobs = fmap encodeBulkOperation stream
mashedTaters = mash (mempty :: Builder) blobs
collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n")
mash :: Builder -> V.Vector L.ByteString -> Builder
mash =
V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x)
mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value
mkBulkStreamValue operation indexName mappingName docId =
object [operation .=
object [ "_index" .= indexName
, "_type" .= mappingName
, "_id" .= docId]]
-- | 'encodeBulkOperation' is a convenience function for dumping a single 'BulkOperation'
-- into an 'L.ByteString'
encodeBulkOperation :: BulkOperation -> L.ByteString
encodeBulkOperation (BulkIndex (IndexName indexName)
(MappingName mappingName)
(DocId docId) value) = blob
where metadata = mkBulkStreamValue "index" indexName mappingName docId
blob = encode metadata `mappend` "\n" `mappend` encode value
encodeBulkOperation (BulkCreate (IndexName indexName)
(MappingName mappingName)
(DocId docId) value) = blob
where metadata = mkBulkStreamValue "create" indexName mappingName docId
blob = encode metadata `mappend` "\n" `mappend` encode value
encodeBulkOperation (BulkDelete (IndexName indexName)
(MappingName mappingName)
(DocId docId)) = blob
where metadata = mkBulkStreamValue "delete" indexName mappingName docId
blob = encode metadata
encodeBulkOperation (BulkUpdate (IndexName indexName)
(MappingName mappingName)
(DocId docId) value) = blob
where metadata = mkBulkStreamValue "update" indexName mappingName docId
doc = object ["doc" .= value]
blob = encode metadata `mappend` "\n" `mappend` encode doc
-- | 'getDocument' is a straight-forward way to fetch a single document from
-- Elasticsearch using a 'Server', 'IndexName', 'MappingName', and a 'DocId'.
-- The 'DocId' is the primary key for your Elasticsearch document.
getDocument :: MonadBH m => IndexName -> MappingName
-> DocId -> m Reply
getDocument (IndexName indexName)
(MappingName mappingName) (DocId docId) =
get =<< joinPath [indexName, mappingName, docId]
-- | 'documentExists' enables you to check if a document exists. Returns 'Bool'
-- in IO
documentExists :: MonadBH m => IndexName -> MappingName
-> Maybe DocumentParent -> DocId -> m Bool
documentExists (IndexName indexName) (MappingName mappingName)
parent (DocId docId) = do
(_, exists) <- existentialQuery =<< url
return exists
where url = addQuery params <$> joinPath [indexName, mappingName, docId]
parentParam = fmap (\(DocumentParent (DocId p)) -> p) parent
params = LS.filter (\(_, v) -> isJust v) [("parent", parentParam)]
dispatchSearch :: MonadBH m => Text -> Search -> m Reply
dispatchSearch url search = post url' (Just (encode search))
where url' = appendSearchTypeParam url (searchType search)
-- | 'searchAll', given a 'Search', will perform that search against all indexes
-- on an Elasticsearch server. Try to avoid doing this if it can be helped.
searchAll :: MonadBH m => Search -> m Reply
searchAll = bindM2 dispatchSearch url . return
where url = joinPath ["_search"]
-- | 'searchByIndex', given a 'Search' and an 'IndexName', will perform that search
-- against all mappings within an index on an Elasticsearch server.
searchByIndex :: MonadBH m => IndexName -> Search -> m Reply
searchByIndex (IndexName indexName) = bindM2 dispatchSearch url . return
where url = joinPath [indexName, "_search"]
-- | 'searchByType', given a 'Search', 'IndexName', and 'MappingName', will perform that
-- search against a specific mapping within an index on an Elasticsearch server.
searchByType :: MonadBH m => IndexName -> MappingName -> Search
-> m Reply
searchByType (IndexName indexName)
(MappingName mappingName) = bindM2 dispatchSearch url . return
where url = joinPath [indexName, mappingName, "_search"]
-- | For a given search, request a scroll for efficient streaming of
-- search results. Note that the search is put into 'SearchTypeScan'
-- mode and thus results will not be sorted. Combine this with
-- 'advanceScroll' to efficiently stream through the full result set
getInitialScroll :: MonadBH m => IndexName -> MappingName -> Search -> m (Maybe ScrollId)
getInitialScroll (IndexName indexName) (MappingName mappingName) search = do
let url = joinPath [indexName, mappingName, "_search"]
search' = search { searchType = SearchTypeScan }
resp' <- bindM2 dispatchSearch url (return search')
let msr = decode' $ responseBody resp' :: Maybe (SearchResult ())
msid = maybe Nothing scrollId msr
return msid
scroll' :: (FromJSON a, MonadBH m, MonadThrow m) => Maybe ScrollId -> m ([Hit a], Maybe ScrollId)
scroll' Nothing = return ([], Nothing)
scroll' (Just sid) = do
res <- advanceScroll sid 60
case res of
Right SearchResult {..} -> return (hits searchHits, scrollId)
Left _ -> return ([], Nothing)
-- | Use the given scroll to fetch the next page of documents. If there are no
-- further pages, 'SearchResult.searchHits.hits' will be '[]'.
advanceScroll
:: ( FromJSON a
, MonadBH m
, MonadThrow m
)
=> ScrollId
-> NominalDiffTime
-- ^ How long should the snapshot of data be kept around? This timeout is updated every time 'advanceScroll' is used, so don't feel the need to set it to the entire duration of your search processing. Note that durations < 1s will be rounded up. Also note that 'NominalDiffTime' is an instance of Num so literals like 60 will be interpreted as seconds. 60s is a reasonable default.
-> m (Either EsError (SearchResult a))
advanceScroll (ScrollId sid) scroll = do
url <- joinPath ["_search/scroll?scroll=" <> scrollTime]
parseEsResponse =<< post url (Just . L.fromStrict $ T.encodeUtf8 sid)
where scrollTime = showText secs <> "s"
secs :: Integer
secs = round scroll
simpleAccumulator :: (FromJSON a, MonadBH m, MonadThrow m) => [Hit a] -> ([Hit a], Maybe ScrollId) -> m ([Hit a], Maybe ScrollId)
simpleAccumulator oldHits (newHits, Nothing) = return (oldHits ++ newHits, Nothing)
simpleAccumulator oldHits ([], _) = return (oldHits, Nothing)
simpleAccumulator oldHits (newHits, msid) = do
(newHits', msid') <- scroll' msid
simpleAccumulator (oldHits ++ newHits) (newHits', msid')
-- | 'scanSearch' uses the scan & scroll API of Elasticsearch,
-- for a given 'IndexName' and 'MappingName'. Note that this will
-- consume the entire search result set and will be doing O(n) list
-- appends so this may not be suitable for large result sets. In that
-- case, 'getInitialScroll' and 'advanceScroll' are good low level
-- tools. You should be able to hook them up trivially to conduit,
-- pipes, or your favorite streaming IO abstraction of choice. Note
-- that ordering on the search would destroy performance and thus is
-- ignored.
scanSearch :: (FromJSON a, MonadBH m, MonadThrow m) => IndexName -> MappingName -> Search -> m [Hit a]
scanSearch indexName mappingName search = do
msid <- getInitialScroll indexName mappingName search
(hits, msid') <- scroll' msid
(totalHits, _) <- simpleAccumulator [] (hits, msid')
return totalHits
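-- Illustrative use (mind the O(n) accumulation noted above); the query
-- and all names are hypothetical:
allUserHits :: BH IO [Hit Value]
allUserHits =
  scanSearch (IndexName "twitter") (MappingName "tweet")
    (mkSearch (Just (TermQuery (Term "user" "bitemyapp") Nothing)) Nothing)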
-- | 'mkSearch' is a helper function for defaulting additional fields of a 'Search'
-- to Nothing in case you only care about your 'Query' and 'Filter'. Use record update
-- syntax if you want to add things like aggregations or highlights while still using
-- this helper function.
mkSearch :: Maybe Query -> Maybe Filter -> Search
mkSearch query filter = Search query filter Nothing Nothing Nothing False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing
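-- Per the note above, start from 'mkSearch' and record-update the rest;
-- a hypothetical variant that only bumps the page size:
searchBigPage :: Search
searchBigPage =
  (mkSearch (Just (TermQuery (Term "user" "bitemyapp") Nothing)) Nothing)
    { size = Size 50 }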
-- | 'mkAggregateSearch' is a helper function that defaults everything in a 'Search' except for
-- the 'Query' and the 'Aggregation'.
mkAggregateSearch :: Maybe Query -> Aggregations -> Search
mkAggregateSearch query mkSearchAggs = Search query Nothing Nothing (Just mkSearchAggs) Nothing False (From 0) (Size 0) SearchTypeQueryThenFetch Nothing Nothing Nothing
-- | 'mkHighlightSearch' is a helper function that defaults everything in a 'Search' except for
-- the 'Query' and the 'Highlights'.
mkHighlightSearch :: Maybe Query -> Highlights -> Search
mkHighlightSearch query searchHighlights = Search query Nothing Nothing Nothing (Just searchHighlights) False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing Nothing
-- | 'pageSearch' is a helper function that takes a search and assigns the from
-- and size fields for the search. The from parameter defines the offset
-- from the first result you want to fetch. The size parameter allows you to
-- configure the maximum amount of hits to be returned.
pageSearch :: From -- ^ The result offset
-> Size -- ^ The number of results to return
-> Search -- ^ The current search
-> Search -- ^ The paged search
pageSearch resultOffset pageSize search = search { from = resultOffset, size = pageSize }
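-- For example, the third page of ten results starts at offset 20:
thirdPage :: Search -> Search
thirdPage = pageSearch (From 20) (Size 10)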
parseUrl' :: MonadThrow m => Text -> m Request
parseUrl' t = parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack t))
-- | Was there an optimistic concurrency control conflict when
-- indexing a document?
isVersionConflict :: Reply -> Bool
isVersionConflict = statusCheck (== 409)
isSuccess :: Reply -> Bool
isSuccess = statusCheck (inRange (200, 299))
isCreated :: Reply -> Bool
isCreated = statusCheck (== 201)
statusCheck :: (Int -> Bool) -> Reply -> Bool
statusCheck prd = prd . NHTS.statusCode . responseStatus
-- | This is a hook that can be set via the 'bhRequestHook' function
-- that will authenticate all requests using an HTTP Basic
-- Authentication header. Note that it is *strongly* recommended that
-- this option only be used over an SSL connection.
--
-- >>> (mkBHEnv myServer myManager) { bhRequestHook = basicAuthHook (EsUsername "myuser") (EsPassword "mypass") }
basicAuthHook :: Monad m => EsUsername -> EsPassword -> Request -> m Request
basicAuthHook (EsUsername u) (EsPassword p) = return . applyBasicAuth u' p'
where u' = T.encodeUtf8 u
p' = T.encodeUtf8 p
boolQP :: Bool -> Text
boolQP True = "true"
boolQP False = "false"

src/Database/V1/Bloodhound/Internal/Aggregation.hs
View File

@@ -1,416 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V1.Bloodhound.Internal.Aggregation where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Database.V1.Bloodhound.Internal.Client
import Database.V1.Bloodhound.Internal.Highlight (HitHighlight)
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
import Database.V1.Bloodhound.Internal.Sort
type Aggregations = M.Map Text Aggregation
emptyAggregations :: Aggregations
emptyAggregations = M.empty
mkAggregations :: Text -> Aggregation -> Aggregations
mkAggregations name aggregation = M.insert name aggregation emptyAggregations
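-- Because 'Aggregations' is just a 'M.Map', several named aggregations can
-- be combined with '<>'. A sketch (the aggregation names and fields are
-- illustrative):
--
-- > mkAggregations "users" (TermsAgg (mkTermsAggregation "user")) <>
-- >   mkAggregations "byDate" (DateHistogramAgg (mkDateHistogram (FieldName "postDate") Minute))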
data Aggregation = TermsAgg TermsAggregation
| CardinalityAgg CardinalityAggregation
| DateHistogramAgg DateHistogramAggregation
| ValueCountAgg ValueCountAggregation
| FilterAgg FilterAggregation
| DateRangeAgg DateRangeAggregation
| MissingAgg MissingAggregation
| TopHitsAgg TopHitsAggregation
deriving (Eq, Show)
instance ToJSON Aggregation where
toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
omitNulls ["terms" .= omitNulls [ toJSON' term,
"include" .= include,
"exclude" .= exclude,
"order" .= order,
"min_doc_count" .= minDocCount,
"size" .= size,
"shard_size" .= shardSize,
"collect_mode" .= collectMode,
"execution_hint" .= executionHint
],
"aggs" .= termAggs ]
where
toJSON' x = case x of { Left y -> "field" .= y; Right y -> "script" .= y }
toJSON (CardinalityAgg (CardinalityAggregation field precisionThreshold)) =
object ["cardinality" .= omitNulls [ "field" .= field,
"precisionThreshold" .= precisionThreshold
]
]
toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) =
omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
"interval" .= interval,
"format" .= format,
"pre_zone" .= preZone,
"post_zone" .= postZone,
"pre_offset" .= preOffset,
"post_offset" .= postOffset
],
"aggs" .= dateHistoAggs ]
toJSON (ValueCountAgg a) = object ["value_count" .= v]
where v = case a of
(FieldValueCount (FieldName n)) -> object ["field" .= n]
(ScriptValueCount (Script s)) -> object ["script" .= s]
toJSON (FilterAgg (FilterAggregation filt ags)) =
omitNulls [ "filter" .= filt
, "aggs" .= ags]
toJSON (DateRangeAgg a) = object [ "date_range" .= a
]
toJSON (MissingAgg (MissingAggregation{..})) =
object ["missing" .= object ["field" .= maField]]
toJSON (TopHitsAgg (TopHitsAggregation mfrom msize msort)) =
omitNulls ["top_hits" .= omitNulls [ "size" .= msize
, "from" .= mfrom
, "sort" .= msort
]
]
data TopHitsAggregation = TopHitsAggregation
{ taFrom :: Maybe From
, taSize :: Maybe Size
, taSort :: Maybe Sort
} deriving (Eq, Show)
data MissingAggregation = MissingAggregation
{ maField :: Text
} deriving (Eq, Show)
data TermsAggregation = TermsAggregation { term :: Either Text Text
, termInclude :: Maybe TermInclusion
, termExclude :: Maybe TermInclusion
, termOrder :: Maybe TermOrder
, termMinDocCount :: Maybe Int
, termSize :: Maybe Int
, termShardSize :: Maybe Int
, termCollectMode :: Maybe CollectionMode
, termExecutionHint :: Maybe ExecutionHint
, termAggs :: Maybe Aggregations
} deriving (Eq, Show)
data CardinalityAggregation = CardinalityAggregation { cardinalityField :: FieldName,
precisionThreshold :: Maybe Int
} deriving (Eq, Show)
data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName
, dateInterval :: Interval
, dateFormat :: Maybe Text
-- pre and post deprecated in 1.5
, datePreZone :: Maybe Text
, datePostZone :: Maybe Text
, datePreOffset :: Maybe Text
, datePostOffset :: Maybe Text
, dateAggs :: Maybe Aggregations
} deriving (Eq, Show)
data DateRangeAggregation = DateRangeAggregation { draField :: FieldName
, draFormat :: Maybe Text
, draRanges :: NonEmpty DateRangeAggRange
} deriving (Eq, Show)
data DateRangeAggRange = DateRangeFrom DateMathExpr
| DateRangeTo DateMathExpr
| DateRangeFromAndTo DateMathExpr DateMathExpr deriving (Eq, Show)
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html> for more information.
data ValueCountAggregation = FieldValueCount FieldName
| ScriptValueCount Script deriving (Eq, Show)
-- | Single-bucket filter aggregations. See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html#search-aggregations-bucket-filter-aggregation> for more information.
data FilterAggregation = FilterAggregation { faFilter :: Filter
, faAggs :: Maybe Aggregations} deriving (Eq, Show)
mkTermsAggregation :: Text -> TermsAggregation
mkTermsAggregation t = TermsAggregation (Left t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkTermsScriptAggregation :: Text -> TermsAggregation
mkTermsScriptAggregation t = TermsAggregation (Right t) Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
mkDateHistogram t i = DateHistogramAggregation t i Nothing Nothing Nothing Nothing Nothing Nothing
mkCardinalityAggregation :: FieldName -> CardinalityAggregation
mkCardinalityAggregation t = CardinalityAggregation t Nothing
data TermInclusion = TermInclusion Text
| TermPattern Text Text deriving (Eq, Show)
instance ToJSON TermInclusion where
toJSON (TermInclusion x) = toJSON x
toJSON (TermPattern pattern flags) =
omitNulls [ "pattern" .= pattern
, "flags" .= flags]
data TermOrder = TermOrder
{ termSortField :: Text
, termSortOrder :: SortOrder } deriving (Eq, Show)
instance ToJSON TermOrder where
toJSON (TermOrder termSortField termSortOrder) =
object [termSortField .= termSortOrder]
data ExecutionHint = Ordinals
| GlobalOrdinals
| GlobalOrdinalsHash
| GlobalOrdinalsLowCardinality
| Map deriving (Eq, Show)
instance ToJSON ExecutionHint where
toJSON Ordinals = "ordinals"
toJSON GlobalOrdinals = "global_ordinals"
toJSON GlobalOrdinalsHash = "global_ordinals_hash"
toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality"
toJSON Map = "map"
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#date-math> for more information.
data DateMathExpr =
DateMathExpr DateMathAnchor [DateMathModifier]
deriving (Eq, Show)
instance ToJSON DateMathExpr where
toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods))
where fmtA DMNow = "now"
fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||"
fmtMod (AddTime n u) = "+" <> showText n <> fmtU u
fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u
fmtMod (RoundDownTo u) = "/" <> fmtU u
fmtU DMYear = "y"
fmtU DMMonth = "M"
fmtU DMWeek = "w"
fmtU DMDay = "d"
fmtU DMHour = "h"
fmtU DMMinute = "m"
fmtU DMSecond = "s"
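-- For example, @DateMathExpr DMNow [SubtractTime 1 DMDay, RoundDownTo DMDay]@
-- serializes to the ES date-math string @"now-1d/d"@.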
-- | Starting point for a date range. This along with the 'DateMathModifiers' gets you the date ES will start from.
data DateMathAnchor =
DMNow
| DMDate Day
deriving (Eq, Show)
data DateMathModifier =
AddTime Int DateMathUnit
| SubtractTime Int DateMathUnit
| RoundDownTo DateMathUnit
deriving (Eq, Show)
data DateMathUnit =
DMYear
| DMMonth
| DMWeek
| DMDay
| DMHour
| DMMinute
| DMSecond
deriving (Eq, Show)
data CollectionMode = BreadthFirst
| DepthFirst deriving (Eq, Show)
type AggregationResults = M.Map Text Value
class BucketAggregation a where
key :: a -> BucketValue
docCount :: a -> Int
aggs :: a -> Maybe AggregationResults
data BucketValue = TextValue Text
| ScientificValue Scientific
| BoolValue Bool deriving (Show)
data Bucket a = Bucket { buckets :: [a]} deriving (Show)
data TermsResult = TermsResult { termKey :: BucketValue
, termsDocCount :: Int
, termsAggs :: Maybe AggregationResults } deriving (Show)
data DateHistogramResult = DateHistogramResult { dateKey :: Int
, dateKeyStr :: Maybe Text
, dateDocCount :: Int
, dateHistogramAggs :: Maybe AggregationResults } deriving (Show)
data DateRangeResult = DateRangeResult { dateRangeKey :: Text
, dateRangeFrom :: Maybe UTCTime
, dateRangeFromAsString :: Maybe Text
, dateRangeTo :: Maybe UTCTime
, dateRangeToAsString :: Maybe Text
, dateRangeDocCount :: Int
, dateRangeAggs :: Maybe AggregationResults } deriving (Show, Eq)
toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
toTerms = toAggResult
toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
toDateHistogram = toAggResult
toMissing :: Text -> AggregationResults -> Maybe MissingResult
toMissing = toAggResult
toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a)
toTopHits = toAggResult
toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a
toAggResult t a = M.lookup t a >>= deserialize
where deserialize = parseMaybe parseJSON
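-- A sketch of pulling a named aggregation out of a parsed 'SearchResult'
-- (@result@ and the "users" terms aggregation are illustrative):
--
-- > aggregations result >>= toTerms "users" :: Maybe (Bucket TermsResult)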
instance BucketAggregation TermsResult where
key = termKey
docCount = termsDocCount
aggs = termsAggs
instance BucketAggregation DateHistogramResult where
key = TextValue . showText . dateKey
docCount = dateDocCount
aggs = dateHistogramAggs
instance BucketAggregation DateRangeResult where
key = TextValue . dateRangeKey
docCount = dateRangeDocCount
aggs = dateRangeAggs
instance (FromJSON a) => FromJSON (Bucket a) where
parseJSON (Object v) = Bucket <$>
v .: "buckets"
parseJSON _ = mempty
instance FromJSON BucketValue where
parseJSON (String t) = return $ TextValue t
parseJSON (Number s) = return $ ScientificValue s
parseJSON (Bool b) = return $ BoolValue b
parseJSON _ = mempty
instance FromJSON MissingResult where
parseJSON = withObject "MissingResult" parse
where parse v = MissingResult <$> v .: "doc_count"
instance FromJSON TermsResult where
parseJSON (Object v) = TermsResult <$>
v .: "key" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v ["key", "doc_count"])
parseJSON _ = mempty
instance FromJSON DateHistogramResult where
parseJSON (Object v) = DateHistogramResult <$>
v .: "key" <*>
v .:? "key_as_string" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v [ "key"
, "doc_count"
, "key_as_string"
]
)
parseJSON _ = mempty
instance FromJSON DateRangeResult where
parseJSON = withObject "DateRangeResult" parse
where parse v = DateRangeResult <$>
v .: "key" <*>
(fmap posixMS <$> v .:? "from") <*>
v .:? "from_as_string" <*>
(fmap posixMS <$> v .:? "to") <*>
v .:? "to_as_string" <*>
v .: "doc_count" <*>
(pure $ getNamedSubAgg v [ "key"
, "from"
, "from_as_string"
, "to"
, "to_as_string"
, "doc_count"
]
)
instance (FromJSON a) => FromJSON (TopHitResult a) where
parseJSON (Object v) = TopHitResult <$>
v .: "hits"
parseJSON _ = fail "Failure in FromJSON (TopHitResult a)"
data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show)
data TopHitResult a = TopHitResult { tarHits :: (SearchHits a)
} deriving Show
data SearchHits a =
SearchHits { hitsTotal :: Int
, maxScore :: Score
, hits :: [Hit a] } deriving (Eq, Show)
instance Semigroup (SearchHits a) where
(SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb)
instance Monoid (SearchHits a) where
mempty = SearchHits 0 Nothing mempty
mappend = (<>)
data Hit a =
Hit { hitIndex :: IndexName
, hitType :: MappingName
, hitDocId :: DocId
, hitScore :: Score
, hitSource :: Maybe a
, hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
-- Try to get an AggregationResults when we don't know the
-- field name. We filter out the known keys to try to minimize the noise.
getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults
getNamedSubAgg o knownKeys = maggRes
where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o
maggRes
| HM.null unknownKeys = Nothing
| otherwise = Just . M.fromList $ HM.toList unknownKeys
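-- Sub-aggregation results can then be decoded from a parent bucket, e.g.
-- (a sketch): @termsAggs bucket >>= toTerms "age_agg"@.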
instance ToJSON CollectionMode where
toJSON BreadthFirst = "breadth_first"
toJSON DepthFirst = "depth_first"
instance ToJSON DateRangeAggregation where
toJSON DateRangeAggregation {..} =
omitNulls [ "field" .= draField
, "format" .= draFormat
, "ranges" .= toList draRanges
]
instance (FromJSON a) => FromJSON (SearchHits a) where
parseJSON (Object v) = SearchHits <$>
v .: "total" <*>
v .: "max_score" <*>
v .: "hits"
parseJSON _ = empty
instance ToJSON DateRangeAggRange where
toJSON (DateRangeFrom e) = object [ "from" .= e ]
toJSON (DateRangeTo e) = object [ "to" .= e ]
toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ]
instance (FromJSON a) => FromJSON (Hit a) where
parseJSON (Object v) = Hit <$>
v .: "_index" <*>
v .: "_type" <*>
v .: "_id" <*>
v .: "_score" <*>
v .:? "_source" <*>
v .:? "highlight"
parseJSON _ = empty

File diff suppressed because it is too large.


@ -1,144 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.Highlight where
import Bloodhound.Import
import qualified Data.Map as M
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
type HitHighlight = M.Map Text [Text]
data Highlights = Highlights { globalsettings :: Maybe HighlightSettings
, highlightFields :: [FieldHighlight]
} deriving (Show, Eq)
instance ToJSON Highlights where
toJSON (Highlights global fields) =
omitNulls (("fields" .= fields)
: highlightSettingsPairs global)
data HighlightSettings = Plain PlainHighlight
| Postings PostingsHighlight
| FastVector FastVectorHighlight
deriving (Show, Eq)
data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings)
deriving (Show, Eq)
data PlainHighlight =
PlainHighlight { plainCommon :: Maybe CommonHighlight
, plainNonPost :: Maybe NonPostings } deriving (Show, Eq)
-- This requires that index_options is set to 'offsets' in the mapping.
data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq)
-- This requires that term_vector is set to 'with_positions_offsets' in the mapping.
data FastVectorHighlight =
FastVectorHighlight { fvCommon :: Maybe CommonHighlight
, fvNonPostSettings :: Maybe NonPostings
, boundaryChars :: Maybe Text
, boundaryMaxScan :: Maybe Int
, fragmentOffset :: Maybe Int
, matchedFields :: [Text]
, phraseLimit :: Maybe Int
} deriving (Show, Eq)
data CommonHighlight =
CommonHighlight { order :: Maybe Text
, forceSource :: Maybe Bool
, tag :: Maybe HighlightTag
, encoder :: Maybe HighlightEncoder
, noMatchSize :: Maybe Int
, highlightQuery :: Maybe Query
, requireFieldMatch :: Maybe Bool
} deriving (Show, Eq)
-- Settings that are only applicable to FastVector and Plain highlighters.
data NonPostings =
NonPostings { fragmentSize :: Maybe Int
, numberOfFragments :: Maybe Int} deriving (Show, Eq)
data HighlightEncoder = DefaultEncoder
| HTMLEncoder
deriving (Show, Eq)
-- NOTE: Should the tags use some kind of HTML type, rather than Text?
data HighlightTag = TagSchema Text
| CustomTags ([Text], [Text]) -- Only the first value in each list is used unless the highlighter is fvh
deriving (Show, Eq)
highlightSettingsPairs :: Maybe HighlightSettings -> [Pair]
highlightSettingsPairs Nothing = []
highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh)
highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph)
highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh)
plainHighPairs :: Maybe PlainHighlight -> [Pair]
plainHighPairs Nothing = []
plainHighPairs (Just (PlainHighlight plCom plNonPost)) =
[ "type" .= String "plain"]
++ commonHighlightPairs plCom
++ nonPostingsToPairs plNonPost
postHighPairs :: Maybe PostingsHighlight -> [Pair]
postHighPairs Nothing = []
postHighPairs (Just (PostingsHighlight pCom)) =
("type" .= String "postings")
: commonHighlightPairs pCom
fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair]
fastVectorHighPairs Nothing = []
fastVectorHighPairs (Just
(FastVectorHighlight fvCom fvNonPostSettings' fvBoundChars
fvBoundMaxScan fvFragOff fvMatchedFields
fvPhraseLim)) =
[ "type" .= String "fvh"
, "boundary_chars" .= fvBoundChars
, "boundary_max_scan" .= fvBoundMaxScan
, "fragment_offset" .= fvFragOff
, "matched_fields" .= fvMatchedFields
, "phraseLimit" .= fvPhraseLim]
++ commonHighlightPairs fvCom
++ nonPostingsToPairs fvNonPostSettings'
commonHighlightPairs :: Maybe CommonHighlight -> [Pair]
commonHighlightPairs Nothing = []
commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder
chNoMatchSize chHighlightQuery
chRequireFieldMatch)) =
[ "order" .= chScore
, "force_source" .= chForceSource
, "encoder" .= chEncoder
, "no_match_size" .= chNoMatchSize
, "highlight_query" .= chHighlightQuery
, "require_fieldMatch" .= chRequireFieldMatch]
++ highlightTagToPairs chTag
nonPostingsToPairs :: Maybe NonPostings -> [Pair]
nonPostingsToPairs Nothing = []
nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) =
[ "fragment_size" .= npFragSize
, "number_of_fragments" .= npNumOfFrags]
highlightTagToPairs :: Maybe HighlightTag -> [Pair]
highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"]
highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre
, "post_tags" .= post]
highlightTagToPairs Nothing = []
instance ToJSON FieldHighlight where
toJSON (FieldHighlight (FieldName fName) (Just fSettings)) =
object [ fName .= fSettings ]
toJSON (FieldHighlight (FieldName fName) Nothing) =
object [ fName .= emptyObject ]
instance ToJSON HighlightSettings where
toJSON hs = omitNulls (highlightSettingsPairs (Just hs))
instance ToJSON HighlightEncoder where
toJSON DefaultEncoder = String "default"
toJSON HTMLEncoder = String "html"


@ -1,211 +0,0 @@
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.Newtypes where
import Bloodhound.Import
newtype From = From Int deriving (Eq, Show, ToJSON)
newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON)
{-| 'FieldName' is used all over the place wherever a specific field within
a document needs to be specified, usually in 'Query's or 'Filter's.
-}
newtype FieldName =
FieldName Text
deriving (Eq, Show, ToJSON, FromJSON)
newtype Boost =
Boost Double
deriving (Eq, Show, ToJSON, FromJSON)
newtype BoostTerms =
BoostTerms Double
deriving (Eq, Show, ToJSON, FromJSON)
{-| 'ReplicaCount' is part of 'IndexSettings' -}
newtype ReplicaCount =
ReplicaCount Int
deriving (Eq, Show, ToJSON)
{-| 'ShardCount' is part of 'IndexSettings' -}
newtype ShardCount =
ShardCount Int
deriving (Eq, Show, ToJSON)
{-| 'TemplateName' is used to describe which template to query/create/delete
-}
newtype TemplateName = TemplateName Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'TemplatePattern' represents a pattern which is matched against index names
-}
newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'MappingName' is part of mappings which are how ES describes and schematizes
the data in the indices.
-}
newtype MappingName = MappingName Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'DocId' is a generic wrapper value for expressing unique Document IDs.
Can be set by the user or created by ES itself. Often used in client
functions for poking at specific documents.
-}
newtype DocId = DocId Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'QueryString' is used to wrap query text bodies, be they human written or not.
-}
newtype QueryString = QueryString Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'Script' is often used in place of 'FieldName' to specify more
complex ways of extracting a value from a document.
-}
newtype Script = Script { scriptText :: Text } deriving (Eq, Show)
{-| 'CacheName' is used in 'RegexpFilter' for describing the
'CacheKey' keyed caching behavior.
-}
newtype CacheName = CacheName Text deriving (Eq, Show, ToJSON, FromJSON)
{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching.
-}
newtype CacheKey =
CacheKey Text deriving (Eq, Show, ToJSON, FromJSON)
newtype Existence =
Existence Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype NullValue =
NullValue Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype CutoffFrequency =
CutoffFrequency Double deriving (Eq, Show, ToJSON, FromJSON)
newtype Analyzer =
Analyzer Text deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxExpansions =
MaxExpansions Int deriving (Eq, Show, ToJSON, FromJSON)
{-| 'Lenient', if set to true, will cause format based failures to be
ignored. I don't know what the bloody default is, Elasticsearch
documentation didn't say what it was. Let me know if you figure it out.
-}
newtype Lenient =
Lenient Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype Tiebreaker =
Tiebreaker Double deriving (Eq, Show, ToJSON, FromJSON)
{-| 'MinimumMatch' controls how many should clauses in the bool query should
match. Can be an absolute value (2) or a percentage (30%) or a
combination of both.
-}
newtype MinimumMatch =
MinimumMatch Int deriving (Eq, Show, ToJSON, FromJSON)
newtype DisableCoord =
DisableCoord Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype IgnoreTermFrequency =
IgnoreTermFrequency Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype MinimumTermFrequency =
MinimumTermFrequency Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxQueryTerms =
MaxQueryTerms Int deriving (Eq, Show, ToJSON, FromJSON)
newtype Fuzziness =
Fuzziness Double deriving (Eq, Show, ToJSON, FromJSON)
{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -}
newtype PrefixLength =
PrefixLength Int deriving (Eq, Show, ToJSON, FromJSON)
newtype TypeName =
TypeName Text deriving (Eq, Show, ToJSON, FromJSON)
newtype PercentMatch =
PercentMatch Double deriving (Eq, Show, ToJSON, FromJSON)
newtype StopWord =
StopWord Text deriving (Eq, Show, ToJSON, FromJSON)
newtype QueryPath =
QueryPath Text deriving (Eq, Show, ToJSON, FromJSON)
{-| Allowing a wildcard at the beginning of a word (e.g. "*ing") is particularly
heavy, because all terms in the index need to be examined, just in case
they match. Leading wildcards can be disabled by setting
'AllowLeadingWildcard' to false. -}
newtype AllowLeadingWildcard =
AllowLeadingWildcard Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype LowercaseExpanded =
LowercaseExpanded Bool deriving (Eq, Show, ToJSON, FromJSON)
newtype EnablePositionIncrements =
EnablePositionIncrements Bool deriving (Eq, Show, ToJSON, FromJSON)
{-| By default, wildcard terms in a query are not analyzed.
Setting 'AnalyzeWildcard' to true enables best-effort analysis.
-}
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, ToJSON, FromJSON)
{-| 'GeneratePhraseQueries' defaults to false.
-}
newtype GeneratePhraseQueries =
GeneratePhraseQueries Bool deriving (Eq, Show, ToJSON, FromJSON)
{-| 'Locale' is used for string conversions - defaults to ROOT.
-}
newtype Locale = Locale Text deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show, ToJSON, FromJSON)
{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact
phrase matches. Default is 0.
-}
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, ToJSON, FromJSON)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, ToJSON, FromJSON)
-- | Newtype wrapper to parse ES's concerning tendency, in some APIs, to return a floating point number of milliseconds since epoch ಠ_ಠ
newtype POSIXMS = POSIXMS { posixMS :: UTCTime }
instance FromJSON POSIXMS where
parseJSON = withScientific "POSIXMS" (return . parse)
where parse n = let n' = truncate n :: Integer
in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000)))
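-- For example, the JSON number @1425254400000@ (milliseconds) parses to
-- @2015-03-02 00:00:00 UTC@.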
{-| 'IndexName' is used to describe which index to query/create/delete
-}
newtype IndexName = IndexName Text deriving (Eq, Show, ToJSON, FromJSON)
newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Show, ToJSON)
type Score = Maybe Double
newtype ShardId = ShardId { shardId :: Int }
deriving (Eq, Show, FromJSON)
-- | Milliseconds
newtype MS = MS NominalDiffTime
-- keeps the unexported constructor warnings at bay
unMS :: MS -> NominalDiffTime
unMS (MS t) = t
instance FromJSON MS where
parseJSON = withScientific "MS" (return . MS . parse)
where
-- interpret the JSON number as milliseconds; 'NominalDiffTime' counts
-- seconds, so divide rather than multiply
parse n = fromInteger (truncate n) / 1000
newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a }
deriving (Show, Eq)
instance FromJSON a => FromJSON (MaybeNA a) where
parseJSON (String "NA") = pure $ MaybeNA Nothing
parseJSON o = MaybeNA . Just <$> parseJSON o
newtype SnapshotName = SnapshotName { snapshotName :: Text }
deriving (Show, Eq, Ord, ToJSON, FromJSON)
instance FromJSON ShardCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ShardCount . parseJSON
parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText)
instance FromJSON ReplicaCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ReplicaCount . parseJSON
parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText)

File diff suppressed because it is too large.


@ -1,106 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.Sort where
import Bloodhound.Import
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
dependent with later sorts acting as tie-breakers for earlier sorts.
-}
type Sort = [SortSpec]
{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
'DistanceUnit' to express "nearness" to a single geographical point as a
sort specification.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortSpec = DefaultSortSpec DefaultSort
| GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)
instance ToJSON SortSpec where
toJSON (DefaultSortSpec
(DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
dsSortMode dsMissingSort dsNestedFilter)) =
object [dsSortFieldName .= omitNulls base] where
base = [ "order" .= dsSortOrder
, "ignore_unmapped" .= dsIgnoreUnmapped
, "mode" .= dsSortMode
, "missing" .= dsMissingSort
, "nested_filter" .= dsNestedFilter ]
toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
object [ "unit" .= units
, field .= gdsLatLon
, "order" .= gdsSortOrder ]
{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
'mkSort' convenience function for when you want to specify only the most
common parameters.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data DefaultSort =
DefaultSort { sortFieldName :: FieldName
, sortOrder :: SortOrder
-- default False
, ignoreUnmapped :: Bool
, sortMode :: Maybe SortMode
, missingSort :: Maybe Missing
, nestedFilter :: Maybe Filter } deriving (Eq, Show)
{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
encoded into "asc" or "desc" when turned into JSON.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortOrder = Ascending
| Descending deriving (Eq, Show)
instance ToJSON SortOrder where
toJSON Ascending = String "asc"
toJSON Descending = String "desc"
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
-}
data SortMode = SortMin
| SortMax
| SortSum
| SortAvg deriving (Eq, Show)
instance ToJSON SortMode where
toJSON SortMin = String "min"
toJSON SortMax = String "max"
toJSON SortSum = String "sum"
toJSON SortAvg = String "avg"
{-| 'Missing' prescribes how to handle missing fields. A missing field can be
sorted last, first, or using a custom value as a substitute.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_missing_values>
-}
data Missing = LastMissing
| FirstMissing
| CustomMissing Text deriving (Eq, Show)
instance ToJSON Missing where
toJSON LastMissing = String "_last"
toJSON FirstMissing = String "_first"
toJSON (CustomMissing txt) = String txt
{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
that you can concisely describe the usual kind of 'SortSpec's you want.
-}
mkSort :: FieldName -> SortOrder -> DefaultSort
mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing
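-- For example, sorting newest-first on a date field (the field name is
-- illustrative):
--
-- > [DefaultSortSpec (mkSort (FieldName "postDate") Descending)]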


@ -1,26 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Database.V1.Bloodhound.Internal.StringlyTyped where
import Bloodhound.Import
import qualified Data.Text as T
newtype StringlyTypedDouble = StringlyTypedDouble { unStringlyTypedDouble :: Double }
instance FromJSON StringlyTypedDouble where
parseJSON = fmap StringlyTypedDouble . parseJSON . unStringlyTypeJSON
-- | For some reason in several settings APIs, all leaf values get returned
-- as strings. This function attempts to recover from this for all
-- non-recursive JSON types. If nothing can be done, the value is left alone.
unStringlyTypeJSON :: Value -> Value
unStringlyTypeJSON (String "true") = Bool True
unStringlyTypeJSON (String "false") = Bool False
unStringlyTypeJSON (String "null") = Null
unStringlyTypeJSON v@(String t) = case readMay (T.unpack t) of
Just n -> Number n
Nothing -> v
unStringlyTypeJSON v = v
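-- For example: @String "true"@ becomes @Bool True@, @String "2"@ becomes
-- @Number 2.0@, and a non-numeric leaf like @String "2s"@ is returned
-- unchanged.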


@ -1,252 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.V1.Bloodhound.Internal.Suggest where
import Bloodhound.Import
import qualified Data.HashMap.Strict as HM
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
data Suggest = Suggest { suggestText :: Text
, suggestName :: Text
, suggestType :: SuggestType
}
deriving (Show, Eq)
instance ToJSON Suggest where
toJSON Suggest{..} = object [ "text" .= suggestText
, suggestName .= suggestType
]
instance FromJSON Suggest where
parseJSON (Object o) = do
suggestText' <- o .: "text"
let dropTextList = HM.toList $ HM.filterWithKey (\x _ -> x /= "text") o
suggestName' <- case dropTextList of
[(x, _)] -> return x
_ -> fail "error parsing Suggest field name"
suggestType' <- o .: suggestName'
return $ Suggest suggestText' suggestName' suggestType'
parseJSON x = typeMismatch "Suggest" x
data SuggestType = SuggestTypePhraseSuggester PhraseSuggester
deriving (Show, Eq)
instance ToJSON SuggestType where
toJSON (SuggestTypePhraseSuggester x) = object ["phrase" .= x]
instance FromJSON SuggestType where
parseJSON = withObject "SuggestType" parse
where parse o = phraseSuggester `taggedWith` "phrase"
where taggedWith parser k = parser =<< o .: k
phraseSuggester = pure . SuggestTypePhraseSuggester
data PhraseSuggester =
PhraseSuggester { phraseSuggesterField :: FieldName
, phraseSuggesterGramSize :: Maybe Int
, phraseSuggesterRealWordErrorLikelihood :: Maybe Int
, phraseSuggesterConfidence :: Maybe Int
, phraseSuggesterMaxErrors :: Maybe Int
, phraseSuggesterSeparator :: Maybe Text
, phraseSuggesterSize :: Maybe Size
, phraseSuggesterAnalyzer :: Maybe Analyzer
, phraseSuggesterShardSize :: Maybe Int
, phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter
, phraseSuggesterCollate :: Maybe PhraseSuggesterCollate
, phraseSuggesterCandidateGenerators :: [DirectGenerators]
}
deriving (Show, Eq)
instance ToJSON PhraseSuggester where
toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField
, "gram_size" .= phraseSuggesterGramSize
, "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood
, "confidence" .= phraseSuggesterConfidence
, "max_errors" .= phraseSuggesterMaxErrors
, "separator" .= phraseSuggesterSeparator
, "size" .= phraseSuggesterSize
, "analyzer" .= phraseSuggesterAnalyzer
, "shard_size" .= phraseSuggesterShardSize
, "highlight" .= phraseSuggesterHighlight
, "collate" .= phraseSuggesterCollate
, "direct_generator" .= phraseSuggesterCandidateGenerators
]
instance FromJSON PhraseSuggester where
parseJSON = withObject "PhraseSuggester" parse
where parse o = PhraseSuggester
<$> o .: "field"
<*> o .:? "gram_size"
<*> o .:? "real_word_error_likelihood"
<*> o .:? "confidence"
<*> o .:? "max_errors"
<*> o .:? "separator"
<*> o .:? "size"
<*> o .:? "analyzer"
<*> o .:? "shard_size"
<*> o .:? "highlight"
<*> o .:? "collate"
<*> o .:? "direct_generator" .!= []
mkPhraseSuggester :: FieldName -> PhraseSuggester
mkPhraseSuggester fName =
PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing []
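-- A sketch of a full suggest request built from this helper (the request
-- name, text, and field are illustrative):
--
-- > Suggest "drm" "mySuggestion"
-- >   (SuggestTypePhraseSuggester (mkPhraseSuggester (FieldName "message")))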
data PhraseSuggesterHighlighter =
PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text
, phraseSuggesterHighlighterPostTag :: Text
}
deriving (Show, Eq)
instance ToJSON PhraseSuggesterHighlighter where
toJSON PhraseSuggesterHighlighter{..} =
object [ "pre_tag" .= phraseSuggesterHighlighterPreTag
, "post_tag" .= phraseSuggesterHighlighterPostTag
]
instance FromJSON PhraseSuggesterHighlighter where
parseJSON = withObject "PhraseSuggesterHighlighter" parse
where parse o = PhraseSuggesterHighlighter
<$> o .: "pre_tag"
<*> o .: "post_tag"
data PhraseSuggesterCollate =
PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline
, phraseSuggesterCollatePrune :: Bool
}
deriving (Show, Eq)
instance ToJSON PhraseSuggesterCollate where
toJSON PhraseSuggesterCollate{..} = object [ "query" .= object
[ "inline" .= (inline phraseSuggesterCollateTemplateQuery)
]
, "params" .= (params phraseSuggesterCollateTemplateQuery)
, "prune" .= phraseSuggesterCollatePrune
]
instance FromJSON PhraseSuggesterCollate where
parseJSON (Object o) = do
query' <- o .: "query"
inline' <- query' .: "inline"
params' <- o .: "params"
prune' <- o .:? "prune" .!= False
return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune'
parseJSON x = typeMismatch "PhraseSuggesterCollate" x
data DirectGenerators = DirectGenerators
{ directGeneratorsField :: FieldName
, directGeneratorsSize :: Maybe Int
, directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes
, directGeneratorMaxEdits :: Maybe Double
, directGeneratorPrefixLength :: Maybe Int
, directGeneratorMinWordLength :: Maybe Int
, directGeneratorMaxInspections :: Maybe Int
, directGeneratorMinDocFreq :: Maybe Double
, directGeneratorMaxTermFreq :: Maybe Double
, directGeneratorPreFilter :: Maybe Text
, directGeneratorPostFilter :: Maybe Text
}
deriving (Show, Eq)
instance ToJSON DirectGenerators where
toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField
, "size" .= directGeneratorsSize
, "suggest_mode" .= directGeneratorSuggestMode
, "max_edits" .= directGeneratorMaxEdits
, "prefix_length" .= directGeneratorPrefixLength
, "min_word_length" .= directGeneratorMinWordLength
, "max_inspections" .= directGeneratorMaxInspections
, "min_doc_freq" .= directGeneratorMinDocFreq
, "max_term_freq" .= directGeneratorMaxTermFreq
, "pre_filter" .= directGeneratorPreFilter
, "post_filter" .= directGeneratorPostFilter
]
instance FromJSON DirectGenerators where
parseJSON = withObject "DirectGenerators" parse
where parse o = DirectGenerators
<$> o .: "field"
<*> o .:? "size"
<*> o .: "suggest_mode"
<*> o .:? "max_edits"
<*> o .:? "prefix_length"
<*> o .:? "min_word_length"
<*> o .:? "max_inspections"
<*> o .:? "min_doc_freq"
<*> o .:? "max_term_freq"
<*> o .:? "pre_filter"
<*> o .:? "post_filter"
mkDirectGenerators :: FieldName -> DirectGenerators
mkDirectGenerators fn = DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
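-- A sketch of wiring a generator into a phrase suggester via record update
-- (the field name is illustrative):
--
-- > (mkPhraseSuggester (FieldName "message"))
-- >   { phraseSuggesterCandidateGenerators = [mkDirectGenerators (FieldName "message")] }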
data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing
| DirectGeneratorSuggestModePopular
| DirectGeneratorSuggestModeAlways
deriving (Show, Eq)
instance ToJSON DirectGeneratorSuggestModeTypes where
toJSON DirectGeneratorSuggestModeMissing = "missing"
toJSON DirectGeneratorSuggestModePopular = "popular"
toJSON DirectGeneratorSuggestModeAlways = "always"
instance FromJSON DirectGeneratorSuggestModeTypes where
parseJSON = withText "DirectGeneratorSuggestModeTypes" parse
where parse "missing" = pure DirectGeneratorSuggestModeMissing
parse "popular" = pure DirectGeneratorSuggestModePopular
parse "always" = pure DirectGeneratorSuggestModeAlways
parse f = fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f)
data SuggestOptions =
SuggestOptions { suggestOptionsText :: Text
, suggestOptionsScore :: Double
, suggestOptionsFreq :: Maybe Int
, suggestOptionsHighlighted :: Maybe Text
}
deriving (Eq, Show)
instance FromJSON SuggestOptions where
parseJSON = withObject "SuggestOptions" parse
where parse o = SuggestOptions
<$> o .: "text"
<*> o .: "score"
<*> o .:? "freq"
<*> o .:? "highlighted"
data SuggestResponse =
SuggestResponse { suggestResponseText :: Text
, suggestResponseOffset :: Int
, suggestResponseLength :: Int
, suggestResponseOptions :: [SuggestOptions]
}
deriving (Eq, Show)
instance FromJSON SuggestResponse where
parseJSON = withObject "SuggestResponse" parse
where parse o = SuggestResponse
<$> o .: "text"
<*> o .: "offset"
<*> o .: "length"
<*> o .: "options"
data NamedSuggestionResponse =
NamedSuggestionResponse { nsrName :: Text
, nsrResponses :: [SuggestResponse]
}
deriving (Eq, Show)
instance FromJSON NamedSuggestionResponse where
parseJSON (Object o) = do
suggestionName' <- case HM.toList o of
[(x, _)] -> return x
_ -> fail "error parsing NamedSuggestionResponse name"
suggestionResponses' <- o .: suggestionName'
return $ NamedSuggestionResponse suggestionName' suggestionResponses'
parseJSON x = typeMismatch "NamedSuggestionResponse" x


@ -1,452 +0,0 @@
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types
-- Copyright : (C) 2014, 2015, 2016 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
-- Portability : DeriveGeneric, RecordWildCards
--
-- Data types for describing actions and data structures performed to interact
-- with Elasticsearch. The two main buckets your queries against Elasticsearch
-- will fall into are 'Query's and 'Filter's. 'Filter's are more like
-- traditional database constraints and often have preferable performance
-- properties. 'Query's support human-written textual queries, such as fuzzy
-- queries.
-------------------------------------------------------------------------------
module Database.V1.Bloodhound.Types
( defaultCache
, defaultIndexSettings
, defaultIndexDocumentSettings
, mkSort
, showText
, unpackId
, mkMatchQuery
, mkMultiMatchQuery
, mkBoolQuery
, mkRangeQuery
, mkQueryStringQuery
, mkAggregations
, mkTermsAggregation
, mkTermsScriptAggregation
, mkDateHistogram
, mkCardinalityAggregation
, mkDocVersion
, docVersionNumber
, toMissing
, toTerms
, toDateHistogram
, toTopHits
, omitNulls
, BH(..)
, runBH
, BHEnv
, bhServer
, bhManager
, bhRequestHook
, mkBHEnv
, MonadBH(..)
, Version(..)
, VersionNumber(..)
, MaybeNA(..)
, BuildHash(..)
, Status(..)
, Existence(..)
, NullValue(..)
, IndexSettings(..)
, UpdatableIndexSetting(..)
, IndexSettingsSummary(..)
, AllocationPolicy(..)
, ReplicaBounds(..)
, Bytes(..)
, gigabytes
, megabytes
, kilobytes
, FSType(..)
, InitialShardCount(..)
, NodeAttrFilter(..)
, NodeAttrName(..)
, CompoundFormat(..)
, IndexTemplate(..)
, Server(..)
, Reply
, EsResult(..)
, EsResultFound(..)
, EsError(..)
, EsProtocolException(..)
, IndexAlias(..)
, IndexAliasName(..)
, IndexAliasAction(..)
, IndexAliasCreate(..)
, IndexAliasSummary(..)
, IndexAliasesSummary(..)
, AliasRouting(..)
, SearchAliasRouting(..)
, IndexAliasRouting(..)
, RoutingValue(..)
, DocVersion
, ExternalDocVersion(..)
, VersionControl(..)
, DocumentParent(..)
, IndexDocumentSettings(..)
, Query(..)
, Search(..)
, SearchType(..)
, SearchResult(..)
, ScrollId(..)
, SearchHits(..)
, TrackSortScores
, From(..)
, Size(..)
, Source(..)
, PatternOrPatterns(..)
, Include(..)
, Exclude(..)
, Pattern(..)
, ShardResult(..)
, Hit(..)
, Filter(..)
, Seminearring(..)
, BoolMatch(..)
, Term(..)
, GeoPoint(..)
, GeoBoundingBoxConstraint(..)
, GeoBoundingBox(..)
, GeoFilterType(..)
, Distance(..)
, DistanceUnit(..)
, DistanceType(..)
, DistanceRange(..)
, OptimizeBbox(..)
, LatLon(..)
, RangeValue(..)
, RangeExecution(..)
, LessThan(..)
, LessThanEq(..)
, GreaterThan(..)
, GreaterThanEq(..)
, LessThanD(..)
, LessThanEqD(..)
, GreaterThanD(..)
, GreaterThanEqD(..)
, Regexp(..)
, RegexpFlags(..)
, RegexpFlag(..)
, FieldName(..)
, Script(..)
, IndexName(..)
, IndexSelection(..)
, NodeSelection(..)
, NodeSelector(..)
, IndexOptimizationSettings(..)
, defaultIndexOptimizationSettings
, TemplateName(..)
, TemplatePattern(..)
, MappingName(..)
, DocId(..)
, CacheName(..)
, CacheKey(..)
, BulkOperation(..)
, ReplicaCount(..)
, ShardCount(..)
, Sort
, SortMode(..)
, SortOrder(..)
, SortSpec(..)
, DefaultSort(..)
, Missing(..)
, OpenCloseIndex(..)
, Method
, Boost(..)
, MatchQuery(..)
, MultiMatchQuery(..)
, BoolQuery(..)
, BoostingQuery(..)
, CommonTermsQuery(..)
, DisMaxQuery(..)
, FilteredQuery(..)
, FuzzyLikeThisQuery(..)
, FuzzyLikeFieldQuery(..)
, FuzzyQuery(..)
, HasChildQuery(..)
, HasParentQuery(..)
, IndicesQuery(..)
, MoreLikeThisQuery(..)
, MoreLikeThisFieldQuery(..)
, NestedQuery(..)
, PrefixQuery(..)
, QueryStringQuery(..)
, SimpleQueryStringQuery(..)
, RangeQuery(..)
, RegexpQuery(..)
, QueryString(..)
, TemplateQueryInline(..)
, TemplateQueryKeyValuePairs(..)
, BooleanOperator(..)
, ZeroTermsQuery(..)
, CutoffFrequency(..)
, Analyzer(..)
, MaxExpansions(..)
, Lenient(..)
, MatchQueryType(..)
, MultiMatchQueryType(..)
, Tiebreaker(..)
, MinimumMatch(..)
, DisableCoord(..)
, CommonMinimumMatch(..)
, MinimumMatchHighLow(..)
, PrefixLength(..)
, Fuzziness(..)
, IgnoreTermFrequency(..)
, MaxQueryTerms(..)
, ScoreType(..)
, Score
, Cache
, TypeName(..)
, BoostTerms(..)
, MaxWordLength(..)
, MinWordLength(..)
, MaxDocFrequency(..)
, MinDocFrequency(..)
, PhraseSlop(..)
, StopWord(..)
, QueryPath(..)
, MinimumTermFrequency(..)
, PercentMatch(..)
, FieldDefinition(..)
, MappingField(..)
, Mapping(..)
, AllowLeadingWildcard(..)
, LowercaseExpanded(..)
, GeneratePhraseQueries(..)
, Locale(..)
, AnalyzeWildcard(..)
, EnablePositionIncrements(..)
, SimpleQueryFlag(..)
, FieldOrFields(..)
, Monoid(..)
, ToJSON(..)
, Interval(..)
, TimeInterval(..)
, ExecutionHint(..)
, CollectionMode(..)
, TermOrder(..)
, TermInclusion(..)
, SnapshotRepoSelection(..)
, GenericSnapshotRepo(..)
, SnapshotRepo(..)
, SnapshotRepoConversionError(..)
, SnapshotRepoType(..)
, GenericSnapshotRepoSettings(..)
, SnapshotRepoUpdateSettings(..)
, defaultSnapshotRepoUpdateSettings
, SnapshotRepoName(..)
, SnapshotRepoPattern(..)
, SnapshotVerification(..)
, SnapshotNodeVerification(..)
, FullNodeId(..)
, NodeName(..)
, ClusterName(..)
, NodesInfo(..)
, NodesStats(..)
, NodeStats(..)
, NodeBreakersStats(..)
, NodeBreakerStats(..)
, NodeHTTPStats(..)
, NodeTransportStats(..)
, NodeFSStats(..)
, NodeDataPathStats(..)
, NodeFSTotalStats(..)
, NodeNetworkStats(..)
, NodeThreadPoolsStats(..)
, NodeThreadPoolStats(..)
, NodeJVMStats(..)
, JVMBufferPoolStats(..)
, JVMGCStats(..)
, JVMPoolStats(..)
, NodeProcessStats(..)
, NodeOSStats(..)
, LoadAvgs(..)
, NodeIndicesStats(..)
, EsAddress(..)
, PluginName(..)
, NodeInfo(..)
, NodePluginInfo(..)
, NodeHTTPInfo(..)
, NodeTransportInfo(..)
, BoundTransportAddress(..)
, NodeNetworkInfo(..)
, MacAddress(..)
, NetworkInterfaceName(..)
, NodeNetworkInterface(..)
, NodeThreadPoolsInfo(..)
, NodeThreadPoolInfo(..)
, ThreadPoolSize(..)
, ThreadPoolType(..)
, NodeJVMInfo(..)
, JVMMemoryPool(..)
, JVMGCCollector(..)
, JVMMemoryInfo(..)
, PID(..)
, NodeOSInfo(..)
, CPUInfo(..)
, NodeProcessInfo(..)
, FsSnapshotRepo(..)
, SnapshotCreateSettings(..)
, defaultSnapshotCreateSettings
, SnapshotSelection(..)
, SnapshotPattern(..)
, SnapshotInfo(..)
, SnapshotShardFailure(..)
, ShardId(..)
, SnapshotName(..)
, SnapshotState(..)
, SnapshotRestoreSettings(..)
, defaultSnapshotRestoreSettings
, RestoreRenamePattern(..)
, RestoreRenameToken(..)
, RRGroupRefNum
, rrGroupRefNum
, mkRRGroupRefNum
, RestoreIndexSettings(..)
, Suggest(..)
, SuggestType(..)
, PhraseSuggester(..)
, PhraseSuggesterHighlighter(..)
, PhraseSuggesterCollate(..)
, mkPhraseSuggester
, SuggestOptions(..)
, SuggestResponse(..)
, NamedSuggestionResponse(..)
, DirectGenerators(..)
, mkDirectGenerators
, DirectGeneratorSuggestModeTypes (..)
, Aggregation(..)
, Aggregations
, AggregationResults
, BucketValue(..)
, Bucket(..)
, BucketAggregation(..)
, TermsAggregation(..)
, MissingAggregation(..)
, ValueCountAggregation(..)
, FilterAggregation(..)
, CardinalityAggregation(..)
, DateHistogramAggregation(..)
, DateRangeAggregation(..)
, DateRangeAggRange(..)
, DateMathExpr(..)
, DateMathAnchor(..)
, DateMathModifier(..)
, DateMathUnit(..)
, TopHitsAggregation(..)
, Highlights(..)
, FieldHighlight(..)
, HighlightSettings(..)
, PlainHighlight(..)
, PostingsHighlight(..)
, FastVectorHighlight(..)
, CommonHighlight(..)
, NonPostings(..)
, HighlightEncoder(..)
, HighlightTag(..)
, HitHighlight
, MissingResult(..)
, TermsResult(..)
, DateHistogramResult(..)
, DateRangeResult(..)
, TopHitResult(..)
, EsUsername(..)
, EsPassword(..)
) where
import Bloodhound.Import
import Database.V1.Bloodhound.Types.Class
import Database.V1.Bloodhound.Internal.Aggregation
import Database.V1.Bloodhound.Internal.Client
import Database.V1.Bloodhound.Internal.Highlight
import Database.V1.Bloodhound.Internal.Newtypes
import Database.V1.Bloodhound.Internal.Query
import Database.V1.Bloodhound.Internal.Sort
import Database.V1.Bloodhound.Internal.Suggest
data SearchResult a =
SearchResult { took :: Int
, timedOut :: Bool
, shards :: ShardResult
, searchHits :: SearchHits a
, aggregations :: Maybe AggregationResults
, scrollId :: Maybe ScrollId
, suggest :: Maybe NamedSuggestionResponse -- ^ Only one Suggestion request / response per Search is supported.
}
deriving (Eq, Show)
type TrackSortScores = Bool
data Search = Search { queryBody :: Maybe Query
, filterBody :: Maybe Filter
, sortBody :: Maybe Sort
, aggBody :: Maybe Aggregations
, highlight :: Maybe Highlights
-- default False
, trackSortScores :: TrackSortScores
, from :: From
, size :: Size
, searchType :: SearchType
, fields :: Maybe [FieldName]
, source :: Maybe Source
, suggestBody :: Maybe Suggest -- ^ Only one Suggestion request / response per Search is supported.
} deriving (Eq, Show)
data SearchType = SearchTypeQueryThenFetch
| SearchTypeDfsQueryThenFetch
| SearchTypeCount
| SearchTypeScan
| SearchTypeQueryAndFetch
| SearchTypeDfsQueryAndFetch
deriving (Eq, Show)
instance ToJSON Search where
toJSON (Search query sFilter sort searchAggs highlight sTrackSortScores sFrom sSize _ sFields sSource sSuggest) =
omitNulls [ "query" .= query
, "filter" .= sFilter
, "sort" .= sort
, "aggregations" .= searchAggs
, "highlight" .= highlight
, "from" .= sFrom
, "size" .= sSize
, "track_scores" .= sTrackSortScores
, "fields" .= sFields
, "_source" .= sSource
, "suggest" .= sSuggest]
instance (FromJSON a) => FromJSON (SearchResult a) where
parseJSON (Object v) = SearchResult <$>
v .: "took" <*>
v .: "timed_out" <*>
v .: "_shards" <*>
v .: "hits" <*>
v .:? "aggregations" <*>
v .:? "_scroll_id" <*>
v .:? "suggest"
parseJSON _ = empty


@ -1,17 +0,0 @@
{-# LANGUAGE CPP #-}
module Database.V1.Bloodhound.Types.Class
( Seminearring(..) )
where
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
#endif
class Monoid a => Seminearring a where
-- 0, +, *
(<||>) :: a -> a -> a
(<&&>) :: a -> a -> a
(<&&>) = mappend
infixr 5 <||>
infixr 5 <&&>


@ -1,10 +0,0 @@
module Database.V5.Bloodhound
( -- module Data.Aeson.Types
-- ,
module Database.V5.Bloodhound.Client
, module Database.V5.Bloodhound.Types
) where
-- import Data.Aeson.Types
import Database.V5.Bloodhound.Client
import Database.V5.Bloodhound.Types

stack-8.2.yaml.lock (new file)

@ -0,0 +1,40 @@
# This file was autogenerated by Stack.
# You should not edit this file by hand.
# For more information, please see the documentation at:
# https://docs.haskellstack.org/en/stable/lock_files
packages:
- completed:
hackage: aeson-1.3.0.0@sha256:23c353da98033eab7dca65d496df6c79c929f8d1413119648d1de01e703fbd3b,6539
pantry-tree:
size: 39767
sha256: 68512381071165c48e123cf515c1098ee455ec75b56724e31f6d570d3c865c47
original:
hackage: aeson-1.3.0.0
- completed:
hackage: http-types-0.12.1@sha256:12ce2fc081fde3cf219b630c2fb3815183a64eee236ba7aa735f666eab949610,2035
pantry-tree:
size: 832
sha256: c66c35b8f4f1b8b82f99cf8de3711027b5d0cc64bd031ab84375100038f81310
original:
hackage: http-types-0.12.1
- completed:
hackage: quickcheck-arbitrary-template-0.2.0.0@sha256:c8afad9c6e1f2a1d9813bb6c94158a99f7d6edeae8bb696b29d6c450f67811ec,2025
pantry-tree:
size: 625
sha256: 64262d8f3365305c35babbb2f5727ea3287c3fad61d3b66c3a2c68185fe92c5a
original:
hackage: quickcheck-arbitrary-template-0.2.0.0
- completed:
hackage: quickcheck-properties-0.1@sha256:3bd0890e84ec4cf1c1e6e475152c2c0d7c85915a051cf4e8bf9ec70c32df735c,1268
pantry-tree:
size: 550
sha256: 12264d8a89883b42e78c313792f8cba9af51c8fda915fce34e9780ff6972cf7e
original:
hackage: quickcheck-properties-0.1
snapshots:
- completed:
size: 507208
url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/11/6.yaml
sha256: 99d52f009dc5a53ac61ffc4e6044f910ca7b28b4596b6962135a48e8c7d3d093
original: lts-11.6

stack-8.4.yaml (new file)

@ -0,0 +1,15 @@
flags:
bloodhound-examples:
werror: true
packages:
- '.'
- './examples'
# extra-deps:
# - aeson-1.3.0.0
# - http-types-0.12.1
# - quickcheck-arbitrary-template-0.2.0.0
# - quickcheck-properties-0.1
resolver: lts-12.6

stack-8.4.yaml.lock (new file)

@ -0,0 +1,12 @@
# This file was autogenerated by Stack.
# You should not edit this file by hand.
# For more information, please see the documentation at:
# https://docs.haskellstack.org/en/stable/lock_files
packages: []
snapshots:
- completed:
size: 502349
url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/12/6.yaml
sha256: fbce9f11b2f1c637855eb29fe952f3ac16c35d46836df0dee0cc54a85e1e33d6
original: lts-12.6

stack-8.6.yaml (new file)

@ -0,0 +1,14 @@
flags:
bloodhound-examples:
werror: true
packages:
- '.'
- './examples'
extra-deps:
- git: https://github.com/bitemyapp/quickcheck-arbitrary-template.git
commit: bf9cee96a88ecce2ff920ce74c166a38712e5ed2
- quickcheck-properties-0.1
resolver: lts-13.30

stack-8.6.yaml.lock (new file)

@ -0,0 +1,33 @@
# This file was autogenerated by Stack.
# You should not edit this file by hand.
# For more information, please see the documentation at:
# https://docs.haskellstack.org/en/stable/lock_files
packages:
- completed:
cabal-file:
size: 2026
sha256: d340ae2f3fd0b54b2f899d482072be24f41eddac8ea081f11d9db260b14264c8
name: quickcheck-arbitrary-template
version: 0.2.0.0
git: https://github.com/bitemyapp/quickcheck-arbitrary-template.git
pantry-tree:
size: 859
sha256: a6c70780465af154ada9481c0a5ffcda1b069d249522634c578218c3c390cbf1
commit: bf9cee96a88ecce2ff920ce74c166a38712e5ed2
original:
git: https://github.com/bitemyapp/quickcheck-arbitrary-template.git
commit: bf9cee96a88ecce2ff920ce74c166a38712e5ed2
- completed:
hackage: quickcheck-properties-0.1@sha256:3bd0890e84ec4cf1c1e6e475152c2c0d7c85915a051cf4e8bf9ec70c32df735c,1268
pantry-tree:
size: 550
sha256: 12264d8a89883b42e78c313792f8cba9af51c8fda915fce34e9780ff6972cf7e
original:
hackage: quickcheck-properties-0.1
snapshots:
- completed:
size: 500539
url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/30.yaml
sha256: 59ad6b944c9903847fecdc1d4815e8500c1f9999d80fd1b4d2d66e408faec44b
original: lts-13.30


@ -1 +1 @@
stack-8.2.yaml
stack-8.6.yaml

stack.yaml.lock (new file)

@ -0,0 +1,12 @@
# This file was autogenerated by Stack.
# You should not edit this file by hand.
# For more information, please see the documentation at:
# https://docs.haskellstack.org/en/stable/lock_files
packages: []
snapshots:
- completed:
size: 500539
url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/13/30.yaml
sha256: 59ad6b944c9903847fecdc1d4815e8500c1f9999d80fd1b4d2d66e408faec44b
original: lts-13.30


@ -7,7 +7,7 @@ import Test.Import
import Control.Error (fmapL, note)
import qualified Data.Map as M
import qualified Database.V5.Bloodhound
import qualified Database.Bloodhound
spec :: Spec
spec =
@ -35,7 +35,7 @@ spec =
_ <- insertData
let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality
let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 }
let search' = search { Database.Bloodhound.from = From 0, size = Size 0 }
searchExpectAggs search'
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search'
@ -46,7 +46,7 @@ spec =
_ <- insertData
let stats = StatsAgg $ mkStatsAggregation $ FieldName "age"
let search = mkAggregateSearch Nothing $ mkAggregations "users" stats
let search' = search { Database.V5.Bloodhound.from = From 0, size = Size 0 }
let search' = search { Database.Bloodhound.from = From 0, size = Size 0 }
searchExpectAggs search'
let statsAggRes k n = (k, object [ "max" .= Number n
, "avg" .= Number n


@ -3,7 +3,7 @@
module Test.ApproxEq where
import Database.V5.Bloodhound
import Database.Bloodhound
import Test.Import


@ -5,7 +5,7 @@
module Test.Generators where
import Database.V5.Bloodhound
import Database.Bloodhound
import Test.Import
@ -211,9 +211,13 @@ instance Arbitrary NodeAttrName where
instance Arbitrary NodeAttrFilter where
arbitrary = do
n <- arbitrary
s:ss <- listOf1 (listOf1 arbitraryAlphaNum)
let ts = T.pack <$> s :| ss
xs <- listOf1 (listOf1 arbitraryAlphaNum)
let (s, ss) = unpackConsPartial xs
ts = T.pack <$> s :| ss
return (NodeAttrFilter n ts)
where -- listOf1 means this shouldn't blow up.
unpackConsPartial (x : xs) = (x, xs)
unpackConsPartial _ = error "unpackConsPartial failed but shouldn't have"
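-- Note: the total 'unpackConsPartial' replaces the old failable pattern
-- bind (s:ss <- ...), which would need a MonadFail instance that 'Gen'
-- does not provide under newer GHCs' MonadFail desugaring.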
instance Arbitrary VersionNumber where
arbitrary = do


@ -23,7 +23,7 @@ import Data.Text as X (Text)
import Data.Time.Calendar as X (Day(..), fromGregorian)
import Data.Time.Clock as X
import Data.Typeable as X
import Database.V5.Bloodhound as X hiding (key)
import Database.Bloodhound as X hiding (key)
import Lens.Micro as X
import Lens.Micro.Aeson as X
import Network.HTTP.Client as X hiding (Proxy, fileSize)
@ -31,7 +31,7 @@ import System.IO.Temp as X
import System.PosixCompat.Files as X
import Test.Hspec as X
import Test.Hspec.QuickCheck as X (prop)
import Test.QuickCheck as X hiding (Result, Success)
import Test.QuickCheck as X hiding (Result, Success, isSuccess)
import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid)
import Text.Pretty.Simple as X (pPrint)


@ -1,132 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Aggregation (spec) where
import Test.Common
import Test.Import
import Control.Error (fmapL, note)
import qualified Data.Map as M
import qualified Database.V1.Bloodhound
spec :: Spec
spec =
describe "Aggregation API" $ do
it "returns term aggregation results" $ withTestEnv $ do
_ <- insertData
let terms = TermsAgg $ mkTermsAggregation "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
searchExpectAggs search
searchValidBucketAgg search "users" toTerms
it "return sub-aggregation results" $ withTestEnv $ do
_ <- insertData
let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age"
agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs}
search = mkAggregateSearch Nothing $ mkAggregations "users" agg
reply <- searchByIndex testIndex search
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
usersAggResults = result >>= aggregations >>= toTerms "users"
subAggResults = usersAggResults >>= (listToMaybe . buckets) >>= termsAggs >>= toTerms "age_agg"
subAddResultsExists = isJust subAggResults
liftIO $ subAddResultsExists `shouldBe` True
it "returns cardinality aggregation results" $ withTestEnv $ do
_ <- insertData
let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user"
let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality
let search' = search { Database.V1.Bloodhound.from = From 0, size = Size 0 }
searchExpectAggs search'
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search'
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1]))
it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do
_ <- insertData
let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst }
let search = mkAggregateSearch Nothing $ mkAggregations "users" terms
searchExpectAggs search
searchValidBucketAgg search "users" toTerms
-- One of these fails with 1.7.3
it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [Map, Ordinals]
it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals]
it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do
_ <- insertData
searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map]
-- One of the above execution hint cases is the one that fails with 1.7.3.
it "can execute value_count aggregations" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <>
mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus")))
let search = mkAggregateSearch Nothing ags
let docCountPair k n = (k, object ["value" .= Number n])
res <- searchTweets search
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2
, docCountPair "bogus_count" 0
]))
it "can execute date_range aggregations" $ withTestEnv $ do
let now = fromGregorian 2015 3 14
let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0
let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0
let oldDoc = exampleTweet { postDate = ltAMonthAgo }
let newDoc = exampleTweet { postDate = ltAWeekAgo }
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1")
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2")
_ <- refreshIndex testIndex
let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth])
let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek])
let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek])
let ags = mkAggregations "date_ranges" (DateRangeAgg agg)
let search = mkAggregateSearch Nothing ags
res <- searchTweets search
liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2
let bucks = do magrs <- fmapL show (aggregations <$> res)
agrs <- note "no aggregations returned" magrs
rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs
parseEither parseJSON rawBucks
let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0
let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0
liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*"
(Just fromMonthT)
(Just "2015-02-14T00:00:00.000Z")
Nothing
Nothing
2
Nothing
, DateRangeResult "2015-03-07T00:00:00.000Z-*"
(Just fromWeekT)
(Just "2015-03-07T00:00:00.000Z")
Nothing
Nothing
1
Nothing
]
it "returns date histogram aggregation results" $ withTestEnv $ do
_ <- insertData
let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute
let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram)
searchExpectAggs search
searchValidBucketAgg search "byDate" toDateHistogram
it "can execute missing aggregations" $ withTestEnv $ do
_ <- insertData
_ <- insertExtra
let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra"))
let search = mkAggregateSearch Nothing ags
let docCountPair k n = (k, object ["doc_count" .= Number n])
res <- searchTweets search
liftIO $
fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1]))
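For reference, the date_range expectations above are plain calendar arithmetic over the two fixture documents (oldDoc posted 2015-03-01, newDoc posted 2015-03-10):

now           = 2015-03-14
now - 1 month = 2015-02-14  ->  bucket "2015-02-14T00:00:00.000Z-*", doc_count 2 (both documents)
now - 1 week  = 2015-03-07  ->  bucket "2015-03-07T00:00:00.000Z-*", doc_count 1 (newDoc only)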

View File

@@ -1,63 +0,0 @@
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.ApproxEq where
import Database.V1.Bloodhound
import Test.Import
import qualified Data.List.NonEmpty as NE
-- | Typeclass for "equal where it matters". Use this to specify
-- less-strict equivalence for things such as lists that can wind up
-- in an unpredictable order
class ApproxEq a where
(=~) :: a -> a -> Bool
showApproxEq :: a -> String
default showApproxEq :: (Show a) => a -> String
showApproxEq = show
(==~) :: (ApproxEq a) => a -> a -> Property
a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b)
instance ApproxEq NominalDiffTime where (=~) = (==)
instance ApproxEq Bool where (=~) = (==)
instance ApproxEq Int where (=~) = (==)
instance (Eq a, Show a) => ApproxEq (Maybe a) where (=~) = (==)
instance ApproxEq Char where
(=~) = (==)
instance ApproxEq NodeAttrFilter where (=~) = (==)
instance ApproxEq NodeAttrName where (=~) = (==)
instance (Eq a, Show a) => ApproxEq (NonEmpty a) where (=~) = (==)
instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where
Left a =~ Left b = a =~ b
Right a =~ Right b = a =~ b
_ =~ _ = False
showApproxEq (Left x) = "Left " <> showApproxEq x
showApproxEq (Right x) = "Right " <> showApproxEq x
instance (ApproxEq a, Show a) => ApproxEq [a] where
as =~ bs = and (zipWith (=~) as bs)
instance ApproxEq ReplicaCount where (=~) = (==)
instance ApproxEq ReplicaBounds where (=~) = (==)
instance ApproxEq Bytes where (=~) = (==)
instance ApproxEq AllocationPolicy where (=~) = (==)
instance ApproxEq InitialShardCount where (=~) = (==)
instance ApproxEq FSType where (=~) = (==)
-- | Due to the way nodeattrfilters get serialized here, they may come
-- out in a different order, but they are morally equivalent
instance ApproxEq UpdatableIndexSetting where
RoutingAllocationInclude a =~ RoutingAllocationInclude b =
NE.sort a =~ NE.sort b
RoutingAllocationExclude a =~ RoutingAllocationExclude b =
NE.sort a =~ NE.sort b
RoutingAllocationRequire a =~ RoutingAllocationRequire b =
NE.sort a =~ NE.sort b
a =~ b = a == b
showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs))
showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs))
showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs))
showApproxEq x = show x
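The idea this deleted module implemented — "equal where it matters" — reduced to a standalone sketch (illustrative names, not from the commit): order-insensitive comparison surfaced as a QuickCheck Property with a readable counterexample.

import Data.List (sort)
import Test.QuickCheck (Property, counterexample, quickCheck)

-- Compare two lists up to ordering, reporting both values on failure,
-- the same shape as (==~) above.
eqUpToOrder :: (Ord a, Show a) => [a] -> [a] -> Property
eqUpToOrder a b =
  counterexample (show a ++ " !=~ " ++ show b) (sort a == sort b)

prop_reverseIsPermutation :: [Int] -> Property
prop_reverseIsPermutation xs = eqUpToOrder (reverse xs) xs

main :: IO ()
main = quickCheck prop_reverseIsPermutation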

View File

@@ -1,77 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.BulkAPI (spec) where
import Test.Common
import Test.Import
import qualified Data.Vector as V
import qualified Lens.Micro.Aeson as LMA
newtype BulkTest =
BulkTest Text
deriving (Eq, Show)
instance ToJSON BulkTest where
toJSON (BulkTest name') =
object ["name" .= name']
instance FromJSON BulkTest where
parseJSON = withObject "BulkTest" parse
where
parse o = do
t <- o .: "name"
BulkTest <$> parseJSON t
spec :: Spec
spec =
describe "Bulk API" $
it "inserts all documents we request" $ withTestEnv $ do
_ <- insertData
let firstTest = BulkTest "blah"
let secondTest = BulkTest "bloo"
let firstDoc = BulkIndex testIndex
testMapping (DocId "2") (toJSON firstTest)
let secondDoc = BulkCreate testIndex
testMapping (DocId "3") (toJSON secondTest)
let stream = V.fromList [firstDoc, secondDoc]
_ <- bulk stream
_ <- refreshIndex testIndex
fDoc <- getDocument testIndex testMapping (DocId "2")
sDoc <- getDocument testIndex testMapping (DocId "3")
-- note that both bulk operations above were given explicit DocIds,
-- so we can fetch each document back directly.
let maybeFirst =
eitherDecode
$ responseBody fDoc
:: Either String (EsResult BulkTest)
let maybeSecond =
eitherDecode
$ responseBody sDoc
:: Either String (EsResult BulkTest)
liftIO $ do
fmap getSource maybeFirst `shouldBe` Right (Just firstTest)
fmap getSource maybeSecond `shouldBe` Right (Just secondTest)
-- As a second check, verify that both documents show up in
-- a match all query.
let query = MatchAllQuery Nothing
let search = mkSearch (Just query) Nothing
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value))
case parsed of
Left e ->
liftIO $ expectationFailure ("Expected a SearchResult but got: " <> show e)
(Right sr) -> do
liftIO $
hitsTotal (searchHits sr) `shouldBe` 3
let nameList :: [Text]
nameList =
hits (searchHits sr)
^.. traverse
. to hitSource
. _Just
. LMA.key "name"
. _String
liftIO $
nameList
`shouldBe` ["blah","bloo"]
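For context on what bulk stream puts on the wire (this is the standard Elasticsearch bulk format; the exact rendering is an assumption, not shown in the diff): one action/metadata JSON line per operation, each followed by its source document, newline-delimited. For the two operations above, roughly:

{"index":{"_index":"bloodhound-tests-twitter-1","_type":"tweet","_id":"2"}}
{"name":"blah"}
{"create":{"_index":"bloodhound-tests-twitter-1","_type":"tweet","_id":"3"}}
{"name":"bloo"}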

View File

@@ -1,289 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Common where
import Test.Import
import qualified Data.Map as M
import qualified Data.Version as Vers
import qualified Network.HTTP.Types.Status as NHTS
testServer :: Server
testServer = Server "http://localhost:9200"
testIndex :: IndexName
testIndex = IndexName "bloodhound-tests-twitter-1"
testMapping :: MappingName
testMapping = MappingName "tweet"
withTestEnv :: BH IO a -> IO a
withTestEnv = withBH defaultManagerSettings testServer
data Location = Location { lat :: Double
, lon :: Double } deriving (Eq, Show)
data Tweet = Tweet { user :: Text
, postDate :: UTCTime
, message :: Text
, age :: Int
, location :: Location
, extra :: Maybe Text }
deriving (Eq, Show)
$(deriveJSON defaultOptions ''Location)
$(deriveJSON defaultOptions ''Tweet)
data ParentMapping = ParentMapping deriving (Eq, Show)
instance ToJSON ParentMapping where
toJSON ParentMapping =
object ["properties" .=
object [ "user" .= object ["type" .= ("string" :: Text)
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)]
]]
es13 :: Vers.Version
es13 = Vers.Version [1, 3, 0] []
es12 :: Vers.Version
es12 = Vers.Version [1, 2, 0] []
es11 :: Vers.Version
es11 = Vers.Version [1, 1, 0] []
es14 :: Vers.Version
es14 = Vers.Version [1, 4, 0] []
es15 :: Vers.Version
es15 = Vers.Version [1, 5, 0] []
es16 :: Vers.Version
es16 = Vers.Version [1, 6, 0] []
es20 :: Vers.Version
es20 = Vers.Version [2, 0, 0] []
es50 :: Vers.Version
es50 = Vers.Version [5, 0, 0] []
getServerVersion :: IO (Maybe Vers.Version)
getServerVersion = fmap extractVersion <$> withTestEnv getStatus
where
extractVersion = versionNumber . number . version
createExampleIndex :: (MonadBH m) => m Reply
createExampleIndex =
createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex
deleteExampleIndex :: (MonadBH m) => m Reply
deleteExampleIndex =
deleteIndex testIndex
validateStatus :: Show body => Response body -> Int -> Expectation
validateStatus resp expected =
if actual == expected
then return ()
else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body)
where
actual = NHTS.statusCode (responseStatus resp)
body = responseBody resp
data ChildMapping = ChildMapping deriving (Eq, Show)
instance ToJSON ChildMapping where
toJSON ChildMapping =
object ["_parent" .= object ["type" .= ("parent" :: Text)]
, "properties" .=
object [ "user" .= object ["type" .= ("string" :: Text)
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)]
]]
data TweetMapping = TweetMapping deriving (Eq, Show)
instance ToJSON TweetMapping where
toJSON TweetMapping =
object ["tweet" .=
object ["properties" .=
object [ "user" .= object [ "type" .= ("string" :: Text)
]
-- Serializing the date as a date is breaking other tests, mysteriously.
-- , "postDate" .= object [ "type" .= ("date" :: Text)
-- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)]
, "message" .= object ["type" .= ("string" :: Text)]
, "age" .= object ["type" .= ("integer" :: Text)]
, "location" .= object ["type" .= ("geo_point" :: Text)]
, "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)]
]]]
exampleTweet :: Tweet
exampleTweet = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 10)
, message = "Use haskell!"
, age = 10000
, location = Location 40.12 (-71.34)
, extra = Nothing }
tweetWithExtra :: Tweet
tweetWithExtra = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 10)
, message = "Use haskell!"
, age = 10000
, location = Location 40.12 (-71.34)
, extra = Just "blah blah" }
newAge :: Int
newAge = 31337
newUser :: Text
newUser = "someotherapp"
tweetPatch :: Value
tweetPatch =
object [ "age" .= newAge
, "user" .= newUser
]
patchedTweet :: Tweet
patchedTweet = exampleTweet{age = newAge, user = newUser}
otherTweet :: Tweet
otherTweet = Tweet { user = "notmyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
(secondsToDiffTime 11)
, message = "Use haskell!"
, age = 1000
, location = Location 40.12 (-71.34)
, extra = Nothing }
resetIndex :: BH IO ()
resetIndex = do
_ <- deleteExampleIndex
_ <- createExampleIndex
_ <- putMapping testIndex testMapping TweetMapping
return ()
insertData :: BH IO Reply
insertData = do
resetIndex
insertData' defaultIndexDocumentSettings
insertData' :: IndexDocumentSettings -> BH IO Reply
insertData' ids = do
r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1")
_ <- refreshIndex testIndex
return r
updateData :: BH IO Reply
updateData = do
r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1")
_ <- refreshIndex testIndex
return r
insertOther :: BH IO ()
insertOther = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2")
_ <- refreshIndex testIndex
return ()
insertExtra :: BH IO ()
insertExtra = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4")
_ <- refreshIndex testIndex
return ()
insertWithSpaceInId :: BH IO ()
insertWithSpaceInId = do
_ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World")
_ <- refreshIndex testIndex
return ()
searchTweet :: Search -> BH IO (Either EsError Tweet)
searchTweet search = do
result <- searchTweets search
let myTweet :: Either EsError Tweet
myTweet = grabFirst result
return myTweet
searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet))
searchTweets search = parseEsResponse =<< searchByIndex testIndex search
searchExpectNoResults :: Search -> BH IO ()
searchExpectNoResults search = do
result <- searchTweets search
let emptyHits = fmap (hits . searchHits) result
liftIO $
emptyHits `shouldBe` Right []
searchExpectAggs :: Search -> BH IO ()
searchExpectAggs search = do
reply <- searchByIndex testIndex search
let isEmpty x = return (M.null x)
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
liftIO $
(result >>= aggregations >>= isEmpty) `shouldBe` Just False
searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) =>
Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO ()
searchValidBucketAgg search aggKey extractor = do
reply <- searchByIndex testIndex search
let bucketDocs = docCount . head . buckets
let result = decode (responseBody reply) :: Maybe (SearchResult Tweet)
let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x)
liftIO $
count `shouldBe` Just 1
searchTermsAggHint :: [ExecutionHint] -> BH IO ()
searchTermsAggHint hints = do
let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint }
let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint
forM_ hints $ searchExpectAggs . search
forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms)
searchTweetHighlight :: Search
-> BH IO (Either EsError (Maybe HitHighlight))
searchTweetHighlight search = do
result <- searchTweets search
let tweetHit :: Either EsError (Maybe (Hit Tweet))
tweetHit = fmap (headMay . hits . searchHits) result
myHighlight :: Either EsError (Maybe HitHighlight)
myHighlight = (join . fmap hitHighlight) <$> tweetHit
return myHighlight
searchExpectSource :: Source -> Either EsError Value -> BH IO ()
searchExpectSource src expected = do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell")
let search = (mkSearch (Just query) Nothing) { source = Just src }
reply <- searchByIndex testIndex search
result <- parseEsResponse reply
let value = grabFirst result
liftIO $
value `shouldBe` expected
atleast :: Vers.Version -> IO Bool
atleast v = getServerVersion >>= \x -> return $ x >= Just v
atmost :: Vers.Version -> IO Bool
atmost v = getServerVersion >>= \x -> return $ x <= Just v
is :: Vers.Version -> IO Bool
is v = getServerVersion >>= \x -> return $ x == Just v

View File

@@ -1,52 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Documents where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "document API" $ do
it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do
_ <- insertData
_ <- updateData
docInserted <- getDocument testIndex testMapping (DocId "1")
let newTweet = eitherDecode
(responseBody docInserted) :: Either String (EsResult Tweet)
liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet)
it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do
_ <- insertWithSpaceInId
docInserted <- getDocument testIndex testMapping (DocId "Hello World")
let newTweet = eitherDecode
(responseBody docInserted) :: Either String (EsResult Tweet)
liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet)
it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do
doc <- getDocument testIndex testMapping (DocId "bogus")
let noTweet = eitherDecode
(responseBody doc) :: Either String (EsResult Tweet)
liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing
it "can use optimistic concurrency control" $ withTestEnv $ do
let ev = ExternalDocVersion minBound
let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev }
resetIndex
res <- insertData' cfg
liftIO $ isCreated res `shouldBe` True
res' <- insertData' cfg
liftIO $ isVersionConflict res' `shouldBe` True
it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do
resetIndex
let validateStatus' stat = liftIO . flip validateStatus stat
_ <- validateStatus' 200 =<< putMapping testIndex (MappingName "child") ChildMapping
_ <- validateStatus' 200 =<< putMapping testIndex (MappingName "parent") ParentMapping
_ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1")
let parent = (Just . DocumentParent . DocId) "1"
ids = IndexDocumentSettings NoVersionControl parent
_ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2")
_ <- refreshIndex testIndex
exists <- documentExists testIndex (MappingName "child") parent (DocId "2")
liftIO $ exists `shouldBe` True

View File

@@ -1,432 +0,0 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.Generators where
import Database.V1.Bloodhound
import Test.Import
import qualified Data.HashMap.Strict as HM
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Version as Vers
import Test.QuickCheck.TH.Generators
import Test.ApproxEq
instance Arbitrary NominalDiffTime where
arbitrary = fromInteger <$> arbitrary
#if !MIN_VERSION_QuickCheck(2,8,0)
instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where
arbitrary = M.fromList <$> arbitrary
#endif
instance Arbitrary Text where
arbitrary = T.pack <$> arbitrary
instance Arbitrary UTCTime where
arbitrary = UTCTime
<$> arbitrary
<*> (fromRational . toRational <$> choose (0::Double, 86400))
instance Arbitrary Day where
arbitrary =
ModifiedJulianDay . (2000 +) <$> arbitrary
shrink =
(ModifiedJulianDay <$>) . shrink . toModifiedJulianDay
#if !MIN_VERSION_QuickCheck(2,9,0)
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary = liftA2 (:|) arbitrary arbitrary
#endif
arbitraryScore :: Gen Score
arbitraryScore = fmap getPositive <$> arbitrary
instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where
arbitrary = Hit <$> arbitrary
<*> arbitrary
<*> arbitrary
<*> arbitraryScore
<*> arbitrary
<*> arbitrary
instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where
arbitrary = reduceSize $ do
tot <- getPositive <$> arbitrary
score <- arbitraryScore
hs <- arbitrary
return $ SearchHits tot score hs
reduceSize :: Gen a -> Gen a
reduceSize f = sized $ \n -> resize (n `div` 2) f
arbitraryAlphaNum :: Gen Char
arbitraryAlphaNum = oneof [choose ('a', 'z')
,choose ('A','Z')
, choose ('0', '9')]
instance Arbitrary RoutingValue where
arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum
instance Arbitrary AliasRouting where
arbitrary = oneof [allAlias
,one
,theOther
,both']
where one = GranularAliasRouting
<$> (Just <$> arbitrary)
<*> pure Nothing
theOther = GranularAliasRouting Nothing
<$> (Just <$> arbitrary)
both' = GranularAliasRouting
<$> (Just <$> arbitrary)
<*> (Just <$> arbitrary)
allAlias = AllAliasRouting <$> arbitrary
instance Arbitrary FieldName where
arbitrary =
FieldName
. T.pack
<$> listOf1 arbitraryAlphaNum
#if MIN_VERSION_base(4,10,0)
-- Test.QuickCheck.Modifiers
qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a
qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs)
qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!"
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary =
qcNonEmptyToNonEmpty
<$> arbitrary
#endif
instance Arbitrary RegexpFlags where
arbitrary = oneof [ pure AllRegexpFlags
, pure NoRegexpFlags
, SomeRegexpFlags <$> genUniqueFlags
]
where genUniqueFlags =
NE.fromList . L.nub
<$> listOf1 arbitrary
instance Arbitrary IndexAliasCreate where
arbitrary =
IndexAliasCreate
<$> arbitrary
<*> reduceSize arbitrary
instance Arbitrary ReplicaBounds where
arbitrary = oneof [ replicasBounded
, replicasLowerBounded
, pure ReplicasUnbounded
]
where replicasBounded = do
Positive a <- arbitrary
Positive b <- arbitrary
return (ReplicasBounded a b)
replicasLowerBounded = do
Positive a <- arbitrary
return (ReplicasLowerBounded a)
instance Arbitrary NodeAttrName where
arbitrary =
NodeAttrName
. T.pack
<$> listOf1 arbitraryAlphaNum
instance Arbitrary NodeAttrFilter where
arbitrary = do
n <- arbitrary
s:ss <- listOf1 (listOf1 arbitraryAlphaNum)
let ts = T.pack <$> s :| ss
return (NodeAttrFilter n ts)
instance Arbitrary VersionNumber where
arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary
where
mk versions = VersionNumber (Vers.Version versions [])
instance Arbitrary TemplateQueryKeyValuePairs where
arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary
shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . shrink $ HM.toList x
makeArbitrary ''FilteredQuery
instance Arbitrary FilteredQuery where arbitrary = reduceSize arbitraryFilteredQuery
makeArbitrary ''Query
instance Arbitrary Query where arbitrary = reduceSize arbitraryQuery
makeArbitrary ''Filter
instance Arbitrary Filter where arbitrary = reduceSize arbitraryFilter
makeArbitrary ''IndexName
instance Arbitrary IndexName where arbitrary = arbitraryIndexName
makeArbitrary ''MappingName
instance Arbitrary MappingName where arbitrary = arbitraryMappingName
makeArbitrary ''DocId
instance Arbitrary DocId where arbitrary = arbitraryDocId
makeArbitrary ''Version
instance Arbitrary Version where arbitrary = arbitraryVersion
makeArbitrary ''BuildHash
instance Arbitrary BuildHash where arbitrary = arbitraryBuildHash
makeArbitrary ''IndexAliasRouting
instance Arbitrary IndexAliasRouting where arbitrary = arbitraryIndexAliasRouting
makeArbitrary ''ShardCount
instance Arbitrary ShardCount where arbitrary = arbitraryShardCount
makeArbitrary ''ReplicaCount
instance Arbitrary ReplicaCount where arbitrary = arbitraryReplicaCount
makeArbitrary ''TemplateName
instance Arbitrary TemplateName where arbitrary = arbitraryTemplateName
makeArbitrary ''TemplatePattern
instance Arbitrary TemplatePattern where arbitrary = arbitraryTemplatePattern
makeArbitrary ''QueryString
instance Arbitrary QueryString where arbitrary = arbitraryQueryString
makeArbitrary ''CacheName
instance Arbitrary CacheName where arbitrary = arbitraryCacheName
makeArbitrary ''CacheKey
instance Arbitrary CacheKey where arbitrary = arbitraryCacheKey
makeArbitrary ''Existence
instance Arbitrary Existence where arbitrary = arbitraryExistence
makeArbitrary ''CutoffFrequency
instance Arbitrary CutoffFrequency where arbitrary = arbitraryCutoffFrequency
makeArbitrary ''Analyzer
instance Arbitrary Analyzer where arbitrary = arbitraryAnalyzer
makeArbitrary ''MaxExpansions
instance Arbitrary MaxExpansions where arbitrary = arbitraryMaxExpansions
makeArbitrary ''Lenient
instance Arbitrary Lenient where arbitrary = arbitraryLenient
makeArbitrary ''Tiebreaker
instance Arbitrary Tiebreaker where arbitrary = arbitraryTiebreaker
makeArbitrary ''Boost
instance Arbitrary Boost where arbitrary = arbitraryBoost
makeArbitrary ''BoostTerms
instance Arbitrary BoostTerms where arbitrary = arbitraryBoostTerms
makeArbitrary ''MinimumMatch
instance Arbitrary MinimumMatch where arbitrary = arbitraryMinimumMatch
makeArbitrary ''DisableCoord
instance Arbitrary DisableCoord where arbitrary = arbitraryDisableCoord
makeArbitrary ''IgnoreTermFrequency
instance Arbitrary IgnoreTermFrequency where arbitrary = arbitraryIgnoreTermFrequency
makeArbitrary ''MinimumTermFrequency
instance Arbitrary MinimumTermFrequency where arbitrary = arbitraryMinimumTermFrequency
makeArbitrary ''MaxQueryTerms
instance Arbitrary MaxQueryTerms where arbitrary = arbitraryMaxQueryTerms
makeArbitrary ''Fuzziness
instance Arbitrary Fuzziness where arbitrary = arbitraryFuzziness
makeArbitrary ''PrefixLength
instance Arbitrary PrefixLength where arbitrary = arbitraryPrefixLength
makeArbitrary ''TypeName
instance Arbitrary TypeName where arbitrary = arbitraryTypeName
makeArbitrary ''PercentMatch
instance Arbitrary PercentMatch where arbitrary = arbitraryPercentMatch
makeArbitrary ''StopWord
instance Arbitrary StopWord where arbitrary = arbitraryStopWord
makeArbitrary ''QueryPath
instance Arbitrary QueryPath where arbitrary = arbitraryQueryPath
makeArbitrary ''AllowLeadingWildcard
instance Arbitrary AllowLeadingWildcard where arbitrary = arbitraryAllowLeadingWildcard
makeArbitrary ''LowercaseExpanded
instance Arbitrary LowercaseExpanded where arbitrary = arbitraryLowercaseExpanded
makeArbitrary ''EnablePositionIncrements
instance Arbitrary EnablePositionIncrements where arbitrary = arbitraryEnablePositionIncrements
makeArbitrary ''AnalyzeWildcard
instance Arbitrary AnalyzeWildcard where arbitrary = arbitraryAnalyzeWildcard
makeArbitrary ''GeneratePhraseQueries
instance Arbitrary GeneratePhraseQueries where arbitrary = arbitraryGeneratePhraseQueries
makeArbitrary ''Locale
instance Arbitrary Locale where arbitrary = arbitraryLocale
makeArbitrary ''MaxWordLength
instance Arbitrary MaxWordLength where arbitrary = arbitraryMaxWordLength
makeArbitrary ''MinWordLength
instance Arbitrary MinWordLength where arbitrary = arbitraryMinWordLength
makeArbitrary ''PhraseSlop
instance Arbitrary PhraseSlop where arbitrary = arbitraryPhraseSlop
makeArbitrary ''MinDocFrequency
instance Arbitrary MinDocFrequency where arbitrary = arbitraryMinDocFrequency
makeArbitrary ''MaxDocFrequency
instance Arbitrary MaxDocFrequency where arbitrary = arbitraryMaxDocFrequency
makeArbitrary ''Regexp
instance Arbitrary Regexp where arbitrary = arbitraryRegexp
makeArbitrary ''SimpleQueryStringQuery
instance Arbitrary SimpleQueryStringQuery where arbitrary = arbitrarySimpleQueryStringQuery
makeArbitrary ''FieldOrFields
instance Arbitrary FieldOrFields where arbitrary = arbitraryFieldOrFields
makeArbitrary ''SimpleQueryFlag
instance Arbitrary SimpleQueryFlag where arbitrary = arbitrarySimpleQueryFlag
makeArbitrary ''RegexpQuery
instance Arbitrary RegexpQuery where arbitrary = arbitraryRegexpQuery
makeArbitrary ''QueryStringQuery
instance Arbitrary QueryStringQuery where arbitrary = arbitraryQueryStringQuery
makeArbitrary ''RangeQuery
instance Arbitrary RangeQuery where arbitrary = arbitraryRangeQuery
makeArbitrary ''RangeValue
instance Arbitrary RangeValue where arbitrary = arbitraryRangeValue
makeArbitrary ''PrefixQuery
instance Arbitrary PrefixQuery where arbitrary = arbitraryPrefixQuery
makeArbitrary ''NestedQuery
instance Arbitrary NestedQuery where arbitrary = arbitraryNestedQuery
makeArbitrary ''MoreLikeThisFieldQuery
instance Arbitrary MoreLikeThisFieldQuery where arbitrary = arbitraryMoreLikeThisFieldQuery
makeArbitrary ''MoreLikeThisQuery
instance Arbitrary MoreLikeThisQuery where arbitrary = arbitraryMoreLikeThisQuery
makeArbitrary ''IndicesQuery
instance Arbitrary IndicesQuery where arbitrary = arbitraryIndicesQuery
makeArbitrary ''HasParentQuery
instance Arbitrary HasParentQuery where arbitrary = arbitraryHasParentQuery
makeArbitrary ''HasChildQuery
instance Arbitrary HasChildQuery where arbitrary = arbitraryHasChildQuery
makeArbitrary ''FuzzyQuery
instance Arbitrary FuzzyQuery where arbitrary = arbitraryFuzzyQuery
makeArbitrary ''FuzzyLikeFieldQuery
instance Arbitrary FuzzyLikeFieldQuery where arbitrary = arbitraryFuzzyLikeFieldQuery
makeArbitrary ''FuzzyLikeThisQuery
instance Arbitrary FuzzyLikeThisQuery where arbitrary = arbitraryFuzzyLikeThisQuery
makeArbitrary ''DisMaxQuery
instance Arbitrary DisMaxQuery where arbitrary = arbitraryDisMaxQuery
makeArbitrary ''CommonTermsQuery
instance Arbitrary CommonTermsQuery where arbitrary = arbitraryCommonTermsQuery
makeArbitrary ''DistanceRange
instance Arbitrary DistanceRange where arbitrary = arbitraryDistanceRange
makeArbitrary ''MultiMatchQuery
instance Arbitrary MultiMatchQuery where arbitrary = arbitraryMultiMatchQuery
makeArbitrary ''LessThanD
instance Arbitrary LessThanD where arbitrary = arbitraryLessThanD
makeArbitrary ''LessThanEqD
instance Arbitrary LessThanEqD where arbitrary = arbitraryLessThanEqD
makeArbitrary ''GreaterThanD
instance Arbitrary GreaterThanD where arbitrary = arbitraryGreaterThanD
makeArbitrary ''GreaterThanEqD
instance Arbitrary GreaterThanEqD where arbitrary = arbitraryGreaterThanEqD
makeArbitrary ''LessThan
instance Arbitrary LessThan where arbitrary = arbitraryLessThan
makeArbitrary ''LessThanEq
instance Arbitrary LessThanEq where arbitrary = arbitraryLessThanEq
makeArbitrary ''GreaterThan
instance Arbitrary GreaterThan where arbitrary = arbitraryGreaterThan
makeArbitrary ''GreaterThanEq
instance Arbitrary GreaterThanEq where arbitrary = arbitraryGreaterThanEq
makeArbitrary ''GeoPoint
instance Arbitrary GeoPoint where arbitrary = arbitraryGeoPoint
makeArbitrary ''NullValue
instance Arbitrary NullValue where arbitrary = arbitraryNullValue
makeArbitrary ''MinimumMatchHighLow
instance Arbitrary MinimumMatchHighLow where arbitrary = arbitraryMinimumMatchHighLow
makeArbitrary ''CommonMinimumMatch
instance Arbitrary CommonMinimumMatch where arbitrary = arbitraryCommonMinimumMatch
makeArbitrary ''BoostingQuery
instance Arbitrary BoostingQuery where arbitrary = arbitraryBoostingQuery
makeArbitrary ''BoolQuery
instance Arbitrary BoolQuery where arbitrary = arbitraryBoolQuery
makeArbitrary ''MatchQuery
instance Arbitrary MatchQuery where arbitrary = arbitraryMatchQuery
makeArbitrary ''MultiMatchQueryType
instance Arbitrary MultiMatchQueryType where arbitrary = arbitraryMultiMatchQueryType
makeArbitrary ''BooleanOperator
instance Arbitrary BooleanOperator where arbitrary = arbitraryBooleanOperator
makeArbitrary ''ZeroTermsQuery
instance Arbitrary ZeroTermsQuery where arbitrary = arbitraryZeroTermsQuery
makeArbitrary ''MatchQueryType
instance Arbitrary MatchQueryType where arbitrary = arbitraryMatchQueryType
makeArbitrary ''SearchAliasRouting
instance Arbitrary SearchAliasRouting where arbitrary = arbitrarySearchAliasRouting
makeArbitrary ''ScoreType
instance Arbitrary ScoreType where arbitrary = arbitraryScoreType
makeArbitrary ''Distance
instance Arbitrary Distance where arbitrary = arbitraryDistance
makeArbitrary ''DistanceUnit
instance Arbitrary DistanceUnit where arbitrary = arbitraryDistanceUnit
makeArbitrary ''DistanceType
instance Arbitrary DistanceType where arbitrary = arbitraryDistanceType
makeArbitrary ''OptimizeBbox
instance Arbitrary OptimizeBbox where arbitrary = arbitraryOptimizeBbox
makeArbitrary ''GeoBoundingBoxConstraint
instance Arbitrary GeoBoundingBoxConstraint where arbitrary = arbitraryGeoBoundingBoxConstraint
makeArbitrary ''GeoFilterType
instance Arbitrary GeoFilterType where arbitrary = arbitraryGeoFilterType
makeArbitrary ''GeoBoundingBox
instance Arbitrary GeoBoundingBox where arbitrary = arbitraryGeoBoundingBox
makeArbitrary ''LatLon
instance Arbitrary LatLon where arbitrary = arbitraryLatLon
makeArbitrary ''RangeExecution
instance Arbitrary RangeExecution where arbitrary = arbitraryRangeExecution
makeArbitrary ''RegexpFlag
instance Arbitrary RegexpFlag where arbitrary = arbitraryRegexpFlag
makeArbitrary ''BoolMatch
instance Arbitrary BoolMatch where arbitrary = arbitraryBoolMatch
makeArbitrary ''Term
instance Arbitrary Term where arbitrary = arbitraryTerm
makeArbitrary ''IndexSettings
instance Arbitrary IndexSettings where arbitrary = arbitraryIndexSettings
makeArbitrary ''UpdatableIndexSetting
instance Arbitrary UpdatableIndexSetting where
arbitrary = arbitraryUpdatableIndexSetting
makeArbitrary ''Bytes
instance Arbitrary Bytes where arbitrary = arbitraryBytes
makeArbitrary ''AllocationPolicy
instance Arbitrary AllocationPolicy where arbitrary = arbitraryAllocationPolicy
makeArbitrary ''InitialShardCount
instance Arbitrary InitialShardCount where arbitrary = arbitraryInitialShardCount
makeArbitrary ''FSType
instance Arbitrary FSType where arbitrary = arbitraryFSType
makeArbitrary ''CompoundFormat
instance Arbitrary CompoundFormat where arbitrary = arbitraryCompoundFormat
makeArbitrary ''FsSnapshotRepo
instance Arbitrary FsSnapshotRepo where arbitrary = arbitraryFsSnapshotRepo
makeArbitrary ''SnapshotRepoName
instance Arbitrary SnapshotRepoName where arbitrary = arbitrarySnapshotRepoName
makeArbitrary ''TemplateQueryInline
instance Arbitrary TemplateQueryInline where arbitrary = arbitraryTemplateQueryInline
makeArbitrary ''DirectGeneratorSuggestModeTypes
instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = arbitraryDirectGeneratorSuggestModeTypes
makeArbitrary ''DirectGenerators
instance Arbitrary DirectGenerators where arbitrary = arbitraryDirectGenerators
makeArbitrary ''PhraseSuggesterCollate
instance Arbitrary PhraseSuggesterCollate where arbitrary = arbitraryPhraseSuggesterCollate
makeArbitrary ''PhraseSuggesterHighlighter
instance Arbitrary PhraseSuggesterHighlighter where arbitrary = arbitraryPhraseSuggesterHighlighter
makeArbitrary ''Size
instance Arbitrary Size where arbitrary = arbitrarySize
makeArbitrary ''PhraseSuggester
instance Arbitrary PhraseSuggester where arbitrary = arbitraryPhraseSuggester
makeArbitrary ''SuggestType
instance Arbitrary SuggestType where arbitrary = arbitrarySuggestType
makeArbitrary ''Suggest
instance Arbitrary Suggest where arbitrary = arbitrarySuggest
makeArbitrary ''Script
instance Arbitrary Script where arbitrary = arbitraryScript
newtype UpdatableIndexSetting' =
UpdatableIndexSetting' UpdatableIndexSetting
deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable)
instance Arbitrary UpdatableIndexSetting' where
arbitrary = do
settings <- arbitrary
return $ UpdatableIndexSetting' $ case settings of
RoutingAllocationInclude xs ->
RoutingAllocationInclude (dropDuplicateAttrNames xs)
RoutingAllocationExclude xs ->
RoutingAllocationExclude (dropDuplicateAttrNames xs)
RoutingAllocationRequire xs ->
RoutingAllocationRequire (dropDuplicateAttrNames xs)
x -> x
where
dropDuplicateAttrNames =
NE.fromList . L.nubBy sameAttrName . NE.toList
sameAttrName a b =
nodeAttrFilterName a == nodeAttrFilterName b
-- shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (shrink x)

View File

@@ -1,58 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Import
( module X
, module Test.Import
) where
import Control.Applicative as X
import Control.Exception as X (evaluate)
import Control.Monad as X
import Control.Monad.Catch as X
import Control.Monad.Reader as X
import Data.Aeson as X
import Data.Aeson.TH as X
import Data.Aeson.Types as X (parseEither)
import Data.Maybe as X
import Data.List.NonEmpty as X (NonEmpty(..))
import Data.Monoid as X
import Data.Ord as X (comparing)
import Data.Proxy as X
import Data.Text as X (Text)
import Data.Time.Calendar as X (Day(..), fromGregorian)
import Data.Time.Clock as X
import Data.Typeable as X
import Database.V1.Bloodhound as X hiding (key)
import Lens.Micro as X
import Lens.Micro.Aeson as X
import Network.HTTP.Client as X hiding (Proxy, fileSize)
import System.IO.Temp as X
import System.PosixCompat.Files as X
import Test.Hspec as X
import Test.Hspec.QuickCheck as X (prop)
import Test.QuickCheck as X hiding (Result, Success)
import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid)
import Text.Pretty.Simple as X (pPrint)
import qualified Data.List as L
noDuplicates :: Eq a => [a] -> Bool
noDuplicates xs = L.nub xs == xs
getSource :: EsResult a -> Maybe a
getSource = fmap _source . foundResult
grabFirst :: Either EsError (SearchResult a) -> Either EsError a
grabFirst r =
case fmap (hitSource . head . hits . searchHits) r of
(Left e) -> Left e
(Right Nothing) -> Left (EsError 500 "Source was missing")
(Right (Just x)) -> Right x
when' :: Monad m => m Bool -> m () -> m ()
when' b f = b >>= \x -> when x f
headMay :: [a] -> Maybe a
headMay (x : _) = Just x
headMay _ = Nothing
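One hazard worth noting in the helpers above: grabFirst goes through the partial head, so a response with zero hits crashes instead of producing a Left. A total variant built on the module's own headMay (hypothetical name, same types as above) would be:

grabFirstTotal :: Either EsError (SearchResult a) -> Either EsError a
grabFirstTotal r =
  case fmap (fmap hitSource . headMay . hits . searchHits) r of
    Left e                -> Left e
    Right Nothing         -> Left (EsError 500 "No hits returned")
    Right (Just Nothing)  -> Left (EsError 500 "Source was missing")
    Right (Just (Just x)) -> Right x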

View File

@@ -1,78 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Indices where
import Test.Common
import Test.Import
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
spec :: Spec
spec = do
describe "Index create/delete API" $ do
it "creates and then deletes the requested index" $ withTestEnv $ do
-- priming state.
_ <- deleteExampleIndex
resp <- createExampleIndex
deleteResp <- deleteExampleIndex
liftIO $ do
validateStatus resp 200
validateStatus deleteResp 200
describe "Index aliases" $ do
let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")
let alias = IndexAlias (testIndex) aname
let create = IndexAliasCreate Nothing Nothing
let action = AddAlias alias create
it "handles the simple case of aliasing an existing index" $ do
withTestEnv $ do
resetIndex
resp <- updateIndexAliases (action :| [])
liftIO $ validateStatus resp 200
let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
(do aliases <- withTestEnv getIndexAliases
let expected = IndexAliasSummary alias create
case aliases of
Right (IndexAliasesSummary summs) ->
L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
describe "Index Listing" $ do
it "returns a list of index names" $ withTestEnv $ do
_ <- createExampleIndex
ixns <- listIndices
liftIO (ixns `shouldContain` [testIndex])
describe "Index Settings" $ do
it "persists settings" $ withTestEnv $ do
_ <- deleteExampleIndex
_ <- createExampleIndex
let updates = BlocksWrite False :| []
updateResp <- updateIndexSettings updates testIndex
liftIO $ validateStatus updateResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
(NE.toList updates))
it "allows total fields to be set" $ when' (atleast es50) $ withTestEnv $ do
_ <- deleteExampleIndex
_ <- createExampleIndex
let updates = FailOnMergeFailure True :| []
updateResp <- updateIndexSettings updates testIndex
liftIO $ validateStatus updateResp 200
getResp <- getIndexSettings testIndex
liftIO $
getResp `shouldBe` Right (IndexSettingsSummary
testIndex
(IndexSettings (ShardCount 1) (ReplicaCount 0))
(NE.toList updates))
describe "Index Optimization" $ do
it "returns a successful response upon completion" $ withTestEnv $ do
_ <- createExampleIndex
resp <- optimizeIndex (IndexList (testIndex :| [])) defaultIndexOptimizationSettings
liftIO $ validateStatus resp 200

View File

@@ -1,202 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Test.JSON (spec) where
import Test.Import
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import Test.ApproxEq
import Test.Generators
propJSON :: forall a
. ( Arbitrary a
, ToJSON a
, FromJSON a
, Show a
, Eq a
, Typeable a
)
=> Proxy a -> Spec
propJSON _ = prop testName $ \(a :: a) ->
let jsonStr = "via " <> BL8.unpack (encode a)
in counterexample jsonStr (parseEither parseJSON (toJSON a)
=== Right a)
where testName = show ty <> " FromJSON/ToJSON roundtrips"
ty = typeOf (undefined :: a)
propApproxJSON :: forall a
. ( Arbitrary a
, ToJSON a
, FromJSON a
, Show a
, ApproxEq a
, Typeable a
)
=> Proxy a -> Spec
propApproxJSON _ = prop testName $ \(a :: a) ->
let jsonStr = "via " <> BL8.unpack (encode a)
in counterexample jsonStr (parseEither parseJSON (toJSON a)
==~ Right a)
where testName = show ty <> " FromJSON/ToJSON roundtrips"
ty = typeOf (undefined :: a)
spec :: Spec
spec = do
describe "ToJSON RegexpFlags" $ do
it "generates the correct JSON for AllRegexpFlags" $
toJSON AllRegexpFlags `shouldBe` String "ALL"
it "generates the correct JSON for NoRegexpFlags" $
toJSON NoRegexpFlags `shouldBe` String "NONE"
it "generates the correct JSON for SomeRegexpFlags" $
let flags = AnyString :| [ Automaton
, Complement
, Empty
, Intersection
, Interval ]
in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL"
prop "removes duplicates from flags" $ \(flags :: RegexpFlags) ->
let String str = toJSON flags
flagStrs = T.splitOn "|" str
in noDuplicates flagStrs
describe "omitNulls" $ do
it "checks that omitNulls drops list elements when it should" $
let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int]))
, "test2" .= (toJSON ("some value" :: Text))]
in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
it "checks that omitNulls doesn't drop list elements when it shouldn't" $
let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int]))
, "test2" .= (toJSON ("some value" :: Text))]
in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0]))
, ("test2", String "some value")])
it "checks that omitNulls drops non list elements when it should" $
let dropped = omitNulls $ [ "test1" .= (toJSON Null)
, "test2" .= (toJSON ("some value" :: Text))]
in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")])
it "checks that omitNulls doesn't drop non list elements when it shouldn't" $
let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int))
, "test2" .= (toJSON ("some value" :: Text))]
in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0)
, ("test2", String "some value")])
describe "Exact isomorphism JSON instances" $ do
propJSON (Proxy :: Proxy Version)
propJSON (Proxy :: Proxy IndexName)
propJSON (Proxy :: Proxy MappingName)
propJSON (Proxy :: Proxy DocId)
propJSON (Proxy :: Proxy IndexAliasRouting)
propJSON (Proxy :: Proxy RoutingValue)
propJSON (Proxy :: Proxy ShardCount)
propJSON (Proxy :: Proxy ReplicaCount)
propJSON (Proxy :: Proxy TemplateName)
propJSON (Proxy :: Proxy TemplatePattern)
propJSON (Proxy :: Proxy QueryString)
propJSON (Proxy :: Proxy FieldName)
propJSON (Proxy :: Proxy CacheName)
propJSON (Proxy :: Proxy CacheKey)
propJSON (Proxy :: Proxy Existence)
propJSON (Proxy :: Proxy CutoffFrequency)
propJSON (Proxy :: Proxy Analyzer)
propJSON (Proxy :: Proxy MaxExpansions)
propJSON (Proxy :: Proxy Lenient)
propJSON (Proxy :: Proxy Tiebreaker)
propJSON (Proxy :: Proxy Boost)
propJSON (Proxy :: Proxy BoostTerms)
propJSON (Proxy :: Proxy MinimumMatch)
propJSON (Proxy :: Proxy DisableCoord)
propJSON (Proxy :: Proxy IgnoreTermFrequency)
propJSON (Proxy :: Proxy MinimumTermFrequency)
propJSON (Proxy :: Proxy MaxQueryTerms)
propJSON (Proxy :: Proxy Fuzziness)
propJSON (Proxy :: Proxy PrefixLength)
propJSON (Proxy :: Proxy TypeName)
propJSON (Proxy :: Proxy PercentMatch)
propJSON (Proxy :: Proxy StopWord)
propJSON (Proxy :: Proxy QueryPath)
propJSON (Proxy :: Proxy AllowLeadingWildcard)
propJSON (Proxy :: Proxy LowercaseExpanded)
propJSON (Proxy :: Proxy EnablePositionIncrements)
propJSON (Proxy :: Proxy AnalyzeWildcard)
propJSON (Proxy :: Proxy GeneratePhraseQueries)
propJSON (Proxy :: Proxy Locale)
propJSON (Proxy :: Proxy MaxWordLength)
propJSON (Proxy :: Proxy MinWordLength)
propJSON (Proxy :: Proxy PhraseSlop)
propJSON (Proxy :: Proxy MinDocFrequency)
propJSON (Proxy :: Proxy MaxDocFrequency)
propJSON (Proxy :: Proxy Filter)
propJSON (Proxy :: Proxy Query)
propJSON (Proxy :: Proxy SimpleQueryStringQuery)
propJSON (Proxy :: Proxy FieldOrFields)
propJSON (Proxy :: Proxy SimpleQueryFlag)
propJSON (Proxy :: Proxy RegexpQuery)
propJSON (Proxy :: Proxy QueryStringQuery)
propJSON (Proxy :: Proxy RangeQuery)
propJSON (Proxy :: Proxy PrefixQuery)
propJSON (Proxy :: Proxy NestedQuery)
propJSON (Proxy :: Proxy MoreLikeThisFieldQuery)
propJSON (Proxy :: Proxy MoreLikeThisQuery)
propJSON (Proxy :: Proxy IndicesQuery)
propJSON (Proxy :: Proxy HasParentQuery)
propJSON (Proxy :: Proxy HasChildQuery)
propJSON (Proxy :: Proxy FuzzyQuery)
propJSON (Proxy :: Proxy FuzzyLikeFieldQuery)
propJSON (Proxy :: Proxy FuzzyLikeThisQuery)
propJSON (Proxy :: Proxy DisMaxQuery)
propJSON (Proxy :: Proxy CommonTermsQuery)
propJSON (Proxy :: Proxy CommonMinimumMatch)
propJSON (Proxy :: Proxy BoostingQuery)
propJSON (Proxy :: Proxy BoolQuery)
propJSON (Proxy :: Proxy MatchQuery)
propJSON (Proxy :: Proxy MultiMatchQueryType)
propJSON (Proxy :: Proxy BooleanOperator)
propJSON (Proxy :: Proxy ZeroTermsQuery)
propJSON (Proxy :: Proxy MatchQueryType)
propJSON (Proxy :: Proxy AliasRouting)
propJSON (Proxy :: Proxy IndexAliasCreate)
propJSON (Proxy :: Proxy SearchAliasRouting)
propJSON (Proxy :: Proxy ScoreType)
propJSON (Proxy :: Proxy Distance)
propJSON (Proxy :: Proxy DistanceUnit)
propJSON (Proxy :: Proxy DistanceType)
propJSON (Proxy :: Proxy OptimizeBbox)
propJSON (Proxy :: Proxy GeoBoundingBoxConstraint)
propJSON (Proxy :: Proxy GeoFilterType)
propJSON (Proxy :: Proxy GeoBoundingBox)
propJSON (Proxy :: Proxy LatLon)
propJSON (Proxy :: Proxy RangeExecution)
prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs ->
let expected = case rfs of
SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (L.nub (NE.toList fs)))
x -> x
in parseEither parseJSON (toJSON rfs) === Right expected
propJSON (Proxy :: Proxy BoolMatch)
propJSON (Proxy :: Proxy Term)
propJSON (Proxy :: Proxy MultiMatchQuery)
propJSON (Proxy :: Proxy IndexSettings)
propJSON (Proxy :: Proxy CompoundFormat)
propJSON (Proxy :: Proxy TemplateQueryInline)
propJSON (Proxy :: Proxy Suggest)
propJSON (Proxy :: Proxy DirectGenerators)
propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes)
describe "Approximate isomorphism JSON instances" $ do
propApproxJSON (Proxy :: Proxy UpdatableIndexSetting')
propApproxJSON (Proxy :: Proxy ReplicaCount)
propApproxJSON (Proxy :: Proxy ReplicaBounds)
propApproxJSON (Proxy :: Proxy Bytes)
propApproxJSON (Proxy :: Proxy AllocationPolicy)
propApproxJSON (Proxy :: Proxy InitialShardCount)
propApproxJSON (Proxy :: Proxy FSType)

View File

@@ -1,115 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Query where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
spec :: Spec
spec =
describe "query API" $ do
it "returns document for term query and identity filter" $ withTestEnv $ do
_ <- insertData
let query = TermQuery (Term "user" "bitemyapp") Nothing
let filter' = IdentityFilter
let search = mkSearch (Just query) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "handles constant score queries" $ withTestEnv $ do
_ <- insertData
let query = TermsQuery "user" ("bitemyapp" :| [])
let cfQuery = ConstantScoreQuery query (Boost 1.0)
let filter' = IdentityFilter
let search = mkSearch (Just cfQuery) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for terms query and identity filter" $ withTestEnv $ do
_ <- insertData
let query = TermsQuery "user" ("bitemyapp" :| [])
let filter' = IdentityFilter
let search = mkSearch (Just query) (Just filter')
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for match query" $ withTestEnv $ do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for multi-match query" $ withTestEnv $ do
_ <- insertData
let flds = [FieldName "user", FieldName "message"]
let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do
_ <- insertData
let tiebreaker = Just $ Tiebreaker 0.3
flds = [FieldName "user", FieldName "message"]
multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp")
query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker }
search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for bool query" $ withTestEnv $ do
_ <- insertData
let innerQuery = QueryMatchQuery $
mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let query = QueryBoolQuery $
mkBoolQuery [innerQuery] [] []
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for boosting query" $ withTestEnv $ do
_ <- insertData
let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp")
let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2)
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for common terms query" $ withTestEnv $ do
_ <- insertData
let query = QueryCommonTermsQuery $
CommonTermsQuery (FieldName "user")
(QueryString "bitemyapp")
(CutoffFrequency 0.0001)
Or Or Nothing Nothing Nothing Nothing
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
liftIO $
myTweet `shouldBe` Right exampleTweet
it "returns document for inline template query" $ withTestEnv $ do
_ <- insertData
let innerQuery = QueryMatchQuery $
mkMatchQuery (FieldName "{{userKey}}")
(QueryString "{{bitemyappKey}}")
templateParams = TemplateQueryKeyValuePairs $ HM.fromList
[ ("userKey", "user")
, ("bitemyappKey", "bitemyapp")
]
templateQuery = QueryTemplateQueryInline $
TemplateQueryInline innerQuery templateParams
search = mkSearch (Just templateQuery) Nothing
myTweet <- searchTweet search
liftIO $ myTweet `shouldBe` Right exampleTweet

View File

@@ -1,22 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Sorting where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "sorting" $
it "returns documents in the right order" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending
let search = Search Nothing
Nothing (Just [sortSpec]) Nothing Nothing
False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing
Nothing
result <- searchTweets search
let myTweet = grabFirst result
liftIO $
myTweet `shouldBe` Right otherTweet
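The positional Search literal above has to supply every field in order, which is easy to get wrong. An equivalent record-update construction (a sketch; assumes the V1 record exposes a sortBody field and that mkSearch's defaults match the literal, analogous to the suggestBody update used in Test.Suggest below):

let search = (mkSearch Nothing Nothing) { sortBody = Just [sortSpec] }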

View File

@@ -1,22 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Suggest where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "Suggest" $
it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
_ <- insertData
let phraseSuggester = mkPhraseSuggester (FieldName "message")
namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
search' = mkSearch Nothing Nothing
search = search' { suggestBody = Just namedSuggester }
expectedText = Just "use haskell"
resp <- searchByIndex testIndex search
parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
case parsed of
Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e)
Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText

View File

@@ -1,117 +0,0 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
#if __GLASGOW_HASKELL__ < 800
{-# OPTIONS_GHC -fcontext-stack=100 #-}
#endif
#if __GLASGOW_HASKELL__ >= 802
{-# LANGUAGE MonoLocalBinds #-}
#endif
module Main where
import Test.Common
import Test.Import
import Prelude
import qualified Test.Aggregation as Aggregation
import qualified Test.BulkAPI as Bulk
import qualified Test.Documents as Documents
import qualified Test.Highlights as Highlights
import qualified Test.Indices as Indices
import qualified Test.JSON as JSON
import qualified Test.Query as Query
import qualified Test.Snapshots as Snapshots
import qualified Test.Sorting as Sorting
import qualified Test.SourceFiltering as SourceFiltering
import qualified Test.Suggest as Suggest
import qualified Test.Templates as Templates
main :: IO ()
main = hspec $ do
Aggregation.spec
Bulk.spec
Documents.spec
Highlights.spec
Indices.spec
JSON.spec
Query.spec
Snapshots.spec
Sorting.spec
SourceFiltering.spec
Suggest.spec
Templates.spec
describe "error parsing" $ do
it "can parse EsErrors for < 2.0" $ when' (atmost es16) $ withTestEnv $ do
res <- getDocument (IndexName "bogus") (MappingName "also_bogus") (DocId "bogus_as_well")
let errorResp = eitherDecode (responseBody res)
liftIO (errorResp `shouldBe` Right (EsError 404 "IndexMissingException[[bogus] missing]"))
it "can parse EsErrors for >= 2.0" $ when' (atleast es20) $ withTestEnv $ do
res <- getDocument (IndexName "bogus") (MappingName "also_bogus") (DocId "bogus_as_well")
let errorResp = eitherDecode (responseBody res)
liftIO (errorResp `shouldBe` Right (EsError 404 "no such index"))
describe "Monoid (SearchHits a)" $
prop "abides the monoid laws" $ eq $
prop_Monoid (T :: T (SearchHits ()))
describe "mkDocVersion" $
prop "can never construct an out of range docVersion" $ \i ->
let res = mkDocVersion i
in case res of
Nothing -> property True
Just dv -> (dv >= minBound) .&&.
(dv <= maxBound) .&&.
docVersionNumber dv === i
describe "getNodesInfo" $
it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
res <- getNodesInfo LocalNode
liftIO $ case res of
-- This is really just a smoke test for response
-- parsing. Node info is so variable, there's not much I can
-- assert here.
Right NodesInfo {..} -> length nodesInfo `shouldBe` 1
Left e -> expectationFailure ("Expected NodesInfo but got " <> show e)
describe "getNodesStats" $
it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
res <- getNodesStats LocalNode
liftIO $ case res of
-- This is really just a smoke test for response
-- parsing. Node stats is so variable, there's not much I can
-- assert here.
Right NodesStats {..} -> length nodesStats `shouldBe` 1
Left e -> expectationFailure ("Expected NodesStats but got " <> show e)
describe "Enum DocVersion" $
it "follows the laws of Enum, Bounded" $ do
evaluate (succ maxBound :: DocVersion) `shouldThrow` anyErrorCall
evaluate (pred minBound :: DocVersion) `shouldThrow` anyErrorCall
evaluate (toEnum 0 :: DocVersion) `shouldThrow` anyErrorCall
evaluate (toEnum 9200000000000000001 :: DocVersion) `shouldThrow` anyErrorCall
enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound]
enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound]
describe "Scan & Scroll API" $
it "returns documents using the scan&scroll API" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let search =
(mkSearch
(Just $ MatchAllQuery Nothing) Nothing)
{ size = Size 1 }
regular_search <- searchTweet search
scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet]
let scan_search = map hitSource scan_search'
liftIO $
regular_search `shouldBe` Right exampleTweet -- Check that the size restriction is being honored
liftIO $
scan_search `shouldMatchList` [Just exampleTweet, Just otherTweet]
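-- A hedged sketch (not part of the original suite) of driving the scroll
-- loop by hand instead of via scanSearch, assuming getInitialScroll and
-- advanceScroll both return Either EsError (SearchResult a) in this
-- Bloodhound vintage and that scrollId is a field of SearchResult.
-- Error cases simply cut the stream short here.
manualScroll :: Search -> BH IO [Hit Tweet]
manualScroll search = do
  initial <- getInitialScroll testIndex testMapping search
  case initial of
    Left _ -> return []
    Right result -> go result (hits (searchHits result))
  where
    go result acc =
      case scrollId result of
        Nothing -> return acc
        Just sid -> do
          -- Keep the scroll context alive for 60 seconds between pages
          next <- advanceScroll sid 60
          case next of
            Left _ -> return acc
            Right result'
              | null (hits (searchHits result')) -> return acc
              | otherwise -> go result' (acc <> hits (searchHits result'))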

View File

@ -1,32 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Highlights where
import Test.Common
import Test.Import
import qualified Data.Map as M
initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight))
initHighlights fieldName = do
_ <- insertData
_ <- insertOther
let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell")
let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing]
let search = mkHighlightSearch (Just query) testHighlight
searchTweetHighlight search
spec :: Spec
spec =
describe "Highlights API" $ do
it "returns highlight from query when there should be one" $ withTestEnv $ do
myHighlight <- initHighlights "message"
liftIO $
myHighlight `shouldBe`
Right (Just (M.fromList [("message", ["Use <em>haskell</em>!"])]))
it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do
myHighlight <- initHighlights "user"
liftIO $
myHighlight `shouldBe`
Right Nothing
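-- A hypothetical variant (not part of the original suite) that asks for
-- highlights on both fields in a single query, using only constructors
-- already exercised above.
multiFieldHighlightSearch :: Search
multiFieldHighlightSearch = mkHighlightSearch (Just query) highlights
  where
    query = QueryMatchQuery (mkMatchQuery (FieldName "message") (QueryString "haskell"))
    highlights =
      Highlights Nothing
        [ FieldHighlight (FieldName "message") Nothing
        , FieldHighlight (FieldName "user") Nothing
        ]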

View File

@ -1,201 +0,0 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.Snapshots (spec) where
import Test.Common
import Test.Import
import Data.Maybe (fromMaybe)
import qualified Data.List as L
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Network.HTTP.Types.Method as NHTM
import qualified Network.URI as URI
import Test.Generators ()
spec :: Spec
spec = do
describe "FsSnapshotRepo" $
prop "SnapshotRepo laws" $ \fsr ->
fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
describe "Snapshot repos" $ do
it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do
res <- getSnapshotRepos AllSnapshotRepos
liftIO $ case res of
Left e -> expectationFailure ("Expected a right but got Left " <> show e)
Right _ -> return ()
it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
let r2n = SnapshotRepoName "bloodhound-repo2"
withSnapshotRepo r1n $ \r1 ->
withSnapshotRepo r2n $ \r2 -> do
repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n]))
liftIO $ case repos of
Right xs -> do
let srt = L.sortBy (comparing gSnapshotRepoName)
srt xs `shouldBe` srt [r1, r2]
Left e -> expectationFailure (show e)
it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \r1 -> do
let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1))
let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing
resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression
liftIO (validateStatus resp 200)
Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| []))
liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression)
-- The verify API seems to have been introduced around ES 1.4
it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
res <- verifySnapshotRepo r1n
liftIO $ case res of
Right (SnapshotVerification vs)
| null vs -> expectationFailure "Expected nonempty set of verifying nodes"
| otherwise -> return ()
Left e -> expectationFailure (show e)
describe "Snapshots" $ do
it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
res <- getSnapshots r1n AllSnapshots
liftIO $ case res of
Left e -> expectationFailure ("Expected a right but got Left " <> show e)
Right _ -> return ()
it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| []))
liftIO $ case res of
Right [snap]
| snapInfoState snap == SnapshotSuccess &&
snapInfoName snap == s1n -> return ()
| otherwise -> expectationFailure (show snap)
Right [] -> expectationFailure "There were no snapshots"
Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps))
Left e -> expectationFailure (show e)
describe "Snapshot restore" $ do
it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True }
-- have to close an index to restore it
resp1 <- closeIndex testIndex
liftIO (validateStatus resp1 200)
resp2 <- restoreSnapshot r1n s1n settings
liftIO (validateStatus resp2 200)
it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do
let r1n = SnapshotRepoName "bloodhound-repo1"
withSnapshotRepo r1n $ \_ -> do
let s1n = SnapshotName "example-snapshot"
withSnapshot r1n s1n $ do
let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)"
let replace = RRTLit "restored-" :| [RRSubWholeMatch]
let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1"
oldEnoughForOverrides <- liftIO (atleast es15)
let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True
, snapRestoreRenamePattern = Just pat
, snapRestoreRenameReplacement = Just replace
, snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides
then Just overrides
else Nothing
}
-- have to close an index to restore it
let go = do
resp <- restoreSnapshot r1n s1n settings
liftIO (validateStatus resp 200)
exists <- indexExists expectedIndex
liftIO (exists `shouldBe` True)
go `finally` deleteIndex expectedIndex
-- | Get the configured repo paths for snapshotting. Note that path.repo
-- is not set by default, so on ES newer than 1.5 we won't be able to
-- test snapshotting unless it is configured. This lookup can and should
-- become proper client functionality in a much less ad-hoc incarnation.
getRepoPaths :: IO [FilePath]
getRepoPaths = withTestEnv $ do
bhe <- getBHEnv
let Server s = bhServer bhe
let tUrl = s <> "/" <> "_nodes"
initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl))
let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet }
Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe))
return $ fromMaybe mempty $ do
Object nodes <- HM.lookup "nodes" o
Object firstNode <- snd <$> headMay (HM.toList nodes)
Object settings <- HM.lookup "settings" firstNode
Object path <- HM.lookup "path" settings
Array repo <- HM.lookup "repo" path
return [ T.unpack t | String t <- V.toList repo]
-- | 1.5 and earlier don't care about repo paths
canSnapshot :: IO Bool
canSnapshot = do
caresAboutRepos <- atleast es16
repoPaths <- getRepoPaths
return (not caresAboutRepos || not (null repoPaths))
withSnapshotRepo
:: ( MonadMask m
, MonadBH m
)
=> SnapshotRepoName
-> (GenericSnapshotRepo -> m a)
-> m a
withSnapshotRepo srn@(SnapshotRepoName n) f = do
repoPaths <- liftIO getRepoPaths
-- We'll use the first configured repo path if available, otherwise the
-- system temp dir. The temp-dir fallback will fail on ES >= 1.6, so be
-- sure to guard callers with @when' canSnapshot@.
case repoPaths of
(firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f
[] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f
where
alloc dir = do
liftIO (setFileMode dir mode)
let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing
resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo
liftIO (validateStatus resp 200)
return (toGSnapshotRepo repo)
mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes
free GenericSnapshotRepo {..} = do
resp <- deleteSnapshotRepo gSnapshotRepoName
liftIO (validateStatus resp 200)
withSnapshot
:: ( MonadMask m
, MonadBH m
)
=> SnapshotRepoName
-> SnapshotName
-> m a
-> m a
withSnapshot srn sn = bracket_ alloc free
where
alloc = do
resp <- createSnapshot srn sn createSettings
liftIO (validateStatus resp 200)
-- We'll make this synchronous for testing purposes
createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True
, snapIndices = Just (IndexList (testIndex :| []))
-- We don't actually need to back up any data
}
free =
deleteSnapshot srn sn
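-- A hedged usage sketch (assumed, not part of the original suite) showing
-- how the two bracket helpers above compose into a self-cleaning test: the
-- repo and snapshot are created on the way in and deleted on the way out,
-- even if the body fails.
snapshotRoundTrip :: BH IO ()
snapshotRoundTrip =
  withSnapshotRepo repoName $ \_repo ->
    withSnapshot repoName snapName $ do
      res <- getSnapshots repoName (SnapshotList (ExactSnap snapName :| []))
      liftIO $ case res of
        Right [_snap] -> return ()
        Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps))
        Left e -> expectationFailure (show e)
  where
    repoName = SnapshotRepoName "example-repo"
    snapName = SnapshotName "example-snapshot"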

View File

@ -1,38 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.SourceFiltering where
import Test.Common
import Test.Import
import qualified Data.HashMap.Strict as HM
spec :: Spec
spec =
describe "Source filtering" $ do
it "doesn't include source when sources are disabled" $ withTestEnv $
searchExpectSource
NoSource
(Left (EsError 500 "Source was missing"))
it "includes a source" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPattern (Pattern "message")))
(Right (Object (HM.fromList [("message", String "Use haskell!")])))
it "includes sources" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPatterns [Pattern "user", Pattern "message"]))
(Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")])))
it "includes source patterns" $ withTestEnv $
searchExpectSource
(SourcePatterns (PopPattern (Pattern "*ge")))
(Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")])))
it "excludes source patterns" $ withTestEnv $
searchExpectSource
(SourceIncludeExclude (Include [])
(Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"]))
(Right (Object (HM.fromList [("user",String "bitemyapp")])))

View File

@ -1,26 +0,0 @@
{-# LANGUAGE OverloadedStrings #-}
module Test.Templates where
import Test.Common
import Test.Import
spec :: Spec
spec =
describe "template API" $ do
it "can create a template" $ withTestEnv $ do
let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping]
resp <- putTemplate idxTpl (TemplateName "tweet-tpl")
liftIO $ validateStatus resp 200
it "can detect if a template exists" $ withTestEnv $ do
exists <- templateExists (TemplateName "tweet-tpl")
liftIO $ exists `shouldBe` True
it "can delete a template" $ withTestEnv $ do
resp <- deleteTemplate (TemplateName "tweet-tpl")
liftIO $ validateStatus resp 200
it "can detect if a template doesn't exist" $ withTestEnv $ do
exists <- templateExists (TemplateName "tweet-tpl")
liftIO $ exists `shouldBe` False
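-- A hedged alternative (not in the original suite): exercising the whole
-- template lifecycle inside one test case, so the specs above would not
-- have to depend on their execution order.
templateLifecycle :: BH IO ()
templateLifecycle = do
  let tpl = IndexTemplate (TemplatePattern "tweet-*")
              (Just (IndexSettings (ShardCount 1) (ReplicaCount 1)))
              [toJSON TweetMapping]
  created <- putTemplate tpl (TemplateName "tweet-tpl")
  liftIO (validateStatus created 200)
  existsAfterPut <- templateExists (TemplateName "tweet-tpl")
  liftIO (existsAfterPut `shouldBe` True)
  deleted <- deleteTemplate (TemplateName "tweet-tpl")
  liftIO (validateStatus deleted 200)
  existsAfterDelete <- templateExists (TemplateName "tweet-tpl")
  liftIO (existsAfterDelete `shouldBe` False)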