hlint, weeder

Chris Allen 2018-02-13 15:47:43 -06:00
parent 103aac6431
commit bc0da4a4eb
23 changed files with 983 additions and 846 deletions

.hlint.yaml (new file, 66 lines)

@@ -0,0 +1,66 @@
# HLint configuration file
# https://github.com/ndmitchell/hlint
##########################
# This file contains a template configuration file, which is typically
# placed as .hlint.yaml in the root of your project
# Specify additional command line arguments
#
# - arguments: [--color, --cpp-simple, -XQuasiQuotes]
# Control which extensions/flags/modules/functions can be used
#
# - extensions:
# - default: false # all extensions are banned by default
# - name: [PatternGuards, ViewPatterns] # only these listed extensions can be used
# - {name: CPP, within: CrossPlatform} # CPP can only be used in a given module
#
# - flags:
# - {name: -w, within: []} # -w is allowed nowhere
#
# - modules:
# - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set'
# - {name: Control.Arrow, within: []} # Certain modules are banned entirely
#
# - functions:
# - {name: unsafePerformIO, within: []} # unsafePerformIO can only appear in no modules
# Add custom hints for this project
#
# Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar"
# - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x}
# Turn on hints that are off by default
#
# Ban "module X(module X) where", to require a real export list
# - warn: {name: Use explicit module export list}
#
# Replace a $ b $ c with a . b $ c
# - group: {name: dollar, enabled: true}
#
# Generalise map to fmap, ++ to <>
# - group: {name: generalise, enabled: true}
# Ignore some builtin hints
- ignore: {name: "Use <$>"}
- ignore: {name: "Use lambda-case"}
# When we don't use camelCase it's to match ES.
# Possibly this is a mistake.
- ignore: {name: "Use camelCase"}
- ignore: {name: "Eta reduce"}
# - ignore: {name: Use let}
# - ignore: {name: Use const, within: SpecialModule} # Only within certain modules
# Define some custom infix operators
# - fixity: infixr 3 ~^#^~
# To generate a suitable file for HLint do:
# $ hlint --default > .hlint.yaml
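To see what the silenced hints fire on, here is a small Haskell sketch (hypothetical code, not from the repository):

-- "Eta reduce" would suggest rewriting this as: double = map (* 2)
double :: [Int] -> [Int]
double xs = map (* 2) xs

-- "Use camelCase" fires on underscore names like this one; Bloodhound keeps
-- some snake_case names on purpose to mirror Elasticsearch field names.
number_of_shards :: Int
number_of_shards = 3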


@@ -3,6 +3,9 @@ stack = STACK_YAML='stack.yaml' stack
build:
	stack build
build-validate:
	stack build --fast --ghc-options '-Werror'
ghci:
	stack ghci
@@ -18,6 +21,19 @@ test-ghci:
ghcid:
	ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
ghcid-validate:
	ghcid -c "$(stack) ghci bloodhound:lib --test --ghci-options='-Werror -fobject-code -fno-warn-unused-do-bind' --main-is bloodhound:test:bloodhound-tests"
weeder:
	weeder . --build
# hlint --default > .hlint.yaml
hlint:
	hlint .
hlint-watch:
	sos src/ -c "hlint ." -p "src/(.*)\.hs"
mod-build:
	stack build --ghc-options '+RTS -A128M -RTS'
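The weeder target added above feeds stack's build output to weeder, which reports exports that nothing in the package uses. A hypothetical sketch of the kind of "weed" it flags:

module Data.Example (usedHelper, deadHelper) where -- hypothetical module

usedHelper :: Int -> Int
usedHelper = (+ 1)

-- If no other module imports deadHelper, weeder lists this export as unused.
deadHelper :: Int -> Int
deadHelper = (* 2)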


@@ -37,6 +37,11 @@ library
      Database.V5.Bloodhound.Client
      Database.V5.Bloodhound.Types
      Database.V5.Bloodhound.Types.Class
      Database.V5.Bloodhound.Internal.Analysis
      Database.V5.Bloodhound.Internal.Client
      Database.V5.Bloodhound.Internal.Newtypes
      Database.V5.Bloodhound.Internal.Query
      Database.V5.Bloodhound.Internal.StringlyTyped
      Database.V1.Bloodhound
      Database.V1.Bloodhound.Client
      Database.V1.Bloodhound.Types
@@ -44,11 +49,6 @@ library
      Database.V1.Bloodhound.Types.Internal
  other-modules: Bloodhound.Import
      Database.Bloodhound.Common.Script
-     Database.V5.Bloodhound.Internal.Analysis
-     Database.V5.Bloodhound.Internal.Client
-     Database.V5.Bloodhound.Internal.Newtypes
-     Database.V5.Bloodhound.Internal.Query
-     Database.V5.Bloodhound.Internal.StringlyTyped
  hs-source-dirs: src
  build-depends: base >= 4.3 && <5,
      bytestring >= 0.10.0 && <0.11,
@@ -64,13 +64,10 @@ library
      http-types >= 0.8 && <0.13,
      vector >= 0.10.9 && <0.13,
      scientific >= 0.3.0.0 && <0.4.0.0,
-     bifunctors,
-     exceptions,
-     data-default-class,
      blaze-builder,
-     unordered-containers,
-     mtl-compat,
-     hashable
      exceptions,
      hashable,
      unordered-containers
  default-language: Haskell2010

test-suite bloodhound-tests
@@ -116,7 +113,6 @@ test-suite bloodhound-tests
      pretty-simple,
      quickcheck-arbitrary-template,
      quickcheck-properties,
-     generics-sop >=0.2 && <0.4,
      errors,
      exceptions,
      temporary,


@@ -223,7 +223,8 @@ getStatus :: MonadBH m => m (Maybe Status)
getStatus = do
  response <- get =<< url
  return $ decode (responseBody response)
- where url = joinPath []
  where
    url = joinPath []

-- | 'getSnapshotRepos' gets the definitions of a subset of the
-- defined snapshot repos.
@@ -252,7 +253,7 @@ instance FromJSON GSRs where
  parseJSON = withObject "Collection of GenericSnapshotRepo" parse
    where
      parse = fmap GSRs . mapM (uncurry go) . HM.toList
-     go rawName = withObject "GenericSnapshotRepo" $ \o -> do
      go rawName = withObject "GenericSnapshotRepo" $ \o ->
        GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type"
                                                       <*> o .: "settings"
@@ -454,16 +455,18 @@ deleteIndex (IndexName indexName) =
updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply
updateIndexSettings updates (IndexName indexName) =
  bindM2 put url (return body)
- where url = joinPath [indexName, "_settings"]
-       body = Just (encode jsonBody)
-       jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
  where
    url = joinPath [indexName, "_settings"]
    body = Just (encode jsonBody)
    jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])

getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName
                 -> m (Either EsError IndexSettingsSummary)
-getIndexSettings (IndexName indexName) = do
getIndexSettings (IndexName indexName) =
  parseEsResponse =<< get =<< url
- where url = joinPath [indexName, "_settings"]
  where
    url = joinPath [indexName, "_settings"]

-- | 'optimizeIndex' will optimize a single index, list of indexes or
@@ -586,7 +589,7 @@ listIndices =
    url = joinPath ["_cat/indices?format=json"]
    parse body = maybe (throwM (EsProtocolException body)) return $ do
      vals <- decode body
-     forM vals $ \val -> do
      forM vals $ \val ->
        case val of
          Object obj -> do
            indexVal <- HM.lookup "index" obj
@@ -718,7 +721,8 @@ encodeBulkOperations stream = collapsed where
  collapsed = toLazyByteString $ mappend mashedTaters (byteString "\n")

  mash :: Builder -> V.Vector L.ByteString -> Builder
- mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x))
  mash =
    V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x)

mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value
mkBulkStreamValue operation indexName mappingName docId =
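The mash rewrite in this hunk is behavior-preserving, since <> is mappend for Builder. A standalone sketch of the fold (assumes only bytestring and vector, and a GHC whose Prelude exports <>):

{-# LANGUAGE OverloadedStrings #-}
import Data.ByteString.Builder (Builder, byteString, lazyByteString, toLazyByteString)
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector as V

-- Prefix every chunk with a newline separator, as the bulk encoder does.
mash :: Builder -> V.Vector L.ByteString -> Builder
mash = V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x)

main :: IO ()
main = print (toLazyByteString (mash mempty (V.fromList ["a", "b"])))
-- prints "\na\nb"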


@@ -4,7 +4,6 @@
{-# LANGUAGE FlexibleInstances          #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses      #-}
--- {-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE NamedFieldPuns             #-}
{-# LANGUAGE OverloadedStrings          #-}
{-# LANGUAGE RecordWildCards            #-}
@@ -1137,8 +1136,10 @@ data Source =
  | SourceIncludeExclude Include Exclude
  deriving (Read, Show, Eq, Generic, Typeable)

-data PatternOrPatterns = PopPattern Pattern
-                       | PopPatterns [Pattern] deriving (Eq, Read, Show, Generic, Typeable)
data PatternOrPatterns =
    PopPattern Pattern
  | PopPatterns [Pattern]
  deriving (Eq, Read, Show, Generic, Typeable)

data Include = Include [Pattern] deriving (Eq, Read, Show, Generic, Typeable)
data Exclude = Exclude [Pattern] deriving (Eq, Read, Show, Generic, Typeable)
@@ -1906,8 +1907,9 @@ instance ToJSON TermOrder where

instance ToJSON TermInclusion where
  toJSON (TermInclusion x) = toJSON x
- toJSON (TermPattern pattern flags) = omitNulls [ "pattern" .= pattern,
-                                                  "flags" .= flags]
  toJSON (TermPattern pattern flags) =
    omitNulls [ "pattern" .= pattern
              , "flags" .= flags]

instance ToJSON CollectionMode where
  toJSON BreadthFirst = "breadth_first"


@@ -317,7 +317,7 @@ instance FromJSON GSRs where
  parseJSON = withObject "Collection of GenericSnapshotRepo" parse
    where
      parse = fmap GSRs . mapM (uncurry go) . HM.toList
-     go rawName = withObject "GenericSnapshotRepo" $ \o -> do
      go rawName = withObject "GenericSnapshotRepo" $ \o ->
        GenericSnapshotRepo (SnapshotRepoName rawName) <$> o .: "type"
                                                       <*> o .: "settings"
@@ -555,16 +555,18 @@ deleteIndex (IndexName indexName) =
updateIndexSettings :: MonadBH m => NonEmpty UpdatableIndexSetting -> IndexName -> m Reply
updateIndexSettings updates (IndexName indexName) =
  bindM2 put url (return body)
- where url = joinPath [indexName, "_settings"]
-       body = Just (encode jsonBody)
-       jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])
  where
    url = joinPath [indexName, "_settings"]
    body = Just (encode jsonBody)
    jsonBody = Object (deepMerge [u | Object u <- toJSON <$> toList updates])

getIndexSettings :: (MonadBH m, MonadThrow m) => IndexName
                 -> m (Either EsError IndexSettingsSummary)
-getIndexSettings (IndexName indexName) = do
getIndexSettings (IndexName indexName) =
  parseEsResponse =<< get =<< url
- where url = joinPath [indexName, "_settings"]
  where
    url = joinPath [indexName, "_settings"]

-- | 'forceMergeIndex'
--
@@ -704,7 +706,7 @@ listIndices =
    url = joinPath ["_cat/indices?format=json"]
    parse body = maybe (throwM (EsProtocolException body)) return $ do
      vals <- decode body
-     forM vals $ \val -> do
      forM vals $ \val ->
        case val of
          Object obj -> do
            indexVal <- HM.lookup "index" obj
@@ -861,10 +863,11 @@ deleteDocument (IndexName indexName)
-- >>> _ <- runBH' $ bulk stream
-- >>> _ <- runBH' $ refreshIndex testIndex
bulk :: MonadBH m => V.Vector BulkOperation -> m Reply
-bulk bulkOps = do
bulk bulkOps =
  bindM2 post url (return body)
- where url = joinPath ["_bulk"]
-       body = Just $ encodeBulkOperations bulkOps
  where
    url = joinPath ["_bulk"]
    body = Just $ encodeBulkOperations bulkOps

-- | 'encodeBulkOperations' is a convenience function for dumping a vector of 'BulkOperation'
-- into an 'L.ByteString'
@@ -882,7 +885,7 @@ encodeBulkOperations stream = collapsed where
    toLazyByteString $ mappend mashedTaters (byteString "\n")

  mash :: Builder -> V.Vector L.ByteString -> Builder
- mash = V.foldl' (\b x -> b `mappend` (byteString "\n") `mappend` (lazyByteString x))
  mash = V.foldl' (\b x -> b <> byteString "\n" <> lazyByteString x)

mkBulkStreamValue :: Text -> Text -> Text -> Text -> Value
mkBulkStreamValue operation indexName mappingName docId =


@@ -52,8 +52,8 @@ instance FromJSON AnalyzerDefinition where
    <$> m .:? "tokenizer"
    <*> m .:? "filter" .!= []

-data TokenizerDefinition
-  = TokenizerDefinitionNgram Ngram
newtype TokenizerDefinition =
  TokenizerDefinitionNgram Ngram
  deriving (Eq,Show)

instance ToJSON TokenizerDefinition where
@@ -70,8 +70,8 @@ instance FromJSON TokenizerDefinition where
    typ <- m .: "type" :: Parser Text
    case typ of
      "ngram" -> fmap TokenizerDefinitionNgram $ Ngram
-       <$> (fmap unStringlyTypedInt (m .: "min_gram"))
-       <*> (fmap unStringlyTypedInt (m .: "max_gram"))
        <$> fmap unStringlyTypedInt (m .: "min_gram")
        <*> fmap unStringlyTypedInt (m .: "max_gram")
        <*> m .: "token_chars"
      _ -> fail "invalid TokenizerDefinition"


@@ -10,6 +10,7 @@ module Database.V5.Bloodhound.Internal.Client where

import Bloodhound.Import

import qualified Data.Text as T
import qualified Data.Traversable as DT
import qualified Data.HashMap.Strict as HM
import qualified Data.Vector as V
import qualified Data.Version as Vers
@@ -114,6 +115,15 @@ data Status = Status
               , tagline :: Text }
  deriving (Eq, Show)
instance FromJSON Status where
parseJSON (Object v) = Status <$>
v .: "name" <*>
v .: "cluster_name" <*>
v .: "cluster_uuid" <*>
v .: "version" <*>
v .: "tagline"
parseJSON _ = empty
{-| 'IndexSettings' is used to configure the shards and replicas when
    you create an Elasticsearch Index.
@@ -125,6 +135,20 @@ data IndexSettings = IndexSettings
                     , indexReplicas :: ReplicaCount }
  deriving (Eq, Show)
instance ToJSON IndexSettings where
toJSON (IndexSettings s r) = object ["settings" .=
object ["index" .=
object ["number_of_shards" .= s, "number_of_replicas" .= r]
]
]
instance FromJSON IndexSettings where
parseJSON = withObject "IndexSettings" parse
where parse o = do s <- o .: "settings"
i <- s .: "index"
IndexSettings <$> i .: "number_of_shards"
<*> i .: "number_of_replicas"
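-- A sketch (not in the diff) of the shape the two instances above agree on:
--   toJSON (IndexSettings (ShardCount 3) (ReplicaCount 2))
--     ==> {"settings":{"index":{"number_of_shards":3,"number_of_replicas":2}}}
-- and FromJSON walks "settings" -> "index" to read the same two fields back.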
{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and
    2 replicas. -}
defaultIndexSettings :: IndexSettings
@@ -212,11 +236,150 @@ data UpdatableIndexSetting = NumberOfReplicas ReplicaCount
  -- ^ Analysis is not a dynamic setting and can only be performed on a closed index.
  deriving (Eq, Show)
oPath :: ToJSON a => NonEmpty Text -> a -> Value
oPath (k :| []) v = object [k .= v]
oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v]
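-- For instance (a sketch, not in the diff):
--   oPath ("index" :| ["translog", "disable_flush"]) True
--     ==> {"index":{"translog":{"disable_flush":true}}}
-- i.e. one singleton object per key in the path.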
attrFilterJSON :: NonEmpty NodeAttrFilter -> Value
attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs)
| NodeAttrFilter (NodeAttrName n) vs <- toList fs]
parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter)
parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse
where parse o = case HM.toList o of
[] -> fail "Expected non-empty list of NodeAttrFilters"
x:xs -> DT.mapM (uncurry parse') (x :| xs)
parse' n = withText "Text" $ \t ->
case T.splitOn "," t of
fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs))
[] -> fail "Expected non-empty list of filter values"
instance ToJSON UpdatableIndexSetting where
toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x
toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x
toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x)
toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x
toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x
toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x
toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x
toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x)
toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x
toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x
toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x)
toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x)
toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs)
toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs)
toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs)
toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x
toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x
toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x
toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x)
toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x
toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x
toJSON (CompressionSetting x) = oPath ("index" :| ["codec"]) x
toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x
toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x
toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x
toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x
toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x
toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x
toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x
toJSON (MappingTotalFieldsLimit x) = oPath ("index" :| ["mapping","total_fields","limit"]) x
toJSON (AnalysisSetting x) = oPath ("index" :| ["analysis"]) x
instance FromJSON UpdatableIndexSetting where
parseJSON = withObject "UpdatableIndexSetting" parse
where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"]
<|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"]
<|> refreshInterval `taggedAt` ["index", "refresh_interval"]
<|> indexConcurrency `taggedAt` ["index", "concurrency"]
<|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"]
<|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"]
<|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"]
<|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"]
<|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"]
<|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"]
<|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"]
<|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"]
<|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"]
<|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"]
<|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"]
<|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"]
<|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"]
<|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"]
<|> gcDeletes `taggedAt` ["index", "gc_deletes"]
<|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"]
<|> translogFSType `taggedAt` ["index", "translog", "fs", "type"]
<|> compressionSetting `taggedAt` ["index", "codec"]
<|> compoundFormat `taggedAt` ["index", "compound_format"]
<|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"]
<|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"]
<|> blocksReadOnly `taggedAt` ["blocks", "read_only"]
<|> blocksRead `taggedAt` ["blocks", "read"]
<|> blocksWrite `taggedAt` ["blocks", "write"]
<|> blocksMetaData `taggedAt` ["blocks", "metadata"]
<|> mappingTotalFieldsLimit `taggedAt` ["index", "mapping", "total_fields", "limit"]
<|> analysisSetting `taggedAt` ["index", "analysis"]
where taggedAt f ks = taggedAt' f (Object o) ks
taggedAt' f v [] =
f =<< (parseJSON v <|> parseJSON (unStringlyTypeJSON v))
taggedAt' f v (k:ks) =
withObject "Object" (\o -> do v' <- o .: k
taggedAt' f v' ks) v
numberOfReplicas = pure . NumberOfReplicas
autoExpandReplicas = pure . AutoExpandReplicas
refreshInterval = pure . RefreshInterval . ndtJSON
indexConcurrency = pure . IndexConcurrency
failOnMergeFailure = pure . FailOnMergeFailure
translogFlushThresholdOps = pure . TranslogFlushThresholdOps
translogFlushThresholdSize = pure . TranslogFlushThresholdSize
translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON
translogDisableFlush = pure . TranslogDisableFlush
cacheFilterMaxSize = pure . CacheFilterMaxSize
cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON
gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON
routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter
routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter
routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter
routingAllocationEnable = pure . RoutingAllocationEnable
routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode
recoveryInitialShards = pure . RecoveryInitialShards
gcDeletes = pure . GCDeletes . ndtJSON
ttlDisablePurge = pure . TTLDisablePurge
translogFSType = pure . TranslogFSType
compressionSetting = pure . CompressionSetting
compoundFormat = pure . IndexCompoundFormat
compoundOnFlush = pure . IndexCompoundOnFlush
warmerEnabled = pure . WarmerEnabled
blocksReadOnly = pure . BlocksReadOnly
blocksRead = pure . BlocksRead
blocksWrite = pure . BlocksWrite
blocksMetaData = pure . BlocksMetaData
mappingTotalFieldsLimit = pure . MappingTotalFieldsLimit
analysisSetting = pure . AnalysisSetting
data ReplicaBounds = ReplicasBounded Int Int
                   | ReplicasLowerBounded Int
                   | ReplicasUnbounded
                   deriving (Eq, Show)
instance ToJSON ReplicaBounds where
toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b)
toJSON (ReplicasLowerBounded a) = String (showText a <> "-all")
toJSON ReplicasUnbounded = Bool False
instance FromJSON ReplicaBounds where
parseJSON v = withText "ReplicaBounds" parseText v
<|> withBool "ReplicaBounds" parseBool v
where parseText t = case T.splitOn "-" t of
[a, "all"] -> ReplicasLowerBounded <$> parseReadText a
[a, b] -> ReplicasBounded <$> parseReadText a
<*> parseReadText b
_ -> fail ("Could not parse ReplicaBounds: " <> show t)
parseBool False = pure ReplicasUnbounded
parseBool _ = fail "ReplicasUnbounded cannot be represented with True"
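-- Wire values these ReplicaBounds instances exchange (a sketch):
--   "2-5"   <=> ReplicasBounded 2 5
--   "2-all" <=> ReplicasLowerBounded 2
--   false   <=> ReplicasUnbounded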
data Compression
  = CompressionDefault
  -- ^ Compress with LZ4
@@ -267,6 +430,16 @@ kilobytes n = Bytes (1000 * n)

data FSType = FSSimple
            | FSBuffered deriving (Eq, Show)
instance ToJSON FSType where
toJSON FSSimple = "simple"
toJSON FSBuffered = "buffered"
instance FromJSON FSType where
parseJSON = withText "FSType" parse
where parse "simple" = pure FSSimple
parse "buffered" = pure FSBuffered
parse t = fail ("Invalid FSType: " <> show t)
data InitialShardCount = QuorumShards
                       | QuorumMinus1Shards
                       | FullShards
@@ -274,6 +447,22 @@ data InitialShardCount = QuorumShards
                       | FullMinus1Shards
                       | ExplicitShards Int
                       deriving (Eq, Show)
instance FromJSON InitialShardCount where
parseJSON v = withText "InitialShardCount" parseText v
<|> ExplicitShards <$> parseJSON v
where parseText "quorum" = pure QuorumShards
parseText "quorum-1" = pure QuorumMinus1Shards
parseText "full" = pure FullShards
parseText "full-1" = pure FullMinus1Shards
parseText _ = mzero
instance ToJSON InitialShardCount where
toJSON QuorumShards = String "quorum"
toJSON QuorumMinus1Shards = String "quorum-1"
toJSON FullShards = String "full"
toJSON FullMinus1Shards = String "full-1"
toJSON (ExplicitShards x) = toJSON x
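-- Accepted wire values, per the parser above (a sketch):
--   "quorum" ==> QuorumShards,  "full-1" ==> FullMinus1Shards,  3 ==> ExplicitShards 3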
data NodeAttrFilter = NodeAttrFilter
  { nodeAttrFilterName   :: NodeAttrName
  , nodeAttrFilterValues :: NonEmpty Text }
@@ -286,12 +475,52 @@ data CompoundFormat = CompoundFileFormat Bool
  -- ^ percentage between 0 and 1 where 0 is false, 1 is true
  deriving (Eq, Show)

-newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime }
-
-data IndexSettingsSummary = IndexSettingsSummary { sSummaryIndexName :: IndexName
-                                                 , sSummaryFixedSettings :: IndexSettings
-                                                 , sSummaryUpdateable :: [UpdatableIndexSetting]}
-                          deriving (Eq, Show)

instance ToJSON CompoundFormat where
  toJSON (CompoundFileFormat x) = Bool x
  toJSON (MergeSegmentVsTotalIndex x) = toJSON x

instance FromJSON CompoundFormat where
  parseJSON v = CompoundFileFormat <$> parseJSON v
            <|> MergeSegmentVsTotalIndex <$> parseJSON v
newtype NominalDiffTimeJSON =
NominalDiffTimeJSON { ndtJSON :: NominalDiffTime }
instance ToJSON NominalDiffTimeJSON where
toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s")
instance FromJSON NominalDiffTimeJSON where
parseJSON = withText "NominalDiffTime" parse
where parse t = case T.takeEnd 1 t of
"s" -> NominalDiffTimeJSON . fromInteger <$> parseReadText (T.dropEnd 1 t)
_ -> fail "Invalid or missing NominalDiffTime unit (expected s)"
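-- The wire format here is whole seconds suffixed with "s" (a sketch):
--   encode (NominalDiffTimeJSON 30) ==> "30s"
--   "90s" parses back to 90 seconds; anything without the "s" suffix hits the fail case.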
data IndexSettingsSummary = IndexSettingsSummary
{ sSummaryIndexName :: IndexName
, sSummaryFixedSettings :: IndexSettings
, sSummaryUpdateable :: [UpdatableIndexSetting]}
deriving (Eq, Show)
parseSettings :: Object -> Parser [UpdatableIndexSetting]
parseSettings o = do
o' <- o .: "index"
-- slice the index object into singleton hashmaps and try to parse each
parses <- forM (HM.toList o') $ \(k, v) -> do
-- blocks are now nested into the "index" key, which is not how they're serialized
let atRoot = Object (HM.singleton k v)
let atIndex = Object (HM.singleton "index" atRoot)
optional (parseJSON atRoot <|> parseJSON atIndex)
return (catMaybes parses)
instance FromJSON IndexSettingsSummary where
parseJSON = withObject "IndexSettingsSummary" parse
where parse o = case HM.toList o of
[(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn)
<$> parseJSON v
<*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings")
_ -> fail "Expected single-key object with index name"
redundant (NumberOfReplicas _) = True
redundant _ = False
{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -}
type Reply = Network.HTTP.Client.Response LByteString
@@ -310,8 +539,9 @@ data FieldType = GeoPointType
  | ShortType
  | ByteType deriving (Eq, Show)

-data FieldDefinition =
-  FieldDefinition { fieldType :: FieldType } deriving (Eq, Show)
newtype FieldDefinition = FieldDefinition
  { fieldType :: FieldType
  } deriving (Eq, Show)

{-| An 'IndexTemplate' defines a template that will automatically be
    applied to new indices created. The templates include both
@@ -327,6 +557,17 @@ data IndexTemplate =
  , templateMappings :: [Value]
  }
instance ToJSON IndexTemplate where
toJSON (IndexTemplate p s m) = merge
(object [ "template" .= p
, "mappings" .= foldl' merge (object []) m
])
(toJSON s)
where
merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2
merge o Null = o
merge _ _ = undefined
data MappingField =
  MappingField { mappingFieldName :: FieldName
               , fieldDefinition  :: FieldDefinition }
@@ -355,6 +596,20 @@ data AllocationPolicy = AllocAll
  -- ^ No shard allocation is allowed
  deriving (Eq, Show)
instance ToJSON AllocationPolicy where
toJSON AllocAll = String "all"
toJSON AllocPrimaries = String "primaries"
toJSON AllocNewPrimaries = String "new_primaries"
toJSON AllocNone = String "none"
instance FromJSON AllocationPolicy where
parseJSON = withText "AllocationPolicy" parse
where parse "all" = pure AllocAll
parse "primaries" = pure AllocPrimaries
parse "new_primaries" = pure AllocNewPrimaries
parse "none" = pure AllocNone
parse t = fail ("Invalid AllocationPolicy: " <> show t)
{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
    operation index, create, delete, and update. 'BulkIndex' behaves like an
    "upsert", 'BulkCreate' will fail if a document already exists at the DocId.
@@ -401,6 +656,24 @@ data EsResultFound a =
               , _source   :: a }
  deriving (Eq, Show)
instance (FromJSON a) => FromJSON (EsResult a) where
parseJSON jsonVal@(Object v) = do
found <- v .:? "found" .!= False
fr <- if found
then parseJSON jsonVal
else return Nothing
EsResult <$> v .: "_index" <*>
v .: "_type" <*>
v .: "_id" <*>
pure fr
parseJSON _ = empty
instance (FromJSON a) => FromJSON (EsResultFound a) where
parseJSON (Object v) = EsResultFound <$>
v .: "_version" <*>
v .: "_source"
parseJSON _ = empty
{-| 'EsError' is the generic type that will be returned when there was a
    problem. If you can't parse the expected response, it's a good idea to
    try parsing this.
@@ -410,6 +683,12 @@ data EsError =
          , errorMessage :: Text }
  deriving (Eq, Show)
instance FromJSON EsError where
parseJSON (Object v) = EsError <$>
v .: "status" <*>
(v .: "error" <|> (v .: "error" >>= (.: "reason")))
parseJSON _ = empty
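-- Both historical ES error shapes satisfy the (<|>) above (sample bodies, assumed):
--   {"status":404,"error":"IndexMissingException[[twitter] missing]"}
--   {"status":404,"error":{"type":"index_not_found_exception","reason":"no such index"}}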
{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response
    returned by the ElasticSearch server. If you encounter this error, please
    verify that your domain data types and FromJSON instances are working properly
@@ -418,8 +697,9 @@ sure that your mappings are correct, then this error may be an indication of an
    incompatibility between Bloodhound and ElasticSearch. Please open a bug report
    and be sure to include the exception body.
-}

-data EsProtocolException = EsProtocolException { esProtoExBody :: LByteString }
-  deriving (Eq, Show)
newtype EsProtocolException = EsProtocolException
  { esProtoExBody :: LByteString
  } deriving (Eq, Show)

instance Exception EsProtocolException
@@ -445,6 +725,13 @@ newtype SearchAliasRouting =
  SearchAliasRouting (NonEmpty RoutingValue)
  deriving (Eq, Show)
instance ToJSON SearchAliasRouting where
toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs))
instance FromJSON SearchAliasRouting where
parseJSON = withText "SearchAliasRouting" parse
where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t)
newtype IndexAliasRouting =
  IndexAliasRouting RoutingValue
  deriving (Eq, Show, ToJSON, FromJSON)
@@ -457,6 +744,55 @@ newtype IndexAliasesSummary =
  IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] }
  deriving (Eq, Show)
instance FromJSON IndexAliasesSummary where
parseJSON = withObject "IndexAliasesSummary" parse
where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o)
go ixn = withObject "index aliases" $ \ia -> do
aliases <- ia .:? "aliases" .!= mempty
forM (HM.toList aliases) $ \(aName, v) -> do
let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName))
IndexAliasSummary indexAlias <$> parseJSON v
instance ToJSON IndexAliasAction where
toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)]
where Object iaObj = toJSON ia
Object optsObj = toJSON opts
toJSON (RemoveAlias ia) = object ["remove" .= iaObj]
where Object iaObj = toJSON ia
instance ToJSON IndexAlias where
toJSON IndexAlias {..} = object ["index" .= srcIndex
, "alias" .= indexAlias
]
instance ToJSON IndexAliasCreate where
toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj)
where filterObj = maybe mempty (HM.singleton "filter" . toJSON) aliasCreateFilter
Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting
instance ToJSON AliasRouting where
toJSON (AllAliasRouting v) = object ["routing" .= v]
toJSON (GranularAliasRouting srch idx) = object (catMaybes prs)
where prs = [("search_routing" .=) <$> srch
,("index_routing" .=) <$> idx]
instance FromJSON AliasRouting where
parseJSON = withObject "AliasRouting" parse
where parse o = parseAll o <|> parseGranular o
parseAll o = AllAliasRouting <$> o .: "routing"
parseGranular o = do
sr <- o .:? "search_routing"
ir <- o .:? "index_routing"
if isNothing sr && isNothing ir
then fail "Both search_routing and index_routing can't be blank"
else return (GranularAliasRouting sr ir)
instance FromJSON IndexAliasCreate where
parseJSON v = withObject "IndexAliasCreate" parse v
where parse o = IndexAliasCreate <$> optional (parseJSON v)
<*> o .:? "filter"
{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -}
data IndexAliasSummary = IndexAliasSummary
  { indexAliasSummaryAlias :: IndexAlias
@@ -473,10 +809,32 @@ newtype DocVersion = DocVersion {

-- | Smart constructor for in-range doc version
mkDocVersion :: Int -> Maybe DocVersion
mkDocVersion i
- | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) =
  | i >= docVersionNumber minBound
    && i <= docVersionNumber maxBound =
    Just $ DocVersion i
  | otherwise = Nothing
instance Bounded DocVersion where
minBound = DocVersion 1
maxBound = DocVersion 9200000000000000000 -- 9.2e+18
instance Enum DocVersion where
succ x
| x /= maxBound = DocVersion (succ $ docVersionNumber x)
| otherwise = succError "DocVersion"
pred x
| x /= minBound = DocVersion (pred $ docVersionNumber x)
| otherwise = predError "DocVersion"
toEnum i =
fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i
fromEnum = docVersionNumber
enumFrom = boundedEnumFrom
enumFromThen = boundedEnumFromThen
instance FromJSON DocVersion where
parseJSON v = do
i <- parseJSON v
maybe (fail "DocVersion out of range") return $ mkDocVersion i
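-- The smart constructor bounds what the parser accepts (a sketch):
--   mkDocVersion 0 ==> Nothing          (versions start at 1)
--   mkDocVersion 1 ==> Just (DocVersion 1)
-- so parseJSON fails on out-of-range numbers instead of building a bad version.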
{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its
    own version numbers instead of ones from ES.
@@ -576,36 +934,6 @@ newtype TemplateName = TemplateName Text deriving (Eq, Show, ToJSON, FromJSON)
-}
newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, ToJSON, FromJSON)

--- This insanity is because ES *sometimes* returns Replica/Shard counts as strings
-instance FromJSON ReplicaCount where
-  parseJSON v = parseAsInt v
-            <|> parseAsString v
-    where parseAsInt = fmap ReplicaCount . parseJSON
-          parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText)
-
-instance FromJSON ShardCount where
-  parseJSON v = parseAsInt v
-            <|> parseAsString v
-    where parseAsInt = fmap ShardCount . parseJSON
-          parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText)
-
-instance Bounded DocVersion where
-  minBound = DocVersion 1
-  maxBound = DocVersion 9200000000000000000 -- 9.2e+18
-
-instance Enum DocVersion where
-  succ x
-    | x /= maxBound = DocVersion (succ $ docVersionNumber x)
-    | otherwise = succError "DocVersion"
-  pred x
-    | x /= minBound = DocVersion (pred $ docVersionNumber x)
-    | otherwise = predError "DocVersion"
-  toEnum i =
-    fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i
-  fromEnum = docVersionNumber
-  enumFrom = boundedEnumFrom
-  enumFromThen = boundedEnumFromThen

-- | Username type used for HTTP Basic authentication. See 'basicAuthHook'.
newtype EsUsername = EsUsername { esUsername :: Text } deriving (Read, Show, Eq)
@@ -1225,13 +1553,13 @@ data SnapshotRestoreSettings = SnapshotRestoreSettings {
    -- revert back to the server default during the restore process.
  } deriving (Eq, Show)

-data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings {
-    repoUpdateVerify :: Bool
newtype SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings
  { repoUpdateVerify :: Bool
    -- ^ After creation/update, synchronously check that nodes can
    -- write to this repo. Defaults to True. You may use False if you
    -- need a faster response and plan on verifying manually later
    -- with 'verifySnapshotRepo'.
  } deriving (Eq, Show)

-- | Reasonable defaults for repo creation/update
@@ -1271,7 +1599,7 @@ instance SnapshotRepo FsSnapshotRepo where
  fromGSnapshotRepo GenericSnapshotRepo {..}
    | gSnapshotRepoType == fsRepoType = do
      let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings
-     parseRepo $ do
      parseRepo $
        FsSnapshotRepo gSnapshotRepoName <$> o .: "location"
                                         <*> o .:? "compress" .!= False
                                         <*> o .:? "chunk_size"
@@ -1370,12 +1698,6 @@ data SnapshotInfo = SnapshotInfo {
  } deriving (Eq, Show)

-instance FromJSON POSIXMS where
-  parseJSON = withScientific "POSIXMS" (return . parse)
-    where parse n =
-            let n' = truncate n :: Integer
-            in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000)))

instance FromJSON SnapshotInfo where
  parseJSON = withObject "SnapshotInfo" parse
    where
@@ -1437,7 +1759,8 @@ instance Bounded RRGroupRefNum where

-- | Only allows valid group number references (1-9).
mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum
mkRRGroupRefNum i
- | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) =
  | i >= rrGroupRefNum minBound
    && i <= rrGroupRefNum maxBound =
    Just $ RRGroupRefNum i
  | otherwise = Nothing
@@ -1471,9 +1794,9 @@ defaultSnapshotRestoreSettings = SnapshotRestoreSettings {

-- | Index settings that can be overridden. The docs only mention you
-- can update number of replicas, but there may be more. You
-- definitely cannot override shard count.
-data RestoreIndexSettings = RestoreIndexSettings {
-    restoreOverrideReplicas :: Maybe ReplicaCount
newtype RestoreIndexSettings = RestoreIndexSettings
  { restoreOverrideReplicas :: Maybe ReplicaCount
  } deriving (Eq, Show)

instance ToJSON RestoreIndexSettings where
@@ -1813,7 +2136,7 @@ instance FromJSON NodeBreakersStats where
    <*> o .: "fielddata"

parseNodeStats :: FullNodeId -> Object -> Parser NodeStats
-parseNodeStats fnid o = do
parseNodeStats fnid o =
  NodeStats <$> o .: "name"
            <*> pure fnid
            <*> o .:? "breakers"
@@ -1871,7 +2194,7 @@ instance FromJSON BoundTransportAddress where

instance FromJSON NodeOSInfo where
  parseJSON = withObject "NodeOSInfo" parse
    where
-     parse o = do
      parse o =
        NodeOSInfo <$> (unMS <$> o .: "refresh_interval_in_millis")
                   <*> o .: "name"
                   <*> o .: "arch"
@@ -1990,6 +2313,16 @@ data Interval = Year
  | Minute
  | Second deriving (Eq, Show)
instance ToJSON Interval where
toJSON Year = "year"
toJSON Quarter = "quarter"
toJSON Month = "month"
toJSON Week = "week"
toJSON Day = "day"
toJSON Hour = "hour"
toJSON Minute = "minute"
toJSON Second = "second"
parseStringInterval :: (Monad m) => String -> m NominalDiffTime
parseStringInterval s = case span isNumber s of
  ("", _) -> fail "Invalid interval"


@@ -146,6 +146,12 @@ newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, FromJSON, ToJSON)

-- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ
newtype POSIXMS = POSIXMS { posixMS :: UTCTime }
instance FromJSON POSIXMS where
parseJSON = withScientific "POSIXMS" (return . parse)
where parse n =
let n' = truncate n :: Integer
in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000)))
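-- Worked example (not in the diff): the JSON number 1518558463000 is
-- milliseconds; `div` 1000 gives 1518558463 s, i.e. 2018-02-13 21:47:43 UTC.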
newtype Boost =
  Boost Double
  deriving (Eq, Show, ToJSON, FromJSON)
@@ -154,15 +160,28 @@ newtype BoostTerms =
  BoostTerms Double
  deriving (Eq, Show, ToJSON, FromJSON)
{-| 'ReplicaCount' is part of 'IndexSettings' -}
newtype ReplicaCount =
ReplicaCount Int
deriving (Eq, Show, ToJSON)
{-| 'ShardCount' is part of 'IndexSettings' -}
newtype ShardCount =
  ShardCount Int
  deriving (Eq, Show, ToJSON)

-{-| 'ReplicaCount' is part of 'IndexSettings' -}
-newtype ReplicaCount =
-  ReplicaCount Int
-  deriving (Eq, Show, ToJSON)

-- This insanity is because ES *sometimes* returns Replica/Shard counts as strings
instance FromJSON ReplicaCount where
  parseJSON v = parseAsInt v
            <|> parseAsString v
where parseAsInt = fmap ReplicaCount . parseJSON
parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText)
instance FromJSON ShardCount where
parseJSON v = parseAsInt v
<|> parseAsString v
where parseAsInt = fmap ShardCount . parseJSON
parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText)
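-- Both encodings parse to the same value (a sketch, via aeson's decode):
--   decode "2"     :: Maybe ReplicaCount ==> Just (ReplicaCount 2)
--   decode "\"2\"" :: Maybe ReplicaCount ==> Just (ReplicaCount 2)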
{-| 'IndexName' is used to describe which index to query/create/delete -}
newtype IndexName =
@@ -195,7 +214,7 @@ unMS (MS t) = t

instance FromJSON MS where
  parseJSON = withScientific "MS" (return . MS . parse)
    where
-     parse n = fromInteger ((truncate n) * 1000)
      parse n = fromInteger (truncate n * 1000)

newtype TokenFilter =
  TokenFilter Text deriving (Eq, Show, FromJSON, ToJSON)


@@ -435,6 +435,10 @@ data GeoPoint =
GeoPoint { geoField :: FieldName
         , latLon   :: LatLon } deriving (Eq, Show)
instance ToJSON GeoPoint where
toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) =
object [ geoPointField .= geoPointLatLon ]
data DistanceUnit = Miles
                  | Yards
                  | Feet
@@ -882,7 +886,8 @@ instance FromJSON QueryStringQuery where

instance ToJSON RangeQuery where
  toJSON (RangeQuery (FieldName fieldName) range boost) =
    object [ fieldName .= object conjoined ]
-   where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range)
    where
      conjoined = ("boost" .= boost) : rangeValueToPair range

instance FromJSON RangeQuery where
  parseJSON = withObject "RangeQuery" parse
@@ -891,38 +896,76 @@ instance FromJSON RangeQuery where
    <$> parseJSON (Object o)
    <*> o .: "boost"
parseRangeValue :: ( FromJSON t4
, FromJSON t3
, FromJSON t2
, FromJSON t1
)
=> (t3 -> t5)
-> (t1 -> t6)
-> (t4 -> t7)
-> (t2 -> t8)
-> (t5 -> t6 -> b)
-> (t7 -> t6 -> b)
-> (t5 -> t8 -> b)
-> (t7 -> t8 -> b)
-> (t5 -> b)
-> (t6 -> b)
-> (t7 -> b)
-> (t8 -> b)
-> Parser b
-> Object
-> Parser b
parseRangeValue mkGt mkLt mkGte mkLte
fGtLt fGteLt fGtLte fGteLte
fGt fLt fGte fLte nada o = do
lt <- o .:? "lt"
lte <- o .:? "lte"
gt <- o .:? "gt"
gte <- o .:? "gte"
case (lt, lte, gt, gte) of
(Just a, _, Just b, _) ->
return (fGtLt (mkGt b) (mkLt a))
(Just a, _, _, Just b) ->
return (fGteLt (mkGte b) (mkLt a))
(_, Just a, Just b, _) ->
return (fGtLte (mkGt b) (mkLte a))
(_, Just a, _, Just b) ->
return (fGteLte (mkGte b) (mkLte a))
(_, _, Just a, _) ->
return (fGt (mkGt a))
(Just a, _, _, _) ->
return (fLt (mkLt a))
(_, _, _, Just a) ->
return (fGte (mkGte a))
(_, Just a, _, _) ->
return (fLte (mkLte a))
(Nothing, Nothing, Nothing, Nothing) ->
nada
instance FromJSON RangeValue where
  parseJSON = withObject "RangeValue" parse
    where parse o = parseDate o
                <|> parseDouble o
-         parseDate o = do lt <- o .:? "lt"
-                          lte <- o .:? "lte"
-                          gt <- o .:? "gt"
-                          gte <- o .:? "gte"
-                          case (lt, lte, gt, gte) of
-                            (Just a, _, Just b, _) -> return (RangeDateGtLt (GreaterThanD b) (LessThanD a))
-                            (Just a, _, _, Just b) -> return (RangeDateGteLt (GreaterThanEqD b) (LessThanD a))
-                            (_, Just a, Just b, _) -> return (RangeDateGtLte (GreaterThanD b) (LessThanEqD a))
-                            (_, Just a, _, Just b) -> return (RangeDateGteLte (GreaterThanEqD b) (LessThanEqD a))
-                            (_, _, Just a, _) -> return (RangeDateGt (GreaterThanD a))
-                            (Just a, _, _, _) -> return (RangeDateLt (LessThanD a))
-                            (_, _, _, Just a) -> return (RangeDateGte (GreaterThanEqD a))
-                            (_, Just a, _, _) -> return (RangeDateLte (LessThanEqD a))
-                            (Nothing, Nothing, Nothing, Nothing) -> mzero
-         parseDouble o = do lt <- o .:? "lt"
-                            lte <- o .:? "lte"
-                            gt <- o .:? "gt"
-                            gte <- o .:? "gte"
-                            case (lt, lte, gt, gte) of
-                              (Just a, _, Just b, _) -> return (RangeDoubleGtLt (GreaterThan b) (LessThan a))
-                              (Just a, _, _, Just b) -> return (RangeDoubleGteLt (GreaterThanEq b) (LessThan a))
-                              (_, Just a, Just b, _) -> return (RangeDoubleGtLte (GreaterThan b) (LessThanEq a))
-                              (_, Just a, _, Just b) -> return (RangeDoubleGteLte (GreaterThanEq b) (LessThanEq a))
-                              (_, _, Just a, _) -> return (RangeDoubleGt (GreaterThan a))
-                              (Just a, _, _, _) -> return (RangeDoubleLt (LessThan a))
-                              (_, _, _, Just a) -> return (RangeDoubleGte (GreaterThanEq a))
-                              (_, Just a, _, _) -> return (RangeDoubleLte (LessThanEq a))
-                              (Nothing, Nothing, Nothing, Nothing) -> mzero
          parseDate o =
            parseRangeValue
              GreaterThanD LessThanD
              GreaterThanEqD LessThanEqD
              RangeDateGtLt RangeDateGteLt
              RangeDateGtLte RangeDateGteLte
              RangeDateGt RangeDateLt
              RangeDateGte RangeDateLte
              mzero o
          parseDouble o =
            parseRangeValue
              GreaterThan LessThan
              GreaterThanEq LessThanEq
              RangeDoubleGtLt RangeDoubleGteLt
              RangeDoubleGtLte RangeDoubleGteLte
              RangeDoubleGt RangeDoubleLt
              RangeDoubleGte RangeDoubleLte
              mzero o

instance ToJSON PrefixQuery where
  toJSON (PrefixQuery (FieldName fieldName) queryValue boost) =

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
{-# LANGUAGE OverloadedStrings #-}

-module Test.Aggregation where
module Test.Aggregation (spec) where

import Test.Common
import Test.Import
@@ -29,7 +29,7 @@ spec =
        usersAggResults = result >>= aggregations >>= toTerms "users"
        subAggResults = usersAggResults >>= (listToMaybe . buckets) >>= termsAggs >>= toTerms "age_agg"
        subAddResultsExists = isJust subAggResults
-   liftIO $ (subAddResultsExists) `shouldBe` True
    liftIO $ subAddResultsExists `shouldBe` True

  it "returns cardinality aggregation results" $ withTestEnv $ do
    _ <- insertData


@@ -1,6 +1,6 @@
{-# LANGUAGE OverloadedStrings #-}

-module Test.BulkAPI where
module Test.BulkAPI (spec) where

import Test.Common
import Test.Import
@@ -9,7 +9,7 @@ import qualified Data.Vector as V
import qualified Lens.Micro.Aeson as LMA

newtype BulkTest =
- BulkTest { name :: Text }
  BulkTest Text
  deriving (Eq, Show)

instance ToJSON BulkTest where
@@ -25,7 +25,7 @@ instance FromJSON BulkTest where
spec :: Spec
spec =
- describe "Bulk API" $ do
  describe "Bulk API" $
    it "inserts all documents we request" $ withTestEnv $ do
      _ <- insertData
      let firstTest = BulkTest "blah"
@@ -84,7 +84,7 @@ spec =
      hitsTotal (searchHits sr) `shouldBe` 6
      let nameList :: [Text]
          nameList =
-           (hits (searchHits sr))
            hits (searchHits sr)
              ^.. traverse
                . to hitSource
                . _Just


@@ -14,14 +14,14 @@ spec =
      docInserted <- getDocument testIndex testMapping (DocId "1")
      let newTweet = eitherDecode
            (responseBody docInserted) :: Either String (EsResult Tweet)
-     liftIO $ (fmap getSource newTweet `shouldBe` Right (Just patchedTweet))
      liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet)

    it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do
      _ <- insertWithSpaceInId
      docInserted <- getDocument testIndex testMapping (DocId "Hello World")
      let newTweet = eitherDecode
            (responseBody docInserted) :: Either String (EsResult Tweet)
-     liftIO $ (fmap getSource newTweet `shouldBe` Right (Just exampleTweet))
      liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet)

    it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do
      doc <- getDocument testIndex testMapping (DocId "bogus")


@@ -36,8 +36,10 @@ instance Arbitrary UTCTime where
    <*> (fromRational . toRational <$> choose (0::Double, 86400))

instance Arbitrary Day where
- arbitrary = ModifiedJulianDay <$> (2000 +) <$> arbitrary
- shrink = (ModifiedJulianDay <$>) . shrink . toModifiedJulianDay
  arbitrary =
    ModifiedJulianDay . (2000 +) <$> arbitrary
  shrink =
    (ModifiedJulianDay <$>) . shrink . toModifiedJulianDay

#if !MIN_VERSION_QuickCheck(2,9,0)
instance Arbitrary a => Arbitrary (NonEmpty a) where


@@ -7,28 +7,24 @@ import Test.Import
 import qualified Data.Map as M
+initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight))
+initHighlights fieldName = do
+  _ <- insertData
+  _ <- insertOther
+  let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell")
+  let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing]
+  let search = mkHighlightSearch (Just query) testHighlight
+  searchTweetHighlight search
 spec :: Spec
 spec =
   describe "Highlights API" $ do
     it "returns highlight from query when there should be one" $ withTestEnv $ do
-      _ <- insertData
-      _ <- insertOther
-      let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell")
-      let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing]
-      let search = mkHighlightSearch (Just query) testHighlight
-      myHighlight <- searchTweetHighlight search
+      myHighlight <- initHighlights "message"
       liftIO $
         myHighlight `shouldBe` Right (Just (M.fromList [("message",["Use <em>haskell</em>!"])]))
     it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do
-      _ <- insertData
-      _ <- insertOther
-      let query = QueryMatchQuery $ mkMatchQuery (FieldName "message") (QueryString "haskell")
-      let testHighlight = Highlights Nothing [FieldHighlight (FieldName "user") Nothing]
-      let search = mkHighlightSearch (Just query) testHighlight
-      myHighlight <- searchTweetHighlight search
+      myHighlight <- initHighlights "user"
       liftIO $
         myHighlight `shouldBe` Right Nothing
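
Extracting `initHighlights` leaves each test with one line of arrangement and makes the field name the only moving part. Any further case would follow the same shape; a hypothetical example (the "location" field is invented, not in the test fixtures):

    it "returns no highlight for a field the query can't match" $ withTestEnv $ do
      myHighlight <- initHighlights "location"  -- hypothetical field
      liftIO $ myHighlight `shouldBe` Right Nothing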


@@ -1,7 +1,7 @@
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE ScopedTypeVariables #-}
-module Test.JSON where
+module Test.JSON (spec) where
 import Test.Import
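
The explicit `(spec)` export list (here and in Test.Snapshots below) is presumably what makes the dead-code pass in this commit worthwhile: under `module X where`, every top-level binding is implicitly exported, so GHC's -Wunused-top-binds never fires and unused helpers look externally reachable. A minimal illustration (the `leftoverHelper` name is invented):

    module Test.JSON (spec) where

    import Test.Hspec (Spec, describe, it, pending)

    spec :: Spec
    spec = describe "JSON" $ it "round-trips" pending

    -- With the explicit export list this binding is module-local, so
    -- -Wunused-top-binds (and a weeder pass) can report it as dead code.
    leftoverHelper :: Int -> Int
    leftoverHelper = (+ 1)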


@@ -10,7 +10,7 @@ import qualified Data.Map as M
 spec :: Spec
 spec =
-  describe "Script" $ do
+  describe "Script" $
     it "returns a transformed document based on the script field" $ withTestEnv $ do
       _ <- insertData
       let query = MatchAllQuery Nothing
@@ -32,4 +32,5 @@ spec =
         Right sr -> do
           let Just results =
                 hitFields (head (hits (searchHits sr)))
-          liftIO $ results `shouldBe` (HitFields (M.fromList [("test1", [Number 20000.0])]))
+          liftIO $
+            results `shouldBe` HitFields (M.fromList [("test1", [Number 20000.0])])


@@ -1,7 +1,7 @@
 {-# LANGUAGE RecordWildCards #-}
 {-# LANGUAGE OverloadedStrings #-}
-module Test.Snapshots where
+module Test.Snapshots (spec) where
 import Test.Common
 import Test.Import
@@ -18,7 +18,7 @@ import Test.Generators ()
 spec :: Spec
 spec = do
-  describe "FsSnapshotRepo" $ do
+  describe "FsSnapshotRepo" $
     prop "SnapshotRepo laws" $ \fsr ->
       fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
@@ -149,7 +149,7 @@ canSnapshot :: IO Bool
 canSnapshot = do
   caresAboutRepos <- atleast es16
   repoPaths <- getRepoPaths
-  return (not caresAboutRepos || not (null (repoPaths)))
+  return (not caresAboutRepos || not (null repoPaths))
 withSnapshotRepo
   :: ( MonadMask m
@@ -197,5 +197,5 @@ withSnapshot srn sn = bracket_ alloc free
     , snapIndices = Just (IndexList (testIndex :| []))
       -- We don't actually need to back up any data
     }
-    free = do
+    free =
       deleteSnapshot srn sn
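
Once `free` is a single expression, dropping its `do` is the same "Redundant do" hint again; the interesting part is the `bracket_ alloc free` it hangs off, which guarantees the snapshot is deleted even when the test body throws. A minimal sketch of that shape (the strings stand in for the real setup and teardown):

    import Control.Exception (bracket_)

    -- bracket_ acquire release body: release runs even if body throws.
    demo :: IO ()
    demo = bracket_
      (putStrLn "alloc: create the snapshot")
      (putStrLn "free:  delete the snapshot")  -- always runs
      (putStrLn "body:  run the assertions")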


@@ -7,7 +7,7 @@ import Test.Import
 spec :: Spec
 spec =
-  describe "sorting" $ do
+  describe "sorting" $
     it "returns documents in the right order" $ withTestEnv $ do
       _ <- insertData
       _ <- insertOther


@@ -11,27 +11,27 @@ spec :: Spec
 spec =
   describe "Source filtering" $ do
-    it "doesn't include source when sources are disabled" $ withTestEnv $ do
+    it "doesn't include source when sources are disabled" $ withTestEnv $
       searchExpectSource
         NoSource
         (Left (EsError 500 "Source was missing"))
-    it "includes a source" $ withTestEnv $ do
+    it "includes a source" $ withTestEnv $
       searchExpectSource
         (SourcePatterns (PopPattern (Pattern "message")))
         (Right (Object (HM.fromList [("message", String "Use haskell!")])))
-    it "includes sources" $ withTestEnv $ do
+    it "includes sources" $ withTestEnv $
       searchExpectSource
         (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"]))
        (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")])))
-    it "includes source patterns" $ withTestEnv $ do
+    it "includes source patterns" $ withTestEnv $
       searchExpectSource
         (SourcePatterns (PopPattern (Pattern "*ge")))
         (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")])))
-    it "excludes source patterns" $ withTestEnv $ do
+    it "excludes source patterns" $ withTestEnv $
       searchExpectSource
         (SourceIncludeExclude (Include [])
           (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"]))


@@ -7,7 +7,7 @@ import Test.Import
 spec :: Spec
 spec =
-  describe "Suggest" $ do
+  describe "Suggest" $
     it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
       _ <- insertData
       let query = QueryMatchNoneQuery


@@ -1,13 +1,8 @@
 {-# LANGUAGE CPP #-}
-{-# LANGUAGE DefaultSignatures #-}
-{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE DeriveGeneric #-}
 {-# LANGUAGE FlexibleContexts #-}
-{-# LANGUAGE GeneralizedNewtypeDeriving #-}
 {-# LANGUAGE OverloadedStrings #-}
 {-# LANGUAGE RecordWildCards #-}
 {-# LANGUAGE ScopedTypeVariables #-}
-{-# LANGUAGE TemplateHaskell #-}
 {-# LANGUAGE TypeOperators #-}
 {-# OPTIONS_GHC -fno-warn-orphans #-}
 #if __GLASGOW_HASKELL__ < 800
@@ -21,7 +16,7 @@ module Main where
 import Test.Common
 import Test.Import
-import Prelude hiding (filter)
+import Prelude
 import qualified Test.Aggregation as Aggregation
 import qualified Test.BulkAPI as Bulk
@@ -64,11 +59,11 @@ main = hspec $ do
     let errorResp = eitherDecode (responseBody res)
     liftIO (errorResp `shouldBe` Right (EsError 404 "no such index"))
-  describe "Monoid (SearchHits a)" $ do
+  describe "Monoid (SearchHits a)" $
     prop "abides the monoid laws" $ eq $
       prop_Monoid (T :: T (SearchHits ()))
-  describe "mkDocVersion" $ do
+  describe "mkDocVersion" $
     prop "can never construct an out of range docVersion" $ \i ->
       let res = mkDocVersion i
      in case res of
@@ -77,7 +72,7 @@ main = hspec $ do
           (dv <= maxBound) .&&.
           docVersionNumber dv === i
-  describe "getNodesInfo" $ do
+  describe "getNodesInfo" $
     it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
       res <- getNodesInfo LocalNode
       liftIO $ case res of
@@ -87,7 +82,7 @@ main = hspec $ do
         Right NodesInfo {..} -> length nodesInfo `shouldBe` 1
         Left e -> expectationFailure ("Expected NodesInfo but got " <> show e)
-  describe "getNodesStats" $ do
+  describe "getNodesStats" $
     it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
       res <- getNodesStats LocalNode
       liftIO $ case res of
@@ -97,7 +92,7 @@ main = hspec $ do
         Right NodesStats {..} -> length nodesStats `shouldBe` 1
         Left e -> expectationFailure ("Expected NodesStats but got " <> show e)
-  describe "Enum DocVersion" $ do
+  describe "Enum DocVersion" $
     it "follows the laws of Enum, Bounded" $ do
       evaluate (succ maxBound :: DocVersion) `shouldThrow` anyErrorCall
       evaluate (pred minBound :: DocVersion) `shouldThrow` anyErrorCall
@@ -107,11 +102,14 @@ main = hspec $ do
       enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound]
      enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound]
-  describe "Scan & Scroll API" $ do
+  describe "Scan & Scroll API" $
     it "returns documents using the scan&scroll API" $ withTestEnv $ do
       _ <- insertData
       _ <- insertOther
-      let search = (mkSearch (Just $ MatchAllQuery Nothing) Nothing) { size = (Size 1) }
+      let search =
+            (mkSearch
+              (Just $ MatchAllQuery Nothing) Nothing)
+            { size = Size 1 }
       regular_search <- searchTweet search
       scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet]
       let scan_search = map hitSource scan_search'
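
The reflowed `search` binding is the same value as before. Note which parentheses went and which stayed: the brackets around `Size 1` were redundant, but the ones around the `mkSearch` application are required, because record update binds tighter than function application. A small sketch of that precedence rule (the `Cfg` type is invented):

    data Cfg = Cfg { retries :: Int, verbose :: Bool } deriving Show

    -- OK: the update applies to the parenthesized constructor application.
    tweaked :: Cfg
    tweaked = (Cfg 3 False) { retries = 5 }

    -- Without the parens, Cfg 3 False { retries = 5 } would parse as
    -- Cfg 3 (False { retries = 5 }) and be rejected.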