diff --git a/bloodhound.cabal b/bloodhound.cabal index 0dc3634..98bb901 100644 --- a/bloodhound.cabal +++ b/bloodhound.cabal @@ -30,7 +30,7 @@ Flag ES1 Flag ES5 Description: Run the test suite against an Elasticsearch 5.x server Default: True - + library ghc-options: -Wall exposed-modules: Database.V5.Bloodhound @@ -51,16 +51,15 @@ library Database.V1.Bloodhound.Client Database.V1.Bloodhound.Types Database.V1.Bloodhound.Types.Class - - -- Database.V1.Bloodhound.Internal.Aggregation - -- Database.V1.Bloodhound.Internal.Analysis - -- Database.V1.Bloodhound.Internal.Client - -- Database.V1.Bloodhound.Internal.Highlight - -- Database.V1.Bloodhound.Internal.Newtypes - -- Database.V1.Bloodhound.Internal.Query - -- Database.V1.Bloodhound.Internal.Sort + Database.V1.Bloodhound.Internal.Aggregation + Database.V1.Bloodhound.Internal.Client + Database.V1.Bloodhound.Internal.Highlight + Database.V1.Bloodhound.Internal.Newtypes + Database.V1.Bloodhound.Internal.Query + Database.V1.Bloodhound.Internal.Sort Database.V1.Bloodhound.Internal.StringlyTyped - -- Database.V1.Bloodhound.Internal.Suggest + Database.V1.Bloodhound.Internal.Suggest + other-modules: Bloodhound.Import Database.Bloodhound.Common.Script hs-source-dirs: src @@ -103,7 +102,6 @@ test-suite bloodhound-tests Test.Indices Test.JSON Test.Query - Test.Script Test.Snapshots Test.Sorting Test.SourceFiltering diff --git a/src/Bloodhound/Import.hs b/src/Bloodhound/Import.hs index b8f1afc..cee26d2 100644 --- a/src/Bloodhound/Import.hs +++ b/src/Bloodhound/Import.hs @@ -8,6 +8,7 @@ module Bloodhound.Import , readMay , showText , deleteSeveral + , oPath ) where import Control.Applicative as X (Alternative (..), optional) @@ -74,3 +75,7 @@ parseNEJSON (x:xs) = DT.mapM parseJSON (x :| xs) deleteSeveral :: (Eq k, Hashable k) => [k] -> HM.HashMap k v -> HM.HashMap k v deleteSeveral ks hm = foldr HM.delete hm ks + +oPath :: ToJSON a => NonEmpty Text -> a -> Value +oPath (k :| []) v = object [k .= v] +oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v] diff --git a/src/Database/V1/Bloodhound/Internal/Aggregation.hs b/src/Database/V1/Bloodhound/Internal/Aggregation.hs index 2e0694c..760a302 100644 --- a/src/Database/V1/Bloodhound/Internal/Aggregation.hs +++ b/src/Database/V1/Bloodhound/Internal/Aggregation.hs @@ -1,14 +1,16 @@ -{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} module Database.V1.Bloodhound.Internal.Aggregation where import Bloodhound.Import -import qualified Data.HashMap.Strict as HM -import qualified Data.Map.Strict as M -import Data.Text -import qualified Data.Text as T +import qualified Data.HashMap.Strict as HM +import qualified Data.Map.Strict as M +import qualified Data.Text as T +import Database.V1.Bloodhound.Internal.Client +import Database.V1.Bloodhound.Internal.Highlight (HitHighlight) import Database.V1.Bloodhound.Internal.Newtypes import Database.V1.Bloodhound.Internal.Query import Database.V1.Bloodhound.Internal.Sort @@ -88,7 +90,7 @@ data TopHitsAggregation = TopHitsAggregation { taFrom :: Maybe From , taSize :: Maybe Size , taSort :: Maybe Sort - } deriving (Eq, Read, Show) + } deriving (Eq, Show) data MissingAggregation = MissingAggregation { maField :: Text @@ -224,3 +226,191 @@ data DateMathUnit = | DMMinute | DMSecond deriving (Eq, Show) + +data CollectionMode = BreadthFirst + | DepthFirst deriving (Eq, Show) + +type AggregationResults = M.Map Text Value + +class BucketAggregation a where + key :: a -> BucketValue + docCount :: a -> Int + aggs :: a -> Maybe 
AggregationResults + +data BucketValue = TextValue Text + | ScientificValue Scientific + | BoolValue Bool deriving (Show) + +data Bucket a = Bucket { buckets :: [a]} deriving (Show) + +data TermsResult = TermsResult { termKey :: BucketValue + , termsDocCount :: Int + , termsAggs :: Maybe AggregationResults } deriving (Show) + +data DateHistogramResult = DateHistogramResult { dateKey :: Int + , dateKeyStr :: Maybe Text + , dateDocCount :: Int + , dateHistogramAggs :: Maybe AggregationResults } deriving (Show) + +data DateRangeResult = DateRangeResult { dateRangeKey :: Text + , dateRangeFrom :: Maybe UTCTime + , dateRangeFromAsString :: Maybe Text + , dateRangeTo :: Maybe UTCTime + , dateRangeToAsString :: Maybe Text + , dateRangeDocCount :: Int + , dateRangeAggs :: Maybe AggregationResults } deriving (Show, Eq) + +toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult) +toTerms = toAggResult + +toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) +toDateHistogram = toAggResult + +toMissing :: Text -> AggregationResults -> Maybe MissingResult +toMissing = toAggResult + +toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) +toTopHits = toAggResult + +toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a +toAggResult t a = M.lookup t a >>= deserialize + where deserialize = parseMaybe parseJSON + +instance BucketAggregation TermsResult where + key = termKey + docCount = termsDocCount + aggs = termsAggs + +instance BucketAggregation DateHistogramResult where + key = TextValue . showText . dateKey + docCount = dateDocCount + aggs = dateHistogramAggs + +instance BucketAggregation DateRangeResult where + key = TextValue . dateRangeKey + docCount = dateRangeDocCount + aggs = dateRangeAggs + +instance (FromJSON a) => FromJSON (Bucket a) where + parseJSON (Object v) = Bucket <$> + v .: "buckets" + parseJSON _ = mempty + +instance FromJSON BucketValue where + parseJSON (String t) = return $ TextValue t + parseJSON (Number s) = return $ ScientificValue s + parseJSON (Bool b) = return $ BoolValue b + parseJSON _ = mempty + +instance FromJSON MissingResult where + parseJSON = withObject "MissingResult" parse + where parse v = MissingResult <$> v .: "doc_count" + +instance FromJSON TermsResult where + parseJSON (Object v) = TermsResult <$> + v .: "key" <*> + v .: "doc_count" <*> + (pure $ getNamedSubAgg v ["key", "doc_count"]) + parseJSON _ = mempty + +instance FromJSON DateHistogramResult where + parseJSON (Object v) = DateHistogramResult <$> + v .: "key" <*> + v .:? "key_as_string" <*> + v .: "doc_count" <*> + (pure $ getNamedSubAgg v [ "key" + , "doc_count" + , "key_as_string" + ] + ) + parseJSON _ = mempty + +instance FromJSON DateRangeResult where + parseJSON = withObject "DateRangeResult" parse + where parse v = DateRangeResult <$> + v .: "key" <*> + (fmap posixMS <$> v .:? "from") <*> + v .:? "from_as_string" <*> + (fmap posixMS <$> v .:? "to") <*> + v .:? 
"to_as_string" <*> + v .: "doc_count" <*> + (pure $ getNamedSubAgg v [ "key" + , "from" + , "from_as_string" + , "to" + , "to_as_string" + , "doc_count" + ] + ) + +instance (FromJSON a) => FromJSON (TopHitResult a) where + parseJSON (Object v) = TopHitResult <$> + v .: "hits" + parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" + +data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show) + +data TopHitResult a = TopHitResult { tarHits :: (SearchHits a) + } deriving Show + +data SearchHits a = + SearchHits { hitsTotal :: Int + , maxScore :: Score + , hits :: [Hit a] } deriving (Eq, Show) + +instance Semigroup (SearchHits a) where + (SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb) + +instance Monoid (SearchHits a) where + mempty = SearchHits 0 Nothing mempty + mappend = (<>) + +data Hit a = + Hit { hitIndex :: IndexName + , hitType :: MappingName + , hitDocId :: DocId + , hitScore :: Score + , hitSource :: Maybe a + , hitHighlight :: Maybe HitHighlight } deriving (Eq, Show) + +-- Try to get an AggregationResults when we don't know the +-- field name. We filter out the known keys to try to minimize the noise. +getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults +getNamedSubAgg o knownKeys = maggRes + where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o + maggRes + | HM.null unknownKeys = Nothing + | otherwise = Just . M.fromList $ HM.toList unknownKeys + +instance ToJSON CollectionMode where + toJSON BreadthFirst = "breadth_first" + toJSON DepthFirst = "depth_first" + +instance ToJSON DateRangeAggregation where + toJSON DateRangeAggregation {..} = + omitNulls [ "field" .= draField + , "format" .= draFormat + , "ranges" .= toList draRanges + ] + +instance (FromJSON a) => FromJSON (SearchHits a) where + parseJSON (Object v) = SearchHits <$> + v .: "total" <*> + v .: "max_score" <*> + v .: "hits" + parseJSON _ = empty + +instance ToJSON DateRangeAggRange where + toJSON (DateRangeFrom e) = object [ "from" .= e ] + toJSON (DateRangeTo e) = object [ "to" .= e ] + toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] + +instance (FromJSON a) => FromJSON (Hit a) where + parseJSON (Object v) = Hit <$> + v .: "_index" <*> + v .: "_type" <*> + v .: "_id" <*> + v .: "_score" <*> + v .:? "_source" <*> + v .:? 
"highlight" + parseJSON _ = empty diff --git a/src/Database/V1/Bloodhound/Internal/Client.hs b/src/Database/V1/Bloodhound/Internal/Client.hs index 8138ca2..b8543f0 100644 --- a/src/Database/V1/Bloodhound/Internal/Client.hs +++ b/src/Database/V1/Bloodhound/Internal/Client.hs @@ -2,20 +2,29 @@ {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE UndecidableInstances #-} module Database.V1.Bloodhound.Internal.Client where import Bloodhound.Import -import Control.Applicative as A -import Data.Text (Text) -import qualified Data.Version as Vers +import Control.Applicative as A +import qualified Data.HashMap.Strict as HM +import Data.Text (Text) +import qualified Data.Text as T +import qualified Data.Traversable as DT +import qualified Data.Vector as V +import qualified Data.Version as Vers +import GHC.Enum import Network.HTTP.Client -import Text.Read (Read (..)) -import qualified Text.Read as TR +import qualified Text.ParserCombinators.ReadP as RP +import Text.Read (Read (..)) +import qualified Text.Read as TR import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Query +import Database.V1.Bloodhound.Internal.StringlyTyped {-| Common environment for Elasticsearch calls. Connections will be @@ -99,6 +108,22 @@ data Version = Version { number :: VersionNumber , build_snapshot :: Bool , lucene_version :: VersionNumber } deriving (Eq, Show) +instance ToJSON Version where + toJSON Version {..} = object ["number" .= number + ,"build_hash" .= build_hash + ,"build_timestamp" .= build_timestamp + ,"build_snapshot" .= build_snapshot + ,"lucene_version" .= lucene_version] + +instance FromJSON Version where + parseJSON = withObject "Version" parse + where parse o = Version + <$> o .: "number" + <*> o .: "build_hash" + <*> o .: "build_timestamp" + <*> o .: "build_snapshot" + <*> o .: "lucene_version" + -- | Traditional software versioning number newtype VersionNumber = VersionNumber { versionNumber :: Vers.Version } deriving (Eq, Show, Ord) @@ -115,6 +140,15 @@ data Status = Status { ok :: Maybe Bool , version :: Version , tagline :: Text } deriving (Eq, Show) +instance FromJSON Status where + parseJSON (Object v) = Status <$> + v .:? "ok" <*> + (v .:? "status" .!= 200) <*> + v .: "name" <*> + v .: "version" <*> + v .: "tagline" + parseJSON _ = empty + {-| 'IndexSettings' is used to configure the shards and replicas when you create an Elasticsearch Index. 
@@ -244,7 +278,6 @@ megabytes n = kilobytes (1000 * n) kilobytes :: Int -> Bytes kilobytes n = Bytes (1000 * n) - data Interval = Year | Quarter | Month @@ -252,31 +285,8 @@ data Interval = Year | Day | Hour | Minute - | Second deriving (Eq, Show) - -instance ToJSON Interval where - toJSON Year = "year" - toJSON Quarter = "quarter" - toJSON Month = "month" - toJSON Week = "week" - toJSON Day = "day" - toJSON Hour = "hour" - toJSON Minute = "minute" - toJSON Second = "second" - -parseStringInterval :: (Monad m) => String -> m NominalDiffTime -parseStringInterval s = case span isNumber s of - ("", _) -> fail "Invalid interval" - (nS, unitS) -> case (readMay nS, readMay unitS) of - (Just n, Just unit) -> return (fromInteger (n * unitNDT unit)) - (Nothing, _) -> fail "Invalid interval number" - (_, Nothing) -> fail "Invalid interval unit" - where - unitNDT Seconds = 1 - unitNDT Minutes = 60 - unitNDT Hours = 60 * 60 - unitNDT Days = 24 * 60 * 60 - unitNDT Weeks = 7 * 24 * 60 * 60 + | Second + | FractionalInterval Float TimeInterval deriving (Eq, Show) data TimeInterval = Weeks | Days @@ -352,3 +362,1980 @@ data CompoundFormat = CompoundFileFormat Bool | MergeSegmentVsTotalIndex Double -- ^ percentage between 0 and 1 where 0 is false, 1 is true deriving (Eq, Show) + +attrFilterJSON :: NonEmpty NodeAttrFilter -> Value +attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs) + | NodeAttrFilter (NodeAttrName n) vs <- toList fs] + +parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter) +parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse + where parse o = case HM.toList o of + [] -> fail "Expected non-empty list of NodeAttrFilters" + x:xs -> DT.mapM (uncurry parse') (x :| xs) + parse' n = withText "Text" $ \t -> + case T.splitOn "," t of + fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs)) + [] -> fail "Expected non-empty list of filter values" + +newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime } + +data IndexSettingsSummary = IndexSettingsSummary { sSummaryIndexName :: IndexName + , sSummaryFixedSettings :: IndexSettings + , sSummaryUpdateable :: [UpdatableIndexSetting]} + deriving (Eq, Show) + +parseSettings :: Object -> Parser [UpdatableIndexSetting] +parseSettings o = do + o' <- o .: "index" + -- slice the index object into singleton hashmaps and try to parse each + parses <- forM (HM.toList o') $ \(k, v) -> do + -- blocks are now nested into the "index" key, which is not how they're serialized + let atRoot = Object (HM.singleton k v) + let atIndex = Object (HM.singleton "index" atRoot) + optional (parseJSON atRoot <|> parseJSON atIndex) + return (catMaybes parses) + +{-| 'OpenCloseIndex' is a sum type for opening and closing indices. + + +-} +data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Show) + +data FieldType = GeoPointType + | GeoShapeType + | FloatType + | IntegerType + | LongType + | ShortType + | ByteType deriving (Eq, Show) + +data FieldDefinition = + FieldDefinition { fieldType :: FieldType } deriving (Eq, Show) +
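Since the double encoding that parseSettings tolerates above is easy to miss, here is a small sketch of the shape it accepts (payload invented; assumes parseMaybe from Data.Aeson.Types and an UpdatableIndexSetting constructor for number_of_replicas):

settingsFixture :: Value
settingsFixture = object
  [ "index" .= object [ "number_of_replicas" .= (2 :: Int) ] ]

-- Each key under "index" is tried bare and again re-nested under "index",
-- so both serializations of an updatable setting should parse.
parsedUpdates :: Maybe [UpdatableIndexSetting]
parsedUpdates = parseMaybe (withObject "settings" parseSettings) settingsFixture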
+{-| An 'IndexTemplate' defines a template that will automatically be + applied to newly created indices. The templates include both + 'IndexSettings' and mappings, and a simple 'TemplatePattern' that + controls whether the template will be applied to a newly created index. Specify mappings as follows: @[toJSON TweetMapping, ...]@ + + https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html +-} +data IndexTemplate = + IndexTemplate { templatePattern :: TemplatePattern + , templateSettings :: Maybe IndexSettings + , templateMappings :: [Value] + } + +data MappingField = + MappingField { mappingFieldName :: FieldName + , fieldDefinition :: FieldDefinition } deriving (Eq, Show) + +{-| Support for type reification of 'Mapping's is currently incomplete, for + now the mapping API verbiage expects a 'ToJSON'able blob. + + Indexes have mappings, mappings are schemas for the documents contained in the + index. I'd recommend having only one mapping per index, always having a mapping, + and keeping different kinds of documents separated if possible. +-} +data Mapping = Mapping { typeName :: TypeName + , mappingFields :: [MappingField] } deriving (Eq, Show) + +{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk + operation: index, create, delete, and update. 'BulkIndex' behaves like an + "upsert", 'BulkCreate' will fail if a document already exists at the DocId. + + +-} +data BulkOperation = + BulkIndex IndexName MappingName DocId Value + | BulkCreate IndexName MappingName DocId Value + | BulkDelete IndexName MappingName DocId + | BulkUpdate IndexName MappingName DocId Value deriving (Eq, Show) +
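Since the constructors above map one-to-one onto bulk API actions, a short sketch of assembling a batch (index, mapping, and document values entirely made up; IndexName, MappingName, and DocId come from the Newtypes module):

-- Illustrative only: an upsert, a create that fails if doc "2" already
-- exists, and a delete, all against a hypothetical "tweets" index.
tweetBulk :: [BulkOperation]
tweetBulk =
  [ BulkIndex  ix mp (DocId "1") (object [ "user" .= ("bitemyapp" :: Text) ])
  , BulkCreate ix mp (DocId "2") (object [ "user" .= ("dmjio" :: Text) ])
  , BulkDelete ix mp (DocId "3")
  ]
  where ix = IndexName "tweets"
        mp = MappingName "tweet"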
+{-| 'EsResult' describes the standard wrapper JSON document that you see in + successful Elasticsearch lookups or lookups that couldn't find the document. +-} +data EsResult a = EsResult { _index :: Text + , _type :: Text + , _id :: Text + , foundResult :: Maybe (EsResultFound a)} deriving (Eq, Show) + +instance (FromJSON a) => FromJSON (EsResult a) where + parseJSON jsonVal@(Object v) = do + found <- v .:? "found" .!= False + fr <- if found + then parseJSON jsonVal + else return Nothing + EsResult <$> v .: "_index" <*> + v .: "_type" <*> + v .: "_id" <*> + pure fr + parseJSON _ = empty + +{-| 'EsResultFound' contains the document and its metadata inside of an + 'EsResult' when the document was successfully found. +-} +data EsResultFound a = EsResultFound { _version :: DocVersion + , _source :: a } deriving (Eq, Show) + +instance (FromJSON a) => FromJSON (EsResultFound a) where + parseJSON (Object v) = EsResultFound <$> + v .: "_version" <*> + v .: "_source" + parseJSON _ = empty + +{-| 'EsError' is the generic type that will be returned when there was a + problem. If you can't parse the expected response, it's a good idea to + try parsing this. +-} +data EsError = EsError { errorStatus :: Int + , errorMessage :: Text } deriving (Eq, Show) + +{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response +returned by the ElasticSearch server. If you encounter this error, please +verify that your domain data types and FromJSON instances are working properly +(for example, the 'a' of '[Hit a]' in 'SearchResult.searchHits.hits'). If you're +sure that your mappings are correct, then this error may be an indication of an +incompatibility between Bloodhound and ElasticSearch. Please open a bug report +and be sure to include the exception body. +-} +data EsProtocolException = EsProtocolException { esProtoExBody :: LByteString } + deriving (Eq, Show) + +instance Exception EsProtocolException + +data IndexAlias = IndexAlias { srcIndex :: IndexName + , indexAlias :: IndexAliasName } deriving (Eq, Show) + +{-| 'DocVersion' is an integer version number for a document between 1 +and 9.2e+18 used for <>. +-} +newtype DocVersion = DocVersion { + docVersionNumber :: Int + } deriving (Eq, Show, Ord, ToJSON) + +instance FromJSON DocVersion where + parseJSON v = do + i <- parseJSON v + maybe (fail "DocVersion out of range") return $ mkDocVersion i + +-- | Smart constructor for in-range doc version +mkDocVersion :: Int -> Maybe DocVersion +mkDocVersion i + | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) = + Just $ DocVersion i + | otherwise = Nothing + +data IndexAliasAction = AddAlias IndexAlias IndexAliasCreate + | RemoveAlias IndexAlias deriving (Show, Eq) + +data IndexAliasCreate = IndexAliasCreate { aliasCreateRouting :: Maybe AliasRouting + , aliasCreateFilter :: Maybe Filter} + deriving (Show, Eq) + +data AliasRouting = AllAliasRouting RoutingValue + | GranularAliasRouting (Maybe SearchAliasRouting) (Maybe IndexAliasRouting) + deriving (Show, Eq) + +newtype SearchAliasRouting = SearchAliasRouting (NonEmpty RoutingValue) deriving (Show, Eq) + +newtype IndexAliasRouting = IndexAliasRouting RoutingValue deriving (Show, Eq, ToJSON, FromJSON) + +newtype RoutingValue = RoutingValue { routingValue :: Text } deriving (Show, Eq, ToJSON, FromJSON) + +newtype IndexAliasesSummary = IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } deriving (Show, Eq) + +{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -} +data IndexAliasSummary = IndexAliasSummary { indexAliasSummaryAlias :: IndexAlias + , indexAliasSummaryCreate :: IndexAliasCreate} deriving (Show, Eq) + +{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its +own version numbers instead of ones from ES. +-} +newtype ExternalDocVersion = ExternalDocVersion DocVersion + deriving (Eq, Show, Ord, Bounded, Enum, ToJSON) + +{-| 'VersionControl' is specified when indexing documents as an +optimistic concurrency control. +-} +data VersionControl = NoVersionControl + -- ^ Don't send a version. This is a pure overwrite. + | InternalVersion DocVersion + -- ^ Use the default ES versioning scheme. Only + -- index the document if the version is the same + -- as the one specified. Only applicable to + -- updates, as you should be getting Version from + -- a search result. + | ExternalGT ExternalDocVersion + -- ^ Use your own version numbering. Only index + -- the document if the version is strictly higher + -- OR the document doesn't exist. The given + -- version will be used as the new version number + -- for the stored document. N.B. All updates must + -- increment this number, meaning there is some + -- global, external ordering of updates. + | ExternalGTE ExternalDocVersion + -- ^ Use your own version numbering. Only index + -- the document if the version is equal to or higher + -- than the stored version. Will succeed if there + -- is no existing document. The given version will + -- be used as the new version number for the + -- stored document. Use with care, as this could + -- result in data loss. + | ForceVersion ExternalDocVersion + -- ^ The document will always be indexed and the + -- given version will be the new version. This is + -- typically used for correcting errors. Use with + -- care, as this could result in data loss. + deriving (Show, Eq, Ord) + +{-| 'DocumentParent' is used to specify a parent document. +-} +newtype DocumentParent = DocumentParent DocId + deriving (Eq, Show) + +{-| 'IndexDocumentSettings' are special settings supplied when indexing +a document. For the best backwards compatibility when new fields are +added, you should probably prefer to start with 'defaultIndexDocumentSettings' +-} +data IndexDocumentSettings = + IndexDocumentSettings { idsVersionControl :: VersionControl + , idsParent :: Maybe DocumentParent + } deriving (Eq, Show) + +{-| Reasonable default settings. Chooses no version control and no parent. +-} +defaultIndexDocumentSettings :: IndexDocumentSettings +defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing +
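To make the versioning knobs concrete, a small sketch (version number invented) of settings that only index a document when an external version is strictly newer than what is stored:

-- Illustrative only: external versioning via the smart constructor.
-- Nothing is returned if n falls outside the valid DocVersion range.
externallyVersioned :: Int -> Maybe IndexDocumentSettings
externallyVersioned n = do
  v <- mkDocVersion n
  pure defaultIndexDocumentSettings
         { idsVersionControl = ExternalGT (ExternalDocVersion v) }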
+{-| 'IndexSelection' is used for APIs which take a single index, a list of + indexes, or the special @_all@ index. +-} +--TODO: this does not fully support . It wouldn't be too hard to implement but you'd have to add the optional parameters (ignore_unavailable, allow_no_indices, expand_wildcards) to any APIs using it. Also would be a breaking API. +data IndexSelection = IndexList (NonEmpty IndexName) + | AllIndexes deriving (Eq, Show) + +{-| 'NodeSelection' is used for most cluster APIs. See for more details. +-} +data NodeSelection = LocalNode + -- ^ Whatever node receives this request + | NodeList (NonEmpty NodeSelector) + | AllNodes deriving (Eq, Show) + + +-- | An exact match or pattern to identify a node. Note that all of +-- these options support wildcarding, so your node name, server, attr +-- name can all contain * characters to be a fuzzy match. +data NodeSelector = NodeByName NodeName + | NodeByFullNodeId FullNodeId + | NodeByHost Server + -- ^ e.g. 10.0.0.1 or even 10.0.0.* + | NodeByAttribute NodeAttrName Text + -- ^ NodeAttrName can be a pattern, e.g. rack*. The value can too. + deriving (Eq, Show) + +-- | A human-readable node name that is supplied by the user in the +-- node config or automatically generated by ElasticSearch. +newtype NodeName = NodeName { nodeName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +-- | Unique, automatically-generated name assigned to nodes that are +-- usually returned in node-oriented APIs. +newtype FullNodeId = FullNodeId { fullNodeId :: Text } + deriving (Eq, Ord, Show, FromJSON) + +-- | Username type used for HTTP Basic authentication. See 'basicAuthHook'. +newtype EsUsername = EsUsername { esUsername :: Text } deriving (Show, Eq) + +-- | Password type used for HTTP Basic authentication. See 'basicAuthHook'. +newtype EsPassword = EsPassword { esPassword :: Text } deriving (Show, Eq) + +data SnapshotRepoSelection = SnapshotRepoList (NonEmpty SnapshotRepoPattern) + | AllSnapshotRepos deriving (Eq, Show) + + +-- | Either specifies an exact repo name or one with globs in it, +-- e.g. @RepoPattern "foo*"@ __NOTE__: Patterns are not supported on ES < 1.7 +data SnapshotRepoPattern = ExactRepo SnapshotRepoName + | RepoPattern Text + deriving (Eq, Show) + +-- | The unique name of a snapshot repository. +newtype SnapshotRepoName = SnapshotRepoName { snapshotRepoName :: Text } + deriving (Eq, Ord, Show, ToJSON, FromJSON) + + +-- | A generic representation of a snapshot repo. This is what gets +-- sent to and parsed from the server. For repo types enabled by +-- plugins that aren't exported by this library, consider making a +-- custom type which implements 'SnapshotRepo'.
If it is a common repo +-- type, consider submitting a pull request to have it included in the +-- library proper +data GenericSnapshotRepo = GenericSnapshotRepo { + gSnapshotRepoName :: SnapshotRepoName + , gSnapshotRepoType :: SnapshotRepoType + , gSnapshotRepoSettings :: GenericSnapshotRepoSettings + } deriving (Eq, Show) + +-- | Law: fromGSnapshotRepo (toGSnapshotRepo r) == Right r +class SnapshotRepo r where + toGSnapshotRepo :: r -> GenericSnapshotRepo + fromGSnapshotRepo :: GenericSnapshotRepo -> Either SnapshotRepoConversionError r + +data SnapshotRepoConversionError = RepoTypeMismatch SnapshotRepoType SnapshotRepoType + -- ^ Expected type and actual type + | OtherRepoConversionError Text + deriving (Show, Eq) + +instance SnapshotRepo GenericSnapshotRepo where + toGSnapshotRepo = id + fromGSnapshotRepo = Right + + +newtype SnapshotRepoType = SnapshotRepoType { snapshotRepoType :: Text } + deriving (Eq, Ord, Show, ToJSON, FromJSON) + + +-- | Opaque representation of snapshot repo settings. Instances of +-- 'SnapshotRepo' will produce this. +newtype GenericSnapshotRepoSettings = GenericSnapshotRepoSettings { gSnapshotRepoSettingsObject :: Object } + deriving (Eq, Show, ToJSON) + + + -- Regardless of whether you send strongly typed json, my version of + -- ES sends back stringly typed json in the settings, e.g. booleans + -- as strings, so we'll try to convert them. +instance FromJSON GenericSnapshotRepoSettings where + parseJSON = fmap (GenericSnapshotRepoSettings . fmap unStringlyTypeJSON). parseJSON + +-- | The result of running 'verifySnapshotRepo'. +newtype SnapshotVerification = SnapshotVerification { snapshotNodeVerifications :: [SnapshotNodeVerification] } + deriving (Eq, Show) + + +instance FromJSON SnapshotVerification where + parseJSON = withObject "SnapshotVerification" parse + where + parse o = do + o2 <- o .: "nodes" + SnapshotVerification <$> mapM (uncurry parse') (HM.toList o2) + parse' rawFullId = withObject "SnapshotNodeVerification" $ \o -> + SnapshotNodeVerification (FullNodeId rawFullId) <$> o .: "name" + + +-- | A node that has verified a snapshot +data SnapshotNodeVerification = SnapshotNodeVerification { + snvFullId :: FullNodeId + , snvNodeName :: NodeName + } deriving (Eq, Show) + +newtype ClusterName = ClusterName { clusterName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +data NodesInfo = NodesInfo { + nodesInfo :: [NodeInfo] + , nodesClusterName :: ClusterName + } deriving (Eq, Show) + +instance FromJSON NodesInfo where + parseJSON = withObject "NodesInfo" parse + where + parse o = do + nodes <- o .: "nodes" + infos <- forM (HM.toList nodes) $ \(fullNID, v) -> do + node <- parseJSON v + parseNodeInfo (FullNodeId fullNID) node + cn <- o .: "cluster_name" + return (NodesInfo infos cn) + +data NodesStats = NodesStats { + nodesStats :: [NodeStats] + , nodesStatsClusterName :: ClusterName + } deriving (Eq, Show) + +data NodeStats = NodeStats { + nodeStatsName :: NodeName + , nodeStatsFullId :: FullNodeId + , nodeStatsBreakersStats :: Maybe NodeBreakersStats + , nodeStatsHTTP :: NodeHTTPStats + , nodeStatsTransport :: NodeTransportStats + , nodeStatsFS :: NodeFSStats + , nodeStatsNetwork :: NodeNetworkStats + , nodeStatsThreadPool :: NodeThreadPoolsStats + , nodeStatsJVM :: NodeJVMStats + , nodeStatsProcess :: NodeProcessStats + , nodeStatsOS :: NodeOSStats + , nodeStatsIndices :: NodeIndicesStats + } deriving (Eq, Show) + +data NodeBreakersStats = NodeBreakersStats { + nodeStatsParentBreaker :: NodeBreakerStats + , nodeStatsRequestBreaker :: 
NodeBreakerStats + , nodeStatsFieldDataBreaker :: NodeBreakerStats + } deriving (Eq, Show) + +data NodeBreakerStats = NodeBreakerStats { + nodeBreakersTripped :: Int + , nodeBreakersOverhead :: Double + , nodeBreakersEstSize :: Bytes + , nodeBreakersLimitSize :: Bytes + } deriving (Eq, Show) + +data NodeHTTPStats = NodeHTTPStats { + nodeHTTPTotalOpened :: Int + , nodeHTTPCurrentOpen :: Int + } deriving (Eq, Show) + +data NodeTransportStats = NodeTransportStats { + nodeTransportTXSize :: Bytes + , nodeTransportCount :: Int + , nodeTransportRXSize :: Bytes + , nodeTransportRXCount :: Int + , nodeTransportServerOpen :: Int + } deriving (Eq, Show) + +data NodeFSStats = NodeFSStats { + nodeFSDataPaths :: [NodeDataPathStats] + , nodeFSTotal :: NodeFSTotalStats + , nodeFSTimestamp :: UTCTime + } deriving (Eq, Show) + +data NodeDataPathStats = NodeDataPathStats { + nodeDataPathDiskServiceTime :: Maybe Double + , nodeDataPathDiskQueue :: Maybe Double + , nodeDataPathIOSize :: Maybe Bytes + , nodeDataPathWriteSize :: Maybe Bytes + , nodeDataPathReadSize :: Maybe Bytes + , nodeDataPathIOOps :: Maybe Int + , nodeDataPathWrites :: Maybe Int + , nodeDataPathReads :: Maybe Int + , nodeDataPathAvailable :: Bytes + , nodeDataPathFree :: Bytes + , nodeDataPathTotal :: Bytes + , nodeDataPathType :: Maybe Text + , nodeDataPathDevice :: Text + , nodeDataPathMount :: Text + , nodeDataPathPath :: Text + } deriving (Eq, Show) + +data NodeFSTotalStats = NodeFSTotalStats { + nodeFSTotalDiskServiceTime :: Maybe Double + , nodeFSTotalDiskQueue :: Maybe Double + , nodeFSTotalIOSize :: Maybe Bytes + , nodeFSTotalWriteSize :: Maybe Bytes + , nodeFSTotalReadSize :: Maybe Bytes + , nodeFSTotalIOOps :: Maybe Int + , nodeFSTotalWrites :: Maybe Int + , nodeFSTotalReads :: Maybe Int + , nodeFSTotalAvailable :: Bytes + , nodeFSTotalFree :: Bytes + , nodeFSTotalTotal :: Bytes + } deriving (Eq, Show) + +data NodeNetworkStats = NodeNetworkStats { + nodeNetTCPOutRSTs :: Int + , nodeNetTCPInErrs :: Int + , nodeNetTCPAttemptFails :: Int + , nodeNetTCPEstabResets :: Int + , nodeNetTCPRetransSegs :: Int + , nodeNetTCPOutSegs :: Int + , nodeNetTCPInSegs :: Int + , nodeNetTCPCurrEstab :: Int + , nodeNetTCPPassiveOpens :: Int + , nodeNetTCPActiveOpens :: Int + } deriving (Eq, Show) + +data NodeThreadPoolsStats = NodeThreadPoolsStats { + nodeThreadPoolsStatsSnapshot :: NodeThreadPoolStats + , nodeThreadPoolsStatsBulk :: NodeThreadPoolStats + , nodeThreadPoolsStatsMerge :: NodeThreadPoolStats + , nodeThreadPoolsStatsGet :: NodeThreadPoolStats + , nodeThreadPoolsStatsManagement :: NodeThreadPoolStats + , nodeThreadPoolsStatsFetchShardStore :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsOptimize :: NodeThreadPoolStats + , nodeThreadPoolsStatsFlush :: NodeThreadPoolStats + , nodeThreadPoolsStatsSearch :: NodeThreadPoolStats + , nodeThreadPoolsStatsWarmer :: NodeThreadPoolStats + , nodeThreadPoolsStatsGeneric :: NodeThreadPoolStats + , nodeThreadPoolsStatsSuggest :: NodeThreadPoolStats + , nodeThreadPoolsStatsRefresh :: NodeThreadPoolStats + , nodeThreadPoolsStatsIndex :: NodeThreadPoolStats + , nodeThreadPoolsStatsListener :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsFetchShardStarted :: Maybe NodeThreadPoolStats + , nodeThreadPoolsStatsPercolate :: NodeThreadPoolStats + } deriving (Eq, Show) + +data NodeThreadPoolStats = NodeThreadPoolStats { + nodeThreadPoolCompleted :: Int + , nodeThreadPoolLargest :: Int + , nodeThreadPoolRejected :: Int + , nodeThreadPoolActive :: Int + , nodeThreadPoolQueue :: Int + , nodeThreadPoolThreads 
:: Int + } deriving (Eq, Show) + +data NodeJVMStats = NodeJVMStats { + nodeJVMStatsMappedBufferPool :: JVMBufferPoolStats + , nodeJVMStatsDirectBufferPool :: JVMBufferPoolStats + , nodeJVMStatsGCOldCollector :: JVMGCStats + , nodeJVMStatsGCYoungCollector :: JVMGCStats + , nodeJVMStatsPeakThreadsCount :: Int + , nodeJVMStatsThreadsCount :: Int + , nodeJVMStatsOldPool :: JVMPoolStats + , nodeJVMStatsSurvivorPool :: JVMPoolStats + , nodeJVMStatsYoungPool :: JVMPoolStats + , nodeJVMStatsNonHeapCommitted :: Bytes + , nodeJVMStatsNonHeapUsed :: Bytes + , nodeJVMStatsHeapMax :: Bytes + , nodeJVMStatsHeapCommitted :: Bytes + , nodeJVMStatsHeapUsedPercent :: Int + , nodeJVMStatsHeapUsed :: Bytes + , nodeJVMStatsUptime :: NominalDiffTime + , nodeJVMStatsTimestamp :: UTCTime + } deriving (Eq, Show) + +data JVMBufferPoolStats = JVMBufferPoolStats { + jvmBufferPoolStatsTotalCapacity :: Bytes + , jvmBufferPoolStatsUsed :: Bytes + , jvmBufferPoolStatsCount :: Int + } deriving (Eq, Show) + +data JVMGCStats = JVMGCStats { + jvmGCStatsCollectionTime :: NominalDiffTime + , jvmGCStatsCollectionCount :: Int + } deriving (Eq, Show) + +data JVMPoolStats = JVMPoolStats { + jvmPoolStatsPeakMax :: Bytes + , jvmPoolStatsPeakUsed :: Bytes + , jvmPoolStatsMax :: Bytes + , jvmPoolStatsUsed :: Bytes + } deriving (Eq, Show) + +data NodeProcessStats = NodeProcessStats { + nodeProcessMemTotalVirtual :: Bytes + , nodeProcessMemShare :: Bytes + , nodeProcessMemResident :: Bytes + , nodeProcessCPUTotal :: NominalDiffTime + , nodeProcessCPUUser :: NominalDiffTime + , nodeProcessCPUSys :: NominalDiffTime + , nodeProcessCPUPercent :: Int + , nodeProcessOpenFDs :: Int + , nodeProcessTimestamp :: UTCTime + } deriving (Eq, Show) + +data NodeOSStats = NodeOSStats { + nodeOSSwapFree :: Bytes + , nodeOSSwapUsed :: Bytes + , nodeOSMemActualUsed :: Bytes + , nodeOSMemActualFree :: Bytes + , nodeOSMemUsedPercent :: Int + , nodeOSMemFreePercent :: Int + , nodeOSMemUsed :: Bytes + , nodeOSMemFree :: Bytes + , nodeOSCPUStolen :: Int + , nodeOSCPUUsage :: Int + , nodeOSCPUIdle :: Int + , nodeOSCPUUser :: Int + , nodeOSCPUSys :: Int + , nodeOSLoad :: Maybe LoadAvgs + , nodeOSUptime :: NominalDiffTime + , nodeOSTimestamp :: UTCTime + } deriving (Eq, Show) + +data LoadAvgs = LoadAvgs { + loadAvg1Min :: Double + , loadAvg5Min :: Double + , loadAvg15Min :: Double + } deriving (Eq, Show) + +data NodeIndicesStats = NodeIndicesStats { + nodeIndicesStatsRecoveryThrottleTime :: Maybe NominalDiffTime + , nodeIndicesStatsRecoveryCurrentAsTarget :: Maybe Int + , nodeIndicesStatsRecoveryCurrentAsSource :: Maybe Int + , nodeIndicesStatsQueryCacheMisses :: Maybe Int + , nodeIndicesStatsQueryCacheHits :: Maybe Int + , nodeIndicesStatsQueryCacheEvictions :: Maybe Int + , nodeIndicesStatsQueryCacheSize :: Maybe Bytes + , nodeIndicesStatsSuggestCurrent :: Int + , nodeIndicesStatsSuggestTime :: NominalDiffTime + , nodeIndicesStatsSuggestTotal :: Int + , nodeIndicesStatsTranslogSize :: Bytes + , nodeIndicesStatsTranslogOps :: Int + , nodeIndicesStatsSegFixedBitSetMemory :: Maybe Bytes + , nodeIndicesStatsSegVersionMapMemory :: Bytes + , nodeIndicesStatsSegIndexWriterMaxMemory :: Maybe Bytes + , nodeIndicesStatsSegIndexWriterMemory :: Bytes + , nodeIndicesStatsSegMemory :: Bytes + , nodeIndicesStatsSegCount :: Int + , nodeIndicesStatsCompletionSize :: Bytes + , nodeIndicesStatsPercolateQueries :: Int + , nodeIndicesStatsPercolateMemory :: Bytes + , nodeIndicesStatsPercolateCurrent :: Int + , nodeIndicesStatsPercolateTime :: NominalDiffTime + , 
nodeIndicesStatsPercolateTotal :: Int + , nodeIndicesStatsFieldDataEvictions :: Int + , nodeIndicesStatsFieldDataMemory :: Bytes + , nodeIndicesStatsIDCacheMemory :: Bytes + , nodeIndicesStatsFilterCacheEvictions :: Int + , nodeIndicesStatsFilterCacheMemory :: Bytes + , nodeIndicesStatsWarmerTotalTime :: NominalDiffTime + , nodeIndicesStatsWarmerTotal :: Int + , nodeIndicesStatsWarmerCurrent :: Int + , nodeIndicesStatsFlushTotalTime :: NominalDiffTime + , nodeIndicesStatsFlushTotal :: Int + , nodeIndicesStatsRefreshTotalTime :: NominalDiffTime + , nodeIndicesStatsRefreshTotal :: Int + , nodeIndicesStatsMergesTotalSize :: Bytes + , nodeIndicesStatsMergesTotalDocs :: Int + , nodeIndicesStatsMergesTotalTime :: NominalDiffTime + , nodeIndicesStatsMergesTotal :: Int + , nodeIndicesStatsMergesCurrentSize :: Bytes + , nodeIndicesStatsMergesCurrentDocs :: Int + , nodeIndicesStatsMergesCurrent :: Int + , nodeIndicesStatsSearchFetchCurrent :: Int + , nodeIndicesStatsSearchFetchTime :: NominalDiffTime + , nodeIndicesStatsSearchFetchTotal :: Int + , nodeIndicesStatsSearchQueryCurrent :: Int + , nodeIndicesStatsSearchQueryTime :: NominalDiffTime + , nodeIndicesStatsSearchQueryTotal :: Int + , nodeIndicesStatsSearchOpenContexts :: Int + , nodeIndicesStatsGetCurrent :: Int + , nodeIndicesStatsGetMissingTime :: NominalDiffTime + , nodeIndicesStatsGetMissingTotal :: Int + , nodeIndicesStatsGetExistsTime :: NominalDiffTime + , nodeIndicesStatsGetExistsTotal :: Int + , nodeIndicesStatsGetTime :: NominalDiffTime + , nodeIndicesStatsGetTotal :: Int + , nodeIndicesStatsIndexingThrottleTime :: Maybe NominalDiffTime + , nodeIndicesStatsIndexingIsThrottled :: Maybe Bool + , nodeIndicesStatsIndexingNoopUpdateTotal :: Maybe Int + , nodeIndicesStatsIndexingDeleteCurrent :: Int + , nodeIndicesStatsIndexingDeleteTime :: NominalDiffTime + , nodeIndicesStatsIndexingDeleteTotal :: Int + , nodeIndicesStatsIndexingIndexCurrent :: Int + , nodeIndicesStatsIndexingIndexTime :: NominalDiffTime + , nodeIndicesStatsIndexingTotal :: Int + , nodeIndicesStatsStoreThrottleTime :: NominalDiffTime + , nodeIndicesStatsStoreSize :: Bytes + , nodeIndicesStatsDocsDeleted :: Int + , nodeIndicesStatsDocsCount :: Int + } deriving (Eq, Show) + +data NodeInfo = NodeInfo { + nodeInfoHTTPAddress :: EsAddress + , nodeInfoBuild :: BuildHash + , nodeInfoESVersion :: VersionNumber + , nodeInfoIP :: Server + , nodeInfoHost :: Server + , nodeInfoTransportAddress :: EsAddress + , nodeInfoName :: NodeName + , nodeInfoFullId :: FullNodeId + , nodeInfoPlugins :: [NodePluginInfo] + , nodeInfoHTTP :: NodeHTTPInfo + , nodeInfoTransport :: NodeTransportInfo + , nodeInfoNetwork :: NodeNetworkInfo + , nodeInfoThreadPool :: NodeThreadPoolsInfo + , nodeInfoJVM :: NodeJVMInfo + , nodeInfoProcess :: NodeProcessInfo + , nodeInfoOS :: NodeOSInfo + , nodeInfoSettings :: Object + -- ^ The members of the settings objects are not consistent, + -- dependent on plugins, etc. + } deriving (Eq, Show) + +data NodePluginInfo = NodePluginInfo { + nodePluginSite :: Bool + -- ^ Is this a site plugin? 
+ , nodePluginJVM :: Bool + -- ^ Is this plugin running on the JVM? + , nodePluginDescription :: Text + , nodePluginVersion :: MaybeNA VersionNumber + , nodePluginName :: PluginName + } deriving (Eq, Show) + +data NodeHTTPInfo = NodeHTTPInfo { + nodeHTTPMaxContentLength :: Bytes + , nodeHTTPTransportAddress :: BoundTransportAddress + } deriving (Eq, Show) + +data NodeTransportInfo = NodeTransportInfo { + nodeTransportProfiles :: [BoundTransportAddress] + , nodeTransportAddress :: BoundTransportAddress + } deriving (Eq, Show) + +data BoundTransportAddress = BoundTransportAddress { + publishAddress :: EsAddress + , boundAddress :: EsAddress + } deriving (Eq, Show) + +data NodeNetworkInfo = NodeNetworkInfo { + nodeNetworkPrimaryInterface :: NodeNetworkInterface + , nodeNetworkRefreshInterval :: NominalDiffTime + } deriving (Eq, Show) + +newtype MacAddress = MacAddress { macAddress :: Text } + deriving (Eq, Ord, Show, FromJSON) + +newtype NetworkInterfaceName = NetworkInterfaceName { networkInterfaceName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +data NodeNetworkInterface = NodeNetworkInterface { + nodeNetIfaceMacAddress :: MacAddress + , nodeNetIfaceName :: NetworkInterfaceName + , nodeNetIfaceAddress :: Server + } deriving (Eq, Show) + +data NodeThreadPoolsInfo = NodeThreadPoolsInfo { + nodeThreadPoolsRefresh :: NodeThreadPoolInfo + , nodeThreadPoolsManagement :: NodeThreadPoolInfo + , nodeThreadPoolsPercolate :: NodeThreadPoolInfo + , nodeThreadPoolsListener :: Maybe NodeThreadPoolInfo + , nodeThreadPoolsFetchShardStarted :: Maybe NodeThreadPoolInfo + , nodeThreadPoolsSearch :: NodeThreadPoolInfo + , nodeThreadPoolsFlush :: NodeThreadPoolInfo + , nodeThreadPoolsWarmer :: NodeThreadPoolInfo + , nodeThreadPoolsOptimize :: NodeThreadPoolInfo + , nodeThreadPoolsBulk :: NodeThreadPoolInfo + , nodeThreadPoolsSuggest :: NodeThreadPoolInfo + , nodeThreadPoolsMerge :: NodeThreadPoolInfo + , nodeThreadPoolsSnapshot :: NodeThreadPoolInfo + , nodeThreadPoolsGet :: NodeThreadPoolInfo + , nodeThreadPoolsFetchShardStore :: Maybe NodeThreadPoolInfo + , nodeThreadPoolsIndex :: NodeThreadPoolInfo + , nodeThreadPoolsGeneric :: NodeThreadPoolInfo + } deriving (Eq, Show) + +data NodeThreadPoolInfo = NodeThreadPoolInfo { + nodeThreadPoolQueueSize :: ThreadPoolSize + , nodeThreadPoolKeepalive :: Maybe NominalDiffTime + , nodeThreadPoolMin :: Maybe Int + , nodeThreadPoolMax :: Maybe Int + , nodeThreadPoolType :: ThreadPoolType + } deriving (Eq, Show) + +data ThreadPoolSize = ThreadPoolBounded Int + | ThreadPoolUnbounded + deriving (Eq, Show) + +data ThreadPoolType = ThreadPoolScaling + | ThreadPoolFixed + | ThreadPoolCached + deriving (Eq, Show) + +data NodeJVMInfo = NodeJVMInfo { + nodeJVMInfoMemoryPools :: [JVMMemoryPool] + , nodeJVMInfoMemoryPoolsGCCollectors :: [JVMGCCollector] + , nodeJVMInfoMemoryInfo :: JVMMemoryInfo + , nodeJVMInfoStartTime :: UTCTime + , nodeJVMInfoVMVendor :: Text + , nodeJVMVMVersion :: VersionNumber + -- ^ JVM doesn't seem to follow normal version conventions + , nodeJVMVMName :: Text + , nodeJVMVersion :: VersionNumber + , nodeJVMPID :: PID + } deriving (Eq, Show) + +-- | Handles quirks in the way JVM versions are rendered (1.7.0_101 -> 1.7.0.101) +newtype JVMVersion = JVMVersion { unJVMVersion :: VersionNumber } + +data JVMMemoryInfo = JVMMemoryInfo { + jvmMemoryInfoDirectMax :: Bytes + , jvmMemoryInfoNonHeapMax :: Bytes + , jvmMemoryInfoNonHeapInit :: Bytes + , jvmMemoryInfoHeapMax :: Bytes + , jvmMemoryInfoHeapInit :: Bytes + } deriving (Eq, Show) + +newtype JVMMemoryPool =
JVMMemoryPool { + jvmMemoryPool :: Text + } deriving (Eq, Show, FromJSON) + +newtype JVMGCCollector = JVMGCCollector { + jvmGCCollector :: Text + } deriving (Eq, Show, FromJSON) + +newtype PID = PID { + pid :: Int + } deriving (Eq, Show, FromJSON) + +data NodeOSInfo = NodeOSInfo { + nodeOSSwap :: Bytes + , nodeOSMem :: Bytes + , nodeOSCPUInfo :: CPUInfo + , nodeOSAvailableProcessors :: Int + , nodeOSRefreshInterval :: NominalDiffTime + } deriving (Eq, Show) + +data CPUInfo = CPUInfo { + cpuCacheSize :: Bytes + , cpuCoresPerSocket :: Int + , cpuTotalSockets :: Int + , cpuTotalCores :: Int + , cpuMHZ :: Int + , cpuModel :: Text + , cpuVendor :: Text + } deriving (Eq, Show) + +data NodeProcessInfo = NodeProcessInfo { + nodeProcessMLockAll :: Bool + -- ^ See + , nodeProcessMaxFileDescriptors :: Int + , nodeProcessId :: PID + , nodeProcessRefreshInterval :: NominalDiffTime + } deriving (Eq, Show) + +-- | A quirky address format used throughout ElasticSearch. An example +-- would be inet[/1.1.1.1:9200]. inet may be a placeholder for a +-- . +newtype EsAddress = EsAddress { esAddress :: Text } + deriving (Eq, Ord, Show, FromJSON) + +newtype PluginName = PluginName { pluginName :: Text } + deriving (Eq, Ord, Show, FromJSON) + +data ShardResult = + ShardResult { shardTotal :: Int + , shardsSuccessful :: Int + , shardsFailed :: Int } deriving (Eq, Show) + +data SnapshotState = SnapshotInit + | SnapshotStarted + | SnapshotSuccess + | SnapshotFailed + | SnapshotAborted + | SnapshotMissing + | SnapshotWaiting + deriving (Show, Eq) + +instance FromJSON SnapshotState where + parseJSON = withText "SnapshotState" parse + where + parse "INIT" = return SnapshotInit + parse "STARTED" = return SnapshotStarted + parse "SUCCESS" = return SnapshotSuccess + parse "FAILED" = return SnapshotFailed + parse "ABORTED" = return SnapshotAborted + parse "MISSING" = return SnapshotMissing + parse "WAITING" = return SnapshotWaiting + parse t = fail ("Invalid snapshot state " <> T.unpack t) + +data SnapshotRestoreSettings = SnapshotRestoreSettings { + snapRestoreWaitForCompletion :: Bool + -- ^ Should the API call return immediately after initializing + -- the restore or wait until completed? Note that if this is + -- enabled, it could wait a long time, so you should adjust your + -- 'ManagerSettings' accordingly to set long timeouts or + -- explicitly handle timeouts. + , snapRestoreIndices :: Maybe IndexSelection + -- ^ Nothing will restore all indices in the snapshot. Just [] is + -- permissible and will essentially be a no-op restore. + , snapRestoreIgnoreUnavailable :: Bool + -- ^ If set to True, any indices that do not exist will be ignored + -- during the restore rather than failing the restore. + , snapRestoreIncludeGlobalState :: Bool + -- ^ If set to false, will ignore any global state in the snapshot + -- and will not restore it. + , snapRestoreRenamePattern :: Maybe RestoreRenamePattern + -- ^ A regex pattern for matching indices. Used with + -- 'snapRestoreRenameReplacement', the restore can reference the + -- matched index and create a new index name upon restore. + , snapRestoreRenameReplacement :: Maybe (NonEmpty RestoreRenameToken) + -- ^ Expression of how index renames should be constructed. + , snapRestorePartial :: Bool + -- ^ If some indices fail to restore, should the process proceed? + , snapRestoreIncludeAliases :: Bool + -- ^ Should the restore also restore the aliases captured in the + -- snapshot.
+ , snapRestoreIndexSettingsOverrides :: Maybe RestoreIndexSettings + -- ^ Settings to apply during the restore process. __NOTE:__ This + -- option is not supported in ES < 1.5 and should be set to + -- Nothing in that case. + , snapRestoreIgnoreIndexSettings :: Maybe (NonEmpty Text) + -- ^ This type could be more rich but it isn't clear which + -- settings are allowed to be ignored during restore, so we're + -- going with including this feature in a basic form rather than + -- omitting it. One example here would be + -- "index.refresh_interval". Any setting specified here will + -- revert back to the server default during the restore process. + } deriving (Eq, Show) + +-- | Regex-style pattern, e.g. "index_(.+)" to match index names +newtype RestoreRenamePattern = RestoreRenamePattern { rrPattern :: Text } + deriving (Show, Eq, Ord, ToJSON) + + +-- | A single token in an index renaming scheme for a restore. These +-- are concatenated into a string before being sent to +-- ElasticSearch. Check out these Java +-- to find out more if you're into that sort of thing. +data RestoreRenameToken = RRTLit Text + -- ^ Just a literal string of characters + | RRSubWholeMatch + -- ^ Equivalent to $0. The entire matched pattern, not any subgroup + | RRSubGroup RRGroupRefNum + -- ^ A specific reference to a group number + deriving (Show, Eq) + + +-- | A group number for regex matching. Only values from 1-9 are +-- supported. Construct with 'mkRRGroupRefNum' +newtype RRGroupRefNum = RRGroupRefNum { rrGroupRefNum :: Int } + deriving (Show, Eq, Ord) + +instance Bounded RRGroupRefNum where + minBound = RRGroupRefNum 1 + maxBound = RRGroupRefNum 9 + + +-- | Only allows valid group number references (1-9). +mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum +mkRRGroupRefNum i + | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) = + Just $ RRGroupRefNum i + | otherwise = Nothing +
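Putting the rename machinery together, a sketch (pattern and tokens invented) that would restore an index matched by index_(.+) under a restored_index_ prefix, e.g. index_tweets becomes restored_index_tweets:

-- Illustrative only; defaultSnapshotRestoreSettings is defined further down.
renameOnRestore :: Maybe SnapshotRestoreSettings
renameOnRestore = do
  g1 <- mkRRGroupRefNum 1
  pure defaultSnapshotRestoreSettings
         { snapRestoreRenamePattern     = Just (RestoreRenamePattern "index_(.+)")
         , snapRestoreRenameReplacement = Just (RRTLit "restored_index_" :| [RRSubGroup g1])
         }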
+-- | Index settings that can be overridden. The docs only mention you +-- can update number of replicas, but there may be more. You +-- definitely cannot override shard count. +data RestoreIndexSettings = RestoreIndexSettings { + restoreOverrideReplicas :: Maybe ReplicaCount + } deriving (Show, Eq) + +instance ToJSON RestoreIndexSettings where + toJSON RestoreIndexSettings {..} = object prs + where + prs = catMaybes [("index.number_of_replicas" .=) <$> restoreOverrideReplicas] + +data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings { + repoUpdateVerify :: Bool + -- ^ After creation/update, synchronously check that nodes can + -- write to this repo. Defaults to True. You may use False if you + -- need a faster response and plan on verifying manually later + -- with 'verifySnapshotRepo'. + } deriving (Eq, Show) + +-- | Reasonable defaults for repo creation/update +-- +-- * repoUpdateVerify True +defaultSnapshotRepoUpdateSettings :: SnapshotRepoUpdateSettings +defaultSnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings True + +-- | A filesystem-based snapshot repo that ships with +-- ElasticSearch. This is an instance of 'SnapshotRepo' so it can be +-- used with 'updateSnapshotRepo' +data FsSnapshotRepo = FsSnapshotRepo { + fsrName :: SnapshotRepoName + , fsrLocation :: FilePath + , fsrCompressMetadata :: Bool + , fsrChunkSize :: Maybe Bytes + -- ^ Size by which to split large files during snapshotting. + , fsrMaxRestoreBytesPerSec :: Maybe Bytes + -- ^ Throttle node restore rate. If not supplied, defaults to 40mb/sec + , fsrMaxSnapshotBytesPerSec :: Maybe Bytes + -- ^ Throttle node snapshot rate. If not supplied, defaults to 40mb/sec + } deriving (Eq, Show) + +instance SnapshotRepo FsSnapshotRepo where + toGSnapshotRepo FsSnapshotRepo {..} = + GenericSnapshotRepo fsrName fsRepoType (GenericSnapshotRepoSettings settings) + where + Object settings = object $ [ "location" .= fsrLocation + , "compress" .= fsrCompressMetadata + ] ++ optionalPairs + optionalPairs = catMaybes [ ("chunk_size" .=) <$> fsrChunkSize + , ("max_restore_bytes_per_sec" .=) <$> fsrMaxRestoreBytesPerSec + , ("max_snapshot_bytes_per_sec" .=) <$> fsrMaxSnapshotBytesPerSec + ] + fromGSnapshotRepo GenericSnapshotRepo {..} + | gSnapshotRepoType == fsRepoType = do + let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings + parseRepo $ do + FsSnapshotRepo gSnapshotRepoName <$> o .: "location" + <*> o .:? "compress" .!= False + <*> o .:? "chunk_size" + <*> o .:? "max_restore_bytes_per_sec" + <*> o .:? "max_snapshot_bytes_per_sec" + | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType) + +data SnapshotCreateSettings = SnapshotCreateSettings { + snapWaitForCompletion :: Bool + -- ^ Should the API call return immediately after initializing + -- the snapshot or wait until completed? Note that if this is + -- enabled it could wait a long time, so you should adjust your + -- 'ManagerSettings' accordingly to set long timeouts or + -- explicitly handle timeouts. + , snapIndices :: Maybe IndexSelection + -- ^ Nothing will snapshot all indices. Just [] is permissible and + -- will essentially be a no-op snapshot. + , snapIgnoreUnavailable :: Bool + -- ^ If set to True, any matched indices that don't exist will be + -- ignored. Otherwise it will be an error and fail. + , snapIncludeGlobalState :: Bool + , snapPartial :: Bool + -- ^ If some indices failed to snapshot (e.g. if not all primary + -- shards are available), should the process proceed? + } deriving (Eq, Show) + +-- | Reasonable defaults for snapshot creation +-- +-- * snapWaitForCompletion False +-- * snapIndices Nothing +-- * snapIgnoreUnavailable False +-- * snapIncludeGlobalState True +-- * snapPartial False +defaultSnapshotCreateSettings :: SnapshotCreateSettings +defaultSnapshotCreateSettings = SnapshotCreateSettings { + snapWaitForCompletion = False + , snapIndices = Nothing + , snapIgnoreUnavailable = False + , snapIncludeGlobalState = True + , snapPartial = False + } + + +data SnapshotSelection = SnapshotList (NonEmpty SnapshotPattern) + | AllSnapshots deriving (Eq, Show) + + +-- | Either specifies an exact snapshot name or one with globs in it, +-- e.g. @SnapPattern "foo*"@ __NOTE__: Patterns are not supported on +-- ES < 1.7 +data SnapshotPattern = ExactSnap SnapshotName + | SnapPattern Text + deriving (Eq, Show) + + +-- | General information about the state of a snapshot.
Has some +-- redundancies with 'SnapshotStatus' +data SnapshotInfo = SnapshotInfo { + snapInfoShards :: ShardResult + , snapInfoFailures :: [SnapshotShardFailure] + , snapInfoDuration :: NominalDiffTime + , snapInfoEndTime :: UTCTime + , snapInfoStartTime :: UTCTime + , snapInfoState :: SnapshotState + , snapInfoIndices :: [IndexName] + , snapInfoName :: SnapshotName + } deriving (Eq, Show) + + +instance FromJSON SnapshotInfo where + parseJSON = withObject "SnapshotInfo" parse + where + parse o = SnapshotInfo <$> o .: "shards" + <*> o .: "failures" + <*> (unMS <$> o .: "duration_in_millis") + <*> (posixMS <$> o .: "end_time_in_millis") + <*> (posixMS <$> o .: "start_time_in_millis") + <*> o .: "state" + <*> o .: "indices" + <*> o .: "snapshot" + +data SnapshotShardFailure = SnapshotShardFailure { + snapShardFailureIndex :: IndexName + , snapShardFailureNodeId :: Maybe NodeName -- I'm not 100% sure this isn't actually 'FullNodeId' + , snapShardFailureReason :: Text + , snapShardFailureShardId :: ShardId + } deriving (Eq, Show) + + +instance FromJSON SnapshotShardFailure where + parseJSON = withObject "SnapshotShardFailure" parse + where + parse o = SnapshotShardFailure <$> o .: "index" + <*> o .:? "node_id" + <*> o .: "reason" + <*> o .: "shard_id" + +parseRepo :: Parser a -> Either SnapshotRepoConversionError a +parseRepo parser = case parseEither (const parser) () of + Left e -> Left (OtherRepoConversionError (T.pack e)) + Right a -> Right a + +parseNodeStats :: FullNodeId -> Object -> Parser NodeStats +parseNodeStats fnid o = do + NodeStats <$> o .: "name" + <*> pure fnid + <*> o .:? "breakers" + <*> o .: "http" + <*> o .: "transport" + <*> o .: "fs" + <*> o .: "network" + <*> o .: "thread_pool" + <*> o .: "jvm" + <*> o .: "process" + <*> o .: "os" + <*> o .: "indices" + +parseNodeInfo :: FullNodeId -> Object -> Parser NodeInfo +parseNodeInfo nid o = + NodeInfo <$> o .: "http_address" + <*> o .: "build" + <*> o .: "version" + <*> o .: "ip" + <*> o .: "host" + <*> o .: "transport_address" + <*> o .: "name" + <*> pure nid + <*> o .: "plugins" + <*> o .: "http" + <*> o .: "transport" + <*> o .: "network" + <*> o .: "thread_pool" + <*> o .: "jvm" + <*> o .: "process" + <*> o .: "os" + <*> o .: "settings" + +-- | Reasonable defaults for snapshot restores +-- +-- * snapRestoreWaitForCompletion False +-- * snapRestoreIndices Nothing +-- * snapRestoreIgnoreUnavailable False +-- * snapRestoreIncludeGlobalState True +-- * snapRestoreRenamePattern Nothing +-- * snapRestoreRenameReplacement Nothing +-- * snapRestorePartial False +-- * snapRestoreIncludeAliases True +-- * snapRestoreIndexSettingsOverrides Nothing +-- * snapRestoreIgnoreIndexSettings Nothing +defaultSnapshotRestoreSettings :: SnapshotRestoreSettings +defaultSnapshotRestoreSettings = SnapshotRestoreSettings { + snapRestoreWaitForCompletion = False + , snapRestoreIndices = Nothing + , snapRestoreIgnoreUnavailable = False + , snapRestoreIncludeGlobalState = True + , snapRestoreRenamePattern = Nothing + , snapRestoreRenameReplacement = Nothing + , snapRestorePartial = False + , snapRestoreIncludeAliases = True + , snapRestoreIndexSettingsOverrides = Nothing + , snapRestoreIgnoreIndexSettings = Nothing + } + +fsRepoType :: SnapshotRepoType +fsRepoType = SnapshotRepoType "fs" + +{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -} +type Reply = Network.HTTP.Client.Response LByteString + +instance FromJSON IndexSettingsSummary where + parseJSON = withObject "IndexSettingsSummary" parse + where parse o = case 
HM.toList o of + [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn) + <$> parseJSON v + <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings") + _ -> fail "Expected single-key object with index name" + redundant (NumberOfReplicas _) = True + redundant _ = False + +instance ToJSON VersionNumber where + toJSON = toJSON . Vers.showVersion . versionNumber + +instance FromJSON VersionNumber where + parseJSON = withText "VersionNumber" (parse . T.unpack) + where + parse s = case filter (null . snd)(RP.readP_to_S Vers.parseVersion s) of + [(v, _)] -> pure (VersionNumber v) + [] -> fail ("Invalid version string " ++ s) + xs -> fail ("Ambiguous version string " ++ s ++ " (" ++ intercalate ", " (Vers.showVersion . fst <$> xs) ++ ")") + +instance ToJSON Interval where + toJSON Year = "year" + toJSON Quarter = "quarter" + toJSON Month = "month" + toJSON Week = "week" + toJSON Day = "day" + toJSON Hour = "hour" + toJSON Minute = "minute" + toJSON Second = "second" + toJSON (FractionalInterval fraction interval) = toJSON $ show fraction ++ show interval + +parseStringInterval :: (Monad m) => String -> m NominalDiffTime +parseStringInterval s = case span isNumber s of + ("", _) -> fail "Invalid interval" + (nS, unitS) -> case (readMay nS, readMay unitS) of + (Just n, Just unit) -> return (fromInteger (n * unitNDT unit)) + (Nothing, _) -> fail "Invalid interval number" + (_, Nothing) -> fail "Invalid interval unit" + where + unitNDT Seconds = 1 + unitNDT Minutes = 60 + unitNDT Hours = 60 * 60 + unitNDT Days = 24 * 60 * 60 + unitNDT Weeks = 7 * 24 * 60 * 60 + +instance ToJSON IndexSettings where + toJSON (IndexSettings s r) = object ["settings" .= + object ["index" .= + object ["number_of_shards" .= s, "number_of_replicas" .= r] + ] + ] + +instance FromJSON IndexSettings where + parseJSON = withObject "IndexSettings" parse + where parse o = do s <- o .: "settings" + i <- s .: "index" + IndexSettings <$> i .: "number_of_shards" + <*> i .: "number_of_replicas" + +instance Bounded DocVersion where + minBound = DocVersion 1 + maxBound = DocVersion 9200000000000000000 -- 9.2e+18 + +instance Enum DocVersion where + succ x + | x /= maxBound = DocVersion (succ $ docVersionNumber x) + | otherwise = succError "DocVersion" + pred x + | x /= minBound = DocVersion (pred $ docVersionNumber x) + | otherwise = predError "DocVersion" + toEnum i = + fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i + fromEnum = docVersionNumber + enumFrom = boundedEnumFrom + enumFromThen = boundedEnumFromThen + +instance FromJSON ShardResult where + parseJSON (Object v) = ShardResult <$> + v .: "total" <*> + v .: "successful" <*> + v .: "failed" + parseJSON _ = empty + +instance FromJSON NodeBreakerStats where + parseJSON = withObject "NodeBreakerStats" parse + where + parse o = NodeBreakerStats <$> o .: "tripped" + <*> o .: "overhead" + <*> o .: "estimated_size_in_bytes" + <*> o .: "limit_size_in_bytes" + +instance FromJSON NodeHTTPStats where + parseJSON = withObject "NodeHTTPStats" parse + where + parse o = NodeHTTPStats <$> o .: "total_opened" + <*> o .: "current_open" + +instance FromJSON NodeTransportStats where + parseJSON = withObject "NodeTransportStats" parse + where + parse o = NodeTransportStats <$> o .: "tx_size_in_bytes" + <*> o .: "tx_count" + <*> o .: "rx_size_in_bytes" + <*> o .: "rx_count" + <*> o .: "server_open" + +instance FromJSON NodeFSStats where + parseJSON = withObject "NodeFSStats" parse + where + parse o = NodeFSStats <$> o .: "data" + <*> o .: "total" + <*> 
(posixMS <$> o .: "timestamp") + +instance FromJSON NodeDataPathStats where + parseJSON = withObject "NodeDataPathStats" parse + where + parse o = + NodeDataPathStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") + <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") + <*> o .:? "disk_io_size_in_bytes" + <*> o .:? "disk_write_size_in_bytes" + <*> o .:? "disk_read_size_in_bytes" + <*> o .:? "disk_io_op" + <*> o .:? "disk_writes" + <*> o .:? "disk_reads" + <*> o .: "available_in_bytes" + <*> o .: "free_in_bytes" + <*> o .: "total_in_bytes" + <*> o .:? "type" + <*> o .: "dev" + <*> o .: "mount" + <*> o .: "path" + +instance FromJSON NodeNetworkStats where + parseJSON = withObject "NodeNetworkStats" parse + where + parse o = do + tcp <- o .: "tcp" + NodeNetworkStats <$> tcp .: "out_rsts" + <*> tcp .: "in_errs" + <*> tcp .: "attempt_fails" + <*> tcp .: "estab_resets" + <*> tcp .: "retrans_segs" + <*> tcp .: "out_segs" + <*> tcp .: "in_segs" + <*> tcp .: "curr_estab" + <*> tcp .: "passive_opens" + <*> tcp .: "active_opens" + +instance FromJSON NodeThreadPoolsStats where + parseJSON = withObject "NodeThreadPoolsStats" parse + where + parse o = NodeThreadPoolsStats <$> o .: "snapshot" + <*> o .: "bulk" + <*> o .: "merge" + <*> o .: "get" + <*> o .: "management" + <*> o .:? "fetch_shard_store" + <*> o .: "optimize" + <*> o .: "flush" + <*> o .: "search" + <*> o .: "warmer" + <*> o .: "generic" + <*> o .: "suggest" + <*> o .: "refresh" + <*> o .: "index" + <*> o .:? "listener" + <*> o .:? "fetch_shard_started" + <*> o .: "percolate" +instance FromJSON NodeThreadPoolStats where + parseJSON = withObject "NodeThreadPoolStats" parse + where + parse o = NodeThreadPoolStats <$> o .: "completed" + <*> o .: "largest" + <*> o .: "rejected" + <*> o .: "active" + <*> o .: "queue" + <*> o .: "threads" + +instance FromJSON NodeJVMStats where + parseJSON = withObject "NodeJVMStats" parse + where + parse o = do + bufferPools <- o .: "buffer_pools" + mapped <- bufferPools .: "mapped" + direct <- bufferPools .: "direct" + gc <- o .: "gc" + collectors <- gc .: "collectors" + oldC <- collectors .: "old" + youngC <- collectors .: "young" + threads <- o .: "threads" + mem <- o .: "mem" + pools <- mem .: "pools" + oldM <- pools .: "old" + survivorM <- pools .: "survivor" + youngM <- pools .: "young" + NodeJVMStats <$> pure mapped + <*> pure direct + <*> pure oldC + <*> pure youngC + <*> threads .: "peak_count" + <*> threads .: "count" + <*> pure oldM + <*> pure survivorM + <*> pure youngM + <*> mem .: "non_heap_committed_in_bytes" + <*> mem .: "non_heap_used_in_bytes" + <*> mem .: "heap_max_in_bytes" + <*> mem .: "heap_committed_in_bytes" + <*> mem .: "heap_used_percent" + <*> mem .: "heap_used_in_bytes" + <*> (unMS <$> o .: "uptime_in_millis") + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON JVMBufferPoolStats where + parseJSON = withObject "JVMBufferPoolStats" parse + where + parse o = JVMBufferPoolStats <$> o .: "total_capacity_in_bytes" + <*> o .: "used_in_bytes" + <*> o .: "count" + +instance FromJSON JVMGCStats where + parseJSON = withObject "JVMGCStats" parse + where + parse o = JVMGCStats <$> (unMS <$> o .: "collection_time_in_millis") + <*> o .: "collection_count" + +instance FromJSON JVMPoolStats where + parseJSON = withObject "JVMPoolStats" parse + where + parse o = JVMPoolStats <$> o .: "peak_max_in_bytes" + <*> o .: "peak_used_in_bytes" + <*> o .: "max_in_bytes" + <*> o .: "used_in_bytes" + +instance FromJSON NodeProcessStats where + parseJSON = withObject "NodeProcessStats" 
parse + where + parse o = do + mem <- o .: "mem" + cpu <- o .: "cpu" + NodeProcessStats <$> mem .: "total_virtual_in_bytes" + <*> mem .: "share_in_bytes" + <*> mem .: "resident_in_bytes" + <*> (unMS <$> cpu .: "total_in_millis") + <*> (unMS <$> cpu .: "user_in_millis") + <*> (unMS <$> cpu .: "sys_in_millis") + <*> cpu .: "percent" + <*> o .: "open_file_descriptors" + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON NodeOSStats where + parseJSON = withObject "NodeOSStats" parse + where + parse o = do + swap <- o .: "swap" + mem <- o .: "mem" + cpu <- o .: "cpu" + load <- o .:? "load_average" + NodeOSStats <$> swap .: "free_in_bytes" + <*> swap .: "used_in_bytes" + <*> mem .: "actual_used_in_bytes" + <*> mem .: "actual_free_in_bytes" + <*> mem .: "used_percent" + <*> mem .: "free_percent" + <*> mem .: "used_in_bytes" + <*> mem .: "free_in_bytes" + <*> cpu .: "stolen" + <*> cpu .: "usage" + <*> cpu .: "idle" + <*> cpu .: "user" + <*> cpu .: "sys" + <*> pure load + <*> (unMS <$> o .: "uptime_in_millis") + <*> (posixMS <$> o .: "timestamp") + +instance FromJSON LoadAvgs where + parseJSON = withArray "LoadAvgs" parse + where + parse v = case V.toList v of + [one, five, fifteen] -> LoadAvgs <$> parseJSON one + <*> parseJSON five + <*> parseJSON fifteen + _ -> fail "Expecting a triple of Doubles" + +instance FromJSON NodeIndicesStats where + parseJSON = withObject "NodeIndicesStats" parse + where + parse o = do + let (.::) mv k = case mv of + Just v -> Just <$> v .: k + Nothing -> pure Nothing + mRecovery <- o .:? "recovery" + mQueryCache <- o .:? "query_cache" + suggest <- o .: "suggest" + translog <- o .: "translog" + segments <- o .: "segments" + completion <- o .: "completion" + percolate <- o .: "percolate" + fielddata <- o .: "fielddata" + idCache <- o .: "id_cache" + filterCache <- o .: "filter_cache" + warmer <- o .: "warmer" + flush <- o .: "flush" + refresh <- o .: "refresh" + merges <- o .: "merges" + search <- o .: "search" + getStats <- o .: "get" + indexing <- o .: "indexing" + store <- o .: "store" + docs <- o .: "docs" + NodeIndicesStats <$> (fmap unMS <$> mRecovery .:: "throttle_time_in_millis") + <*> mRecovery .:: "current_as_target" + <*> mRecovery .:: "current_as_source" + <*> mQueryCache .:: "miss_count" + <*> mQueryCache .:: "hit_count" + <*> mQueryCache .:: "evictions" + <*> mQueryCache .:: "memory_size_in_bytes" + <*> suggest .: "current" + <*> (unMS <$> suggest .: "time_in_millis") + <*> suggest .: "total" + <*> translog .: "size_in_bytes" + <*> translog .: "operations" + <*> segments .:? "fixed_bit_set_memory_in_bytes" + <*> segments .: "version_map_memory_in_bytes" + <*> segments .:? 
"index_writer_max_memory_in_bytes" + <*> segments .: "index_writer_memory_in_bytes" + <*> segments .: "memory_in_bytes" + <*> segments .: "count" + <*> completion .: "size_in_bytes" + <*> percolate .: "queries" + <*> percolate .: "memory_size_in_bytes" + <*> percolate .: "current" + <*> (unMS <$> percolate .: "time_in_millis") + <*> percolate .: "total" + <*> fielddata .: "evictions" + <*> fielddata .: "memory_size_in_bytes" + <*> idCache .: "memory_size_in_bytes" + <*> filterCache .: "evictions" + <*> filterCache .: "memory_size_in_bytes" + <*> (unMS <$> warmer .: "total_time_in_millis") + <*> warmer .: "total" + <*> warmer .: "current" + <*> (unMS <$> flush .: "total_time_in_millis") + <*> flush .: "total" + <*> (unMS <$> refresh .: "total_time_in_millis") + <*> refresh .: "total" + <*> merges .: "total_size_in_bytes" + <*> merges .: "total_docs" + <*> (unMS <$> merges .: "total_time_in_millis") + <*> merges .: "total" + <*> merges .: "current_size_in_bytes" + <*> merges .: "current_docs" + <*> merges .: "current" + <*> search .: "fetch_current" + <*> (unMS <$> search .: "fetch_time_in_millis") + <*> search .: "fetch_total" + <*> search .: "query_current" + <*> (unMS <$> search .: "query_time_in_millis") + <*> search .: "query_total" + <*> search .: "open_contexts" + <*> getStats .: "current" + <*> (unMS <$> getStats .: "missing_time_in_millis") + <*> getStats .: "missing_total" + <*> (unMS <$> getStats .: "exists_time_in_millis") + <*> getStats .: "exists_total" + <*> (unMS <$> getStats .: "time_in_millis") + <*> getStats .: "total" + <*> (fmap unMS <$> indexing .:? "throttle_time_in_millis") + <*> indexing .:? "is_throttled" + <*> indexing .:? "noop_update_total" + <*> indexing .: "delete_current" + <*> (unMS <$> indexing .: "delete_time_in_millis") + <*> indexing .: "delete_total" + <*> indexing .: "index_current" + <*> (unMS <$> indexing .: "index_time_in_millis") + <*> indexing .: "index_total" + <*> (unMS <$> store .: "throttle_time_in_millis") + <*> store .: "size_in_bytes" + <*> docs .: "deleted" + <*> docs .: "count" + +instance FromJSON NodeBreakersStats where + parseJSON = withObject "NodeBreakersStats" parse + where + parse o = NodeBreakersStats <$> o .: "parent" + <*> o .: "request" + <*> o .: "fielddata" + +instance FromJSON NodePluginInfo where + parseJSON = withObject "NodePluginInfo" parse + where + parse o = NodePluginInfo <$> o .: "site" + <*> o .: "jvm" + <*> o .: "description" + <*> o .: "version" + <*> o .: "name" + +instance FromJSON NodeHTTPInfo where + parseJSON = withObject "NodeHTTPInfo" parse + where + parse o = NodeHTTPInfo <$> o .: "max_content_length_in_bytes" + <*> parseJSON (Object o) + +instance FromJSON NodeTransportInfo where + parseJSON = withObject "NodeTransportInfo" parse + where + parse o = NodeTransportInfo <$> (maybe (return mempty) parseProfiles =<< o .:? 
"profiles") + <*> parseJSON (Object o) + parseProfiles (Object o) | HM.null o = return [] + parseProfiles v@(Array _) = parseJSON v + parseProfiles Null = return [] + parseProfiles _ = fail "Could not parse profiles" + +instance FromJSON NodeNetworkInfo where + parseJSON = withObject "NodeNetworkInfo" parse + where + parse o = NodeNetworkInfo <$> o .: "primary_interface" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON NodeNetworkInterface where + parseJSON = withObject "NodeNetworkInterface" parse + where + parse o = NodeNetworkInterface <$> o .: "mac_address" + <*> o .: "name" + <*> o .: "address" + +instance FromJSON NodeJVMInfo where + parseJSON = withObject "NodeJVMInfo" parse + where + parse o = NodeJVMInfo <$> o .: "memory_pools" + <*> o .: "gc_collectors" + <*> o .: "mem" + <*> (posixMS <$> o .: "start_time_in_millis") + <*> o .: "vm_vendor" + <*> o .: "vm_version" + <*> o .: "vm_name" + <*> (unJVMVersion <$> o .: "version") + <*> o .: "pid" + +instance FromJSON NodeThreadPoolsInfo where + parseJSON = withObject "NodeThreadPoolsInfo" parse + where + parse o = NodeThreadPoolsInfo <$> o .: "refresh" + <*> o .: "management" + <*> o .: "percolate" + <*> o .:? "listener" + <*> o .:? "fetch_shard_started" + <*> o .: "search" + <*> o .: "flush" + <*> o .: "warmer" + <*> o .: "optimize" + <*> o .: "bulk" + <*> o .: "suggest" + <*> o .: "merge" + <*> o .: "snapshot" + <*> o .: "get" + <*> o .:? "fetch_shard_store" + <*> o .: "index" + <*> o .: "generic" + +instance FromJSON NodeProcessInfo where + parseJSON = withObject "NodeProcessInfo" parse + where + parse o = NodeProcessInfo <$> o .: "mlockall" + <*> o .: "max_file_descriptors" + <*> o .: "id" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON NodeOSInfo where + parseJSON = withObject "NodeOSInfo" parse + where + parse o = do + swap <- o .: "swap" + mem <- o .: "mem" + NodeOSInfo <$> swap .: "total_in_bytes" + <*> mem .: "total_in_bytes" + <*> o .: "cpu" + <*> o .: "available_processors" + <*> (unMS <$> o .: "refresh_interval_in_millis") + +instance FromJSON ReplicaBounds where + parseJSON v = withText "ReplicaBounds" parseText v + <|> withBool "ReplicaBounds" parseBool v + where parseText t = case T.splitOn "-" t of + [a, "all"] -> ReplicasLowerBounded <$> parseReadText a + [a, b] -> ReplicasBounded <$> parseReadText a + <*> parseReadText b + _ -> fail ("Could not parse ReplicaBounds: " <> show t) + parseBool False = pure ReplicasUnbounded + parseBool _ = fail "ReplicasUnbounded cannot be represented with True" + +instance FromJSON NominalDiffTimeJSON where + parseJSON = withText "NominalDiffTime" parse + where parse t = case T.takeEnd 1 t of + "s" -> NominalDiffTimeJSON . 
fromInteger <$> parseReadText (T.dropEnd 1 t)
+ _ -> fail "Invalid or missing NominalDiffTime unit (expected s)"
+
+instance FromJSON AllocationPolicy where
+ parseJSON = withText "AllocationPolicy" parse
+ where parse "all" = pure AllocAll
+ parse "primaries" = pure AllocPrimaries
+ parse "new_primaries" = pure AllocNewPrimaries
+ parse "none" = pure AllocNone
+ parse t = fail ("Invalid AllocationPolicy: " <> show t)
+
+instance ToJSON CompoundFormat where
+ toJSON (CompoundFileFormat x) = Bool x
+ toJSON (MergeSegmentVsTotalIndex x) = toJSON x
+
+instance FromJSON CompoundFormat where
+ parseJSON v = CompoundFileFormat <$> parseJSON v
+ <|> MergeSegmentVsTotalIndex <$> parseJSON v
+
+instance FromJSON NodeFSTotalStats where
+ parseJSON = withObject "NodeFSTotalStats" parse
+ where
+ parse o = NodeFSTotalStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time")
+ <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue")
+ <*> o .:? "disk_io_size_in_bytes"
+ <*> o .:? "disk_write_size_in_bytes"
+ <*> o .:? "disk_read_size_in_bytes"
+ <*> o .:? "disk_io_op"
+ <*> o .:? "disk_writes"
+ <*> o .:? "disk_reads"
+ <*> o .: "available_in_bytes"
+ <*> o .: "free_in_bytes"
+ <*> o .: "total_in_bytes"
+
+instance FromJSON BoundTransportAddress where
+ parseJSON = withObject "BoundTransportAddress" parse
+ where
+ parse o = BoundTransportAddress <$> o .: "publish_address"
+ <*> o .: "bound_address"
+
+instance FromJSON JVMMemoryInfo where
+ parseJSON = withObject "JVMMemoryInfo" parse
+ where
+ parse o = JVMMemoryInfo <$> o .: "direct_max_in_bytes"
+ <*> o .: "non_heap_max_in_bytes"
+ <*> o .: "non_heap_init_in_bytes"
+ <*> o .: "heap_max_in_bytes"
+ <*> o .: "heap_init_in_bytes"
+
+instance FromJSON JVMVersion where
+ parseJSON (String t) =
+ JVMVersion <$> parseJSON (String (T.replace "_" "." t))
+ parseJSON v = JVMVersion <$> parseJSON v
+
+instance FromJSON NodeThreadPoolInfo where
+ parseJSON = withObject "NodeThreadPoolInfo" parse
+ where
+ parse o = do
+ ka <- maybe (return Nothing) (fmap Just . parseStringInterval) =<< o .:? "keep_alive"
+ NodeThreadPoolInfo <$> (parseJSON . unStringlyTypeJSON =<< o .: "queue_size")
+ <*> pure ka
+ <*> o .:? "min"
+ <*> o .:? "max"
+ <*> o .: "type"
+
+instance FromJSON CPUInfo where
+ parseJSON = withObject "CPUInfo" parse
+ where
+ parse o = CPUInfo <$> o .: "cache_size_in_bytes"
+ <*> o .: "cores_per_socket"
+ <*> o .: "total_sockets"
+ <*> o .: "total_cores"
+ <*> o .: "mhz"
+ <*> o .: "model"
+ <*> o .: "vendor"
+
+instance FromJSON ThreadPoolSize where
+ parseJSON v = parseAsNumber v <|> parseAsString v
+ where
+ parseAsNumber = parseAsInt <=< parseJSON
+ parseAsInt (-1) = return ThreadPoolUnbounded
+ parseAsInt n
+ | n >= 0 = return (ThreadPoolBounded n)
+ | otherwise = fail "Thread pool size must be >= -1."
+ parseAsString = withText "ThreadPoolSize" $ \t ->
+ case first (readMay . T.unpack) (T.span isNumber t) of
+ (Just n, "k") -> return (ThreadPoolBounded (n * 1000))
+ (Just n, "") -> return (ThreadPoolBounded n)
+ _ -> fail ("Invalid thread pool size " <> T.unpack t)
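
For a sense of how the ThreadPoolSize parser above behaves, here is a small GHCi-style sketch. It is illustrative only: it assumes an aeson version whose decode accepts top-level scalars, and a Show instance for ThreadPoolSize, which is defined elsewhere in this package.

>>> decode "-1" :: Maybe ThreadPoolSize
Just ThreadPoolUnbounded
>>> decode "\"12k\"" :: Maybe ThreadPoolSize
Just (ThreadPoolBounded 12000)
>>> decode "\"7\"" :: Maybe ThreadPoolSize
Just (ThreadPoolBounded 7)
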
+
+instance FromJSON ThreadPoolType where
+ parseJSON = withText "ThreadPoolType" parse
+ where
+ parse "scaling" = return ThreadPoolScaling
+ parse "fixed" = return ThreadPoolFixed
+ parse "cached" = return ThreadPoolCached
+ parse e = fail ("Unexpected thread pool type " <> T.unpack e)
+
+data Source =
+ NoSource
+ | SourcePatterns PatternOrPatterns
+ | SourceIncludeExclude Include Exclude
+ deriving (Show, Eq)
+
+instance ToJSON Source where
+ toJSON NoSource = toJSON False
+ toJSON (SourcePatterns patterns) = toJSON patterns
+ toJSON (SourceIncludeExclude incl excl) = object [ "include" .= incl, "exclude" .= excl ]
+
+data PatternOrPatterns =
+ PopPattern Pattern
+ | PopPatterns [Pattern]
+ deriving (Eq, Show)
+
+data Include = Include [Pattern] deriving (Eq, Show)
+data Exclude = Exclude [Pattern] deriving (Eq, Show)
+
+newtype Pattern = Pattern Text deriving (Eq, Show)
+
+newtype ScrollId = ScrollId Text deriving (Eq, Show, Ord, ToJSON, FromJSON)
+
+instance ToJSON PatternOrPatterns where
+ toJSON (PopPattern pattern) = toJSON pattern
+ toJSON (PopPatterns patterns) = toJSON patterns
+
+instance ToJSON Include where
+ toJSON (Include patterns) = toJSON patterns
+
+instance ToJSON Exclude where
+ toJSON (Exclude patterns) = toJSON patterns
+
+instance ToJSON Pattern where
+ toJSON (Pattern pattern) = toJSON pattern
+
+instance FromJSON NodesStats where
+ parseJSON = withObject "NodesStats" parse
+ where
+ parse o = do
+ nodes <- o .: "nodes"
+ stats <- forM (HM.toList nodes) $ \(fullNID, v) -> do
+ node <- parseJSON v
+ parseNodeStats (FullNodeId fullNID) node
+ cn <- o .: "cluster_name"
+ return (NodesStats stats cn)
+
+instance ToJSON UpdatableIndexSetting where
+ toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x
+ toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x
+ toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x)
+ toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x
+ toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x
+ toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x
+ toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x
+ toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x)
+ toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x
+ toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x
+ toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x)
+ toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x)
+ toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs)
+ toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs)
+ toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs)
+ toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x
+ toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x
+ toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x
+ toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x)
+ toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x
+ toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x
+ toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x
+ toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x
+ toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x
+ toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x
+ toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x
+ toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x
+ toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x
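
Each UpdatableIndexSetting encodes to a chain of singleton objects along its key path, which is the nested shape the index-settings API expects. A rough GHCi-style sketch of the wire format (illustrative only, assuming the instance above is in scope):

>>> encode (RoutingAllocationEnable AllocAll)
"{\"index\":{\"routing\":{\"allocation\":{\"enable\":\"all\"}}}}"
>>> encode (BlocksWrite True)
"{\"blocks\":{\"write\":true}}"
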
"total_shards_per_node"]) x + toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x + toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) + toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x + toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x + toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x + toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x + toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x + toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x + toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x + toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x + toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x + +instance FromJSON UpdatableIndexSetting where + parseJSON = withObject "UpdatableIndexSetting" parse + where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] + <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] + <|> refreshInterval `taggedAt` ["index", "refresh_interval"] + <|> indexConcurrency `taggedAt` ["index", "concurrency"] + <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] + <|> translogFlushThresholdOps `taggedAt` ["index", "translog", "flush_threshold_ops"] + <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] + <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] + <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] + <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] + <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] + <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] + <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] + <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] + <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] + <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] + <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] + <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] + <|> gcDeletes `taggedAt` ["index", "gc_deletes"] + <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] + <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] + <|> compoundFormat `taggedAt` ["index", "compound_format"] + <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] + <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] + <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] + <|> blocksRead `taggedAt` ["blocks", "read"] + <|> blocksWrite `taggedAt` ["blocks", "write"] + <|> blocksMetaData `taggedAt` ["blocks", "metadata"] + where taggedAt f ks = taggedAt' f (Object o) ks + taggedAt' f v [] = f =<< (parseJSON v <|> (parseJSON (unStringlyTypeJSON v))) + taggedAt' f v (k:ks) = withObject "Object" (\o -> do v' <- o .: k + taggedAt' f v' ks) v + numberOfReplicas = pure . NumberOfReplicas + autoExpandReplicas = pure . AutoExpandReplicas + refreshInterval = pure . RefreshInterval . ndtJSON + indexConcurrency = pure . IndexConcurrency + failOnMergeFailure = pure . FailOnMergeFailure + translogFlushThresholdOps = pure . TranslogFlushThresholdOps + translogFlushThresholdSize = pure . 
TranslogFlushThresholdSize + translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON + translogDisableFlush = pure . TranslogDisableFlush + cacheFilterMaxSize = pure . CacheFilterMaxSize + cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON + gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON + routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter + routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter + routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter + routingAllocationEnable = pure . RoutingAllocationEnable + routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode + recoveryInitialShards = pure . RecoveryInitialShards + gcDeletes = pure . GCDeletes . ndtJSON + ttlDisablePurge = pure . TTLDisablePurge + translogFSType = pure . TranslogFSType + compoundFormat = pure . IndexCompoundFormat + compoundOnFlush = pure . IndexCompoundOnFlush + warmerEnabled = pure . WarmerEnabled + blocksReadOnly = pure . BlocksReadOnly + blocksRead = pure . BlocksRead + blocksWrite = pure . BlocksWrite + blocksMetaData = pure . BlocksMetaData + +instance ToJSON ReplicaBounds where + toJSON (ReplicasBounded a b) = String (showText a <> "-" <> showText b) + toJSON (ReplicasLowerBounded a) = String (showText a <> "-all") + toJSON ReplicasUnbounded = Bool False + +instance ToJSON NominalDiffTimeJSON where + toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") + +instance ToJSON AllocationPolicy where + toJSON AllocAll = String "all" + toJSON AllocPrimaries = String "primaries" + toJSON AllocNewPrimaries = String "new_primaries" + toJSON AllocNone = String "none" + +instance ToJSON IndexTemplate where + toJSON (IndexTemplate p s m) = merge + (object [ "template" .= p + , "mappings" .= foldl' merge (object []) m + ]) + (toJSON s) + where + merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2 + merge o Null = o + merge _ _ = undefined + +instance FromJSON IndexAliasesSummary where + parseJSON = withObject "IndexAliasesSummary" parse + where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) + go ixn = withObject "index aliases" $ \ia -> do + aliases <- ia .:? "aliases" .!= mempty + forM (HM.toList aliases) $ \(aName, v) -> do + let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) + IndexAliasSummary indexAlias <$> parseJSON v + +instance ToJSON IndexAliasAction where + toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] + where Object iaObj = toJSON ia + Object optsObj = toJSON opts + toJSON (RemoveAlias ia) = object ["remove" .= iaObj] + where Object iaObj = toJSON ia + +instance ToJSON IndexAlias where + toJSON IndexAlias {..} = object ["index" .= srcIndex + , "alias" .= indexAlias + ] + +instance FromJSON AliasRouting where + parseJSON = withObject "AliasRouting" parse + where parse o = parseAll o <|> parseGranular o + parseAll o = AllAliasRouting <$> o .: "routing" + parseGranular o = do + sr <- o .:? "search_routing" + ir <- o .:? "index_routing" + if isNothing sr && isNothing ir + then fail "Both search_routing and index_routing can't be blank" + else return (GranularAliasRouting sr ir) + +instance FromJSON IndexAliasCreate where + parseJSON v = withObject "IndexAliasCreate" parse v + where parse o = IndexAliasCreate <$> optional (parseJSON v) + <*> o .:? 
"filter" + +instance ToJSON IndexAliasCreate where + toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) + where filterObj = maybe mempty (HM.singleton "filter" . toJSON) aliasCreateFilter + Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting + +instance ToJSON AliasRouting where + toJSON (AllAliasRouting v) = object ["routing" .= v] + toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) + where prs = [("search_routing" .=) <$> srch + ,("index_routing" .=) <$> idx] + +instance ToJSON SearchAliasRouting where + toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) + +instance FromJSON SearchAliasRouting where + parseJSON = withText "SearchAliasRouting" parse + where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) + +instance FromJSON EsError where + parseJSON (Object v) = EsError <$> + v .: "status" <*> + (v .: "error" <|> (v .: "error" >>= (.: "reason"))) + parseJSON _ = empty diff --git a/src/Database/V1/Bloodhound/Internal/Highlight.hs b/src/Database/V1/Bloodhound/Internal/Highlight.hs new file mode 100644 index 0000000..0b3488b --- /dev/null +++ b/src/Database/V1/Bloodhound/Internal/Highlight.hs @@ -0,0 +1,144 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Database.V1.Bloodhound.Internal.Highlight where + + +import Bloodhound.Import + +import qualified Data.Map as M + +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Query + +type HitHighlight = M.Map Text [Text] + +data Highlights = Highlights { globalsettings :: Maybe HighlightSettings + , highlightFields :: [FieldHighlight] + } deriving (Show, Eq) + +instance ToJSON Highlights where + toJSON (Highlights global fields) = + omitNulls (("fields" .= fields) + : highlightSettingsPairs global) + +data HighlightSettings = Plain PlainHighlight + | Postings PostingsHighlight + | FastVector FastVectorHighlight + deriving (Show, Eq) + +data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings) + deriving (Show, Eq) + +data PlainHighlight = + PlainHighlight { plainCommon :: Maybe CommonHighlight + , plainNonPost :: Maybe NonPostings } deriving (Show, Eq) + + -- This requires that index_options are set to 'offset' in the mapping. +data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq) + +-- This requires that term_vector is set to 'with_positions_offsets' in the mapping. +data FastVectorHighlight = + FastVectorHighlight { fvCommon :: Maybe CommonHighlight + , fvNonPostSettings :: Maybe NonPostings + , boundaryChars :: Maybe Text + , boundaryMaxScan :: Maybe Int + , fragmentOffset :: Maybe Int + , matchedFields :: [Text] + , phraseLimit :: Maybe Int + } deriving (Show, Eq) + +data CommonHighlight = + CommonHighlight { order :: Maybe Text + , forceSource :: Maybe Bool + , tag :: Maybe HighlightTag + , encoder :: Maybe HighlightEncoder + , noMatchSize :: Maybe Int + , highlightQuery :: Maybe Query + , requireFieldMatch :: Maybe Bool + } deriving (Show, Eq) + +-- Settings that are only applicable to FastVector and Plain highlighters. +data NonPostings = + NonPostings { fragmentSize :: Maybe Int + , numberOfFragments :: Maybe Int} deriving (Show, Eq) + +data HighlightEncoder = DefaultEncoder + | HTMLEncoder + deriving (Show, Eq) + +-- NOTE: Should the tags use some kind of HTML type, rather than Text? 
+data HighlightTag = TagSchema Text + | CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh + deriving (Show, Eq) + +highlightSettingsPairs :: Maybe HighlightSettings -> [Pair] +highlightSettingsPairs Nothing = [] +highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh) +highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph) +highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh) + +plainHighPairs :: Maybe PlainHighlight -> [Pair] +plainHighPairs Nothing = [] +plainHighPairs (Just (PlainHighlight plCom plNonPost)) = + [ "type" .= String "plain"] + ++ commonHighlightPairs plCom + ++ nonPostingsToPairs plNonPost + +postHighPairs :: Maybe PostingsHighlight -> [Pair] +postHighPairs Nothing = [] +postHighPairs (Just (PostingsHighlight pCom)) = + ("type" .= String "postings") + : commonHighlightPairs pCom + +fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair] +fastVectorHighPairs Nothing = [] +fastVectorHighPairs (Just + (FastVectorHighlight fvCom fvNonPostSettings' fvBoundChars + fvBoundMaxScan fvFragOff fvMatchedFields + fvPhraseLim)) = + [ "type" .= String "fvh" + , "boundary_chars" .= fvBoundChars + , "boundary_max_scan" .= fvBoundMaxScan + , "fragment_offset" .= fvFragOff + , "matched_fields" .= fvMatchedFields + , "phraseLimit" .= fvPhraseLim] + ++ commonHighlightPairs fvCom + ++ nonPostingsToPairs fvNonPostSettings' + +commonHighlightPairs :: Maybe CommonHighlight -> [Pair] +commonHighlightPairs Nothing = [] +commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder + chNoMatchSize chHighlightQuery + chRequireFieldMatch)) = + [ "order" .= chScore + , "force_source" .= chForceSource + , "encoder" .= chEncoder + , "no_match_size" .= chNoMatchSize + , "highlight_query" .= chHighlightQuery + , "require_fieldMatch" .= chRequireFieldMatch] + ++ highlightTagToPairs chTag + +nonPostingsToPairs :: Maybe NonPostings -> [Pair] +nonPostingsToPairs Nothing = [] +nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = + [ "fragment_size" .= npFragSize + , "number_of_fragments" .= npNumOfFrags] + +highlightTagToPairs :: Maybe HighlightTag -> [Pair] +highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"] +highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre + , "post_tags" .= post] +highlightTagToPairs Nothing = [] + +instance ToJSON FieldHighlight where + toJSON (FieldHighlight (FieldName fName) (Just fSettings)) = + object [ fName .= fSettings ] + toJSON (FieldHighlight (FieldName fName) Nothing) = + object [ fName .= emptyObject ] + +instance ToJSON HighlightSettings where + toJSON hs = omitNulls (highlightSettingsPairs (Just hs)) + +instance ToJSON HighlightEncoder where + toJSON DefaultEncoder = String "default" + toJSON HTMLEncoder = String "html" diff --git a/src/Database/V1/Bloodhound/Internal/Newtypes.hs b/src/Database/V1/Bloodhound/Internal/Newtypes.hs index 0c1af11..5f3afdd 100644 --- a/src/Database/V1/Bloodhound/Internal/Newtypes.hs +++ b/src/Database/V1/Bloodhound/Internal/Newtypes.hs @@ -3,10 +3,8 @@ module Database.V1.Bloodhound.Internal.Newtypes where - import Bloodhound.Import - newtype From = From Int deriving (Eq, Show, ToJSON) newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON) @@ -16,7 +14,7 @@ newtype Size = Size Int deriving (Eq, Show, ToJSON, FromJSON) -} newtype FieldName = FieldName Text - deriving (Eq, Read, Show, ToJSON, FromJSON) + deriving (Eq, Show, ToJSON, FromJSON) newtype Boost = Boost Double 
@@ -159,3 +157,55 @@ newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, ToJSON, FromJS -- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ newtype POSIXMS = POSIXMS { posixMS :: UTCTime } + +instance FromJSON POSIXMS where + parseJSON = withScientific "POSIXMS" (return . parse) + where parse n = let n' = truncate n :: Integer + in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) + +{-| 'IndexName' is used to describe which index to query/create/delete +-} +newtype IndexName = IndexName Text deriving (Eq, Show, ToJSON, FromJSON) + +newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Show, ToJSON) + +type Score = Maybe Double + +newtype ShardId = ShardId { shardId :: Int } + deriving (Eq, Show, FromJSON) + +-- | Milliseconds +newtype MS = MS NominalDiffTime + + +-- keeps the unexported constructor warnings at bay +unMS :: MS -> NominalDiffTime +unMS (MS t) = t + +instance FromJSON MS where + parseJSON = withScientific "MS" (return . MS . parse) + where + parse n = fromInteger ((truncate n) * 1000) + +newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a } + deriving (Show, Eq) + +instance FromJSON a => FromJSON (MaybeNA a) where + parseJSON (String "NA") = pure $ MaybeNA Nothing + parseJSON o = MaybeNA . Just <$> parseJSON o + +newtype SnapshotName = SnapshotName { snapshotName :: Text } + deriving (Show, Eq, Ord, ToJSON, FromJSON) + +instance FromJSON ShardCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ShardCount . parseJSON + parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) + + +instance FromJSON ReplicaCount where + parseJSON v = parseAsInt v + <|> parseAsString v + where parseAsInt = fmap ReplicaCount . parseJSON + parseAsString = withText "ReplicaCount" (fmap ReplicaCount . 
parseReadText) diff --git a/src/Database/V1/Bloodhound/Internal/Query.hs b/src/Database/V1/Bloodhound/Internal/Query.hs index 75e1051..2e22695 100644 --- a/src/Database/V1/Bloodhound/Internal/Query.hs +++ b/src/Database/V1/Bloodhound/Internal/Query.hs @@ -2,10 +2,7 @@ {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} -module Database.V1.Bloodhound.Internal.Query - ( module X - , module Database.V1.Bloodhound.Internal.Query - ) where +module Database.V1.Bloodhound.Internal.Query where import Bloodhound.Import @@ -13,8 +10,8 @@ import Bloodhound.Import import qualified Data.HashMap.Strict as HM import qualified Data.Text as T -import Database.Bloodhound.Common.Script as X -import Database.V5.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Types.Class data GeoPoint = @@ -99,6 +96,15 @@ data Filter = AndFilter [Filter] Cache | TermFilter Term Cache deriving (Eq, Show) +instance Semigroup Filter where + a <> b = AndFilter [a, b] defaultCache + +instance Monoid Filter where + mempty = IdentityFilter + mappend = (<>) + +instance Seminearring Filter where + a <||> b = OrFilter [a, b] defaultCache data BoolMatch = MustMatch Term Cache | MustNotMatch Term Cache diff --git a/src/Database/V1/Bloodhound/Internal/Sort.hs b/src/Database/V1/Bloodhound/Internal/Sort.hs index c10f32c..3c29847 100644 --- a/src/Database/V1/Bloodhound/Internal/Sort.hs +++ b/src/Database/V1/Bloodhound/Internal/Sort.hs @@ -25,6 +25,22 @@ type Sort = [SortSpec] data SortSpec = DefaultSortSpec DefaultSort | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show) +instance ToJSON SortSpec where + toJSON (DefaultSortSpec + (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped + dsSortMode dsMissingSort dsNestedFilter)) = + object [dsSortFieldName .= omitNulls base] where + base = [ "order" .= dsSortOrder + , "ignore_unmapped" .= dsIgnoreUnmapped + , "mode" .= dsSortMode + , "missing" .= dsMissingSort + , "nested_filter" .= dsNestedFilter ] + + toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = + object [ "unit" .= units + , field .= gdsLatLon + , "order" .= gdsSortOrder ] + {-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a 'mkSort' convenience function for when you want to specify only the most common parameters. @@ -49,6 +65,10 @@ data SortOrder = Ascending | Descending deriving (Eq, Show) +instance ToJSON SortOrder where + toJSON Ascending = String "asc" + toJSON Descending = String "desc" + {-| 'SortMode' prescribes how to handle sorting array/multi-valued fields. http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option @@ -77,3 +97,10 @@ instance ToJSON Missing where toJSON LastMissing = String "_last" toJSON FirstMissing = String "_first" toJSON (CustomMissing txt) = String txt + +-- {-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so +-- that you can concisely describe the usual kind of 'SortSpec's you want. 
+-- -} +mkSort :: FieldName -> SortOrder -> DefaultSort +mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing + diff --git a/src/Database/V1/Bloodhound/Internal/Suggest.hs b/src/Database/V1/Bloodhound/Internal/Suggest.hs index c58e8b8..64d2a47 100644 --- a/src/Database/V1/Bloodhound/Internal/Suggest.hs +++ b/src/Database/V1/Bloodhound/Internal/Suggest.hs @@ -8,8 +8,8 @@ import Bloodhound.Import import qualified Data.HashMap.Strict as HM -import Database.V5.Bloodhound.Internal.Newtypes -import Database.V5.Bloodhound.Internal.Query +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Query data Suggest = Suggest { suggestText :: Text , suggestName :: Text @@ -209,7 +209,7 @@ data SuggestOptions = , suggestOptionsFreq :: Maybe Int , suggestOptionsHighlighted :: Maybe Text } - deriving (Eq, Read, Show) + deriving (Eq, Show) instance FromJSON SuggestOptions where parseJSON = withObject "SuggestOptions" parse @@ -225,7 +225,7 @@ data SuggestResponse = , suggestResponseLength :: Int , suggestResponseOptions :: [SuggestOptions] } - deriving (Eq, Read, Show) + deriving (Eq, Show) instance FromJSON SuggestResponse where parseJSON = withObject "SuggestResponse" parse @@ -239,7 +239,7 @@ data NamedSuggestionResponse = NamedSuggestionResponse { nsrName :: Text , nsrResponses :: [SuggestResponse] } - deriving (Eq, Read, Show) + deriving (Eq, Show) instance FromJSON NamedSuggestionResponse where parseJSON (Object o) = do diff --git a/src/Database/V1/Bloodhound/Types.hs b/src/Database/V1/Bloodhound/Types.hs index d69c96a..03c4a24 100644 --- a/src/Database/V1/Bloodhound/Types.hs +++ b/src/Database/V1/Bloodhound/Types.hs @@ -378,535 +378,30 @@ module Database.V1.Bloodhound.Types , EsPassword(..) ) where -import Control.Applicative as A -import Control.Arrow (first) -import Control.Monad.Catch -import Control.Monad.Except -import Control.Monad.Reader (MonadReader (..), - ReaderT (..)) -import Control.Monad.State (MonadState) -import Control.Monad.Writer (MonadWriter) -import Data.Aeson -import Data.Aeson.Types (Pair, Parser, - emptyObject, - parseEither, - parseMaybe, - typeMismatch) -import qualified Data.ByteString.Lazy.Char8 as L -import Data.Char -import Data.Hashable (Hashable) -import qualified Data.HashMap.Strict as HM -import Data.List (foldl', intercalate, - nub) -import Data.List.NonEmpty (NonEmpty (..), toList) -import qualified Data.Map.Strict as M -import Data.Maybe -import Data.Scientific (Scientific) -import Data.Semigroup -import Data.Text (Text) -import qualified Data.Text as T -import Data.Time.Calendar -import Data.Time.Clock (NominalDiffTime, - UTCTime) -import Data.Time.Clock.POSIX -import qualified Data.Traversable as DT -import Data.Typeable (Typeable) -import qualified Data.Vector as V -import qualified Data.Version as Vers -import GHC.Enum -import GHC.Generics (Generic) -import Network.HTTP.Client -import qualified Network.HTTP.Types.Method as NHTM -import qualified Text.ParserCombinators.ReadP as RP -import qualified Text.Read as TR +import Bloodhound.Import -import Database.V1.Bloodhound.Internal.Client import Database.V1.Bloodhound.Types.Class +import Database.V1.Bloodhound.Internal.Aggregation +import Database.V1.Bloodhound.Internal.Client +import Database.V1.Bloodhound.Internal.Highlight +import Database.V1.Bloodhound.Internal.Newtypes +import Database.V1.Bloodhound.Internal.Query +import Database.V1.Bloodhound.Internal.Sort +import Database.V1.Bloodhound.Internal.Suggest + +data SearchResult a = + SearchResult 
{ took :: Int + , timedOut :: Bool + , shards :: ShardResult + , searchHits :: SearchHits a + , aggregations :: Maybe AggregationResults + , scrollId :: Maybe ScrollId + , suggest :: Maybe NamedSuggestionResponse -- ^ Only one Suggestion request / response per Search is supported. + } + deriving (Eq, Show) - -data FSType = FSSimple - | FSBuffered deriving (Eq, Read, Show, Generic, Typeable, Ord) - -data InitialShardCount = QuorumShards - | QuorumMinus1Shards - | FullShards - | FullMinus1Shards - | ExplicitShards Int - deriving (Eq, Read, Show, Generic, Typeable) - -data NodeAttrFilter = NodeAttrFilter { nodeAttrFilterName :: NodeAttrName - , nodeAttrFilterValues :: NonEmpty Text} - deriving (Eq, Read, Show, Generic, Ord, Typeable) - -newtype NodeAttrName = NodeAttrName Text deriving (Eq, Read, Show, Ord, Generic, Typeable) - -data CompoundFormat = CompoundFileFormat Bool - | MergeSegmentVsTotalIndex Double - -- ^ percentage between 0 and 1 where 0 is false, 1 is true - deriving (Eq, Read, Show, Generic, Typeable) - -newtype NominalDiffTimeJSON = NominalDiffTimeJSON { ndtJSON :: NominalDiffTime } - -data IndexSettingsSummary = IndexSettingsSummary { sSummaryIndexName :: IndexName - , sSummaryFixedSettings :: IndexSettings - , sSummaryUpdateable :: [UpdatableIndexSetting]} - deriving (Eq, Show, Generic, Typeable) - -{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -} -type Reply = Network.HTTP.Client.Response L.ByteString -type Method = NHTM.Method - -{-| 'OpenCloseIndex' is a sum type for opening and closing indices. - - --} -data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Read, Show, Generic, Typeable) - -data FieldType = GeoPointType - | GeoShapeType - | FloatType - | IntegerType - | LongType - | ShortType - | ByteType deriving (Eq, Read, Show, Generic, Typeable) - -data FieldDefinition = - FieldDefinition { fieldType :: FieldType } deriving (Eq, Read, Show, Generic, Typeable) - -{-| An 'IndexTemplate' defines a template that will automatically be - applied to new indices created. The templates include both - 'IndexSettings' and mappings, and a simple 'TemplatePattern' that - controls if the template will be applied to the index created. - Specify mappings as follows: @[toJSON TweetMapping, ...]@ - - https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html --} -data IndexTemplate = - IndexTemplate { templatePattern :: TemplatePattern - , templateSettings :: Maybe IndexSettings - , templateMappings :: [Value] - } - -data MappingField = - MappingField { mappingFieldName :: FieldName - , fieldDefinition :: FieldDefinition } deriving (Eq, Read, Show, Generic, Typeable) - -{-| Support for type reification of 'Mapping's is currently incomplete, for - now the mapping API verbiage expects a 'ToJSON'able blob. - - Indexes have mappings, mappings are schemas for the documents contained in the - index. I'd recommend having only one mapping per index, always having a mapping, - and keeping different kinds of documents separated if possible. --} -data Mapping = Mapping { typeName :: TypeName - , mappingFields :: [MappingField] } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk - operation index, create, delete, and update. 'BulkIndex' behaves like an - "upsert", 'BulkCreate' will fail if a document already exists at the DocId. 
- - --} -data BulkOperation = - BulkIndex IndexName MappingName DocId Value - | BulkCreate IndexName MappingName DocId Value - | BulkDelete IndexName MappingName DocId - | BulkUpdate IndexName MappingName DocId Value deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsResult' describes the standard wrapper JSON document that you see in - successful Elasticsearch lookups or lookups that couldn't find the document. --} -data EsResult a = EsResult { _index :: Text - , _type :: Text - , _id :: Text - , foundResult :: Maybe (EsResultFound a)} deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsResultFound' contains the document and its metadata inside of an - 'EsResult' when the document was successfully found. --} -data EsResultFound a = EsResultFound { _version :: DocVersion - , _source :: a } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsError' is the generic type that will be returned when there was a - problem. If you can't parse the expected response, its a good idea to - try parsing this. --} -data EsError = EsError { errorStatus :: Int - , errorMessage :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'EsProtocolException' will be thrown if Bloodhound cannot parse a response -returned by the ElasticSearch server. If you encounter this error, please -verify that your domain data types and FromJSON instances are working properly -(for example, the 'a' of '[Hit a]' in 'SearchResult.searchHits.hits'). If you're -sure that your mappings are correct, then this error may be an indication of an -incompatibility between Bloodhound and ElasticSearch. Please open a bug report -and be sure to include the exception body. --} -data EsProtocolException = EsProtocolException { esProtoExBody :: L.ByteString } - deriving (Eq, Read, Show, Generic, Typeable) - -instance Exception EsProtocolException - -data IndexAlias = IndexAlias { srcIndex :: IndexName - , indexAlias :: IndexAliasName } deriving (Eq, Read, Show, Generic, Typeable) - -newtype IndexAliasName = IndexAliasName { indexAliasName :: IndexName } deriving (Eq, Read, Show, Generic, ToJSON) - -data IndexAliasAction = AddAlias IndexAlias IndexAliasCreate - | RemoveAlias IndexAlias deriving (Read, Show, Eq, Generic, Typeable) - -data IndexAliasCreate = IndexAliasCreate { aliasCreateRouting :: Maybe AliasRouting - , aliasCreateFilter :: Maybe Filter} - deriving (Read, Show, Eq, Generic, Typeable) - -data AliasRouting = AllAliasRouting RoutingValue - | GranularAliasRouting (Maybe SearchAliasRouting) (Maybe IndexAliasRouting) - deriving (Read, Show, Eq, Generic, Typeable) - -newtype SearchAliasRouting = SearchAliasRouting (NonEmpty RoutingValue) deriving (Read, Show, Eq, Generic, Typeable) - -newtype IndexAliasRouting = IndexAliasRouting RoutingValue deriving (Read, Show, Eq, Generic, ToJSON, FromJSON, Typeable) - -newtype RoutingValue = RoutingValue { routingValue :: Text } deriving (Read, Show, Eq, Generic, ToJSON, FromJSON, Typeable) - -newtype IndexAliasesSummary = IndexAliasesSummary { indexAliasesSummary :: [IndexAliasSummary] } deriving (Read, Show, Eq, Generic, Typeable) - -{-| 'IndexAliasSummary' is a summary of an index alias configured for a server. -} -data IndexAliasSummary = IndexAliasSummary { indexAliasSummaryAlias :: IndexAlias - , indexAliasSummaryCreate :: IndexAliasCreate} deriving (Read, Show, Eq, Generic, Typeable) - -{-| 'DocVersion' is an integer version number for a document between 1 -and 9.2e+18 used for <>. 
--} -newtype DocVersion = DocVersion { - docVersionNumber :: Int - } deriving (Eq, Read, Show, Generic, Ord, ToJSON) - --- | Smart constructor for in-range doc version -mkDocVersion :: Int -> Maybe DocVersion -mkDocVersion i - | i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) = - Just $ DocVersion i - | otherwise = Nothing - - -{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its -own version numbers instead of ones from ES. --} -newtype ExternalDocVersion = ExternalDocVersion DocVersion - deriving (Eq, Read, Show, Generic, Ord, Bounded, Enum, ToJSON) - -{-| 'VersionControl' is specified when indexing documents as a -optimistic concurrency control. --} -data VersionControl = NoVersionControl - -- ^ Don't send a version. This is a pure overwrite. - | InternalVersion DocVersion - -- ^ Use the default ES versioning scheme. Only - -- index the document if the version is the same - -- as the one specified. Only applicable to - -- updates, as you should be getting Version from - -- a search result. - | ExternalGT ExternalDocVersion - -- ^ Use your own version numbering. Only index - -- the document if the version is strictly higher - -- OR the document doesn't exist. The given - -- version will be used as the new version number - -- for the stored document. N.B. All updates must - -- increment this number, meaning there is some - -- global, external ordering of updates. - | ExternalGTE ExternalDocVersion - -- ^ Use your own version numbering. Only index - -- the document if the version is equal or higher - -- than the stored version. Will succeed if there - -- is no existing document. The given version will - -- be used as the new version number for the - -- stored document. Use with care, as this could - -- result in data loss. - | ForceVersion ExternalDocVersion - -- ^ The document will always be indexed and the - -- given version will be the new version. This is - -- typically used for correcting errors. Use with - -- care, as this could result in data loss. - deriving (Read, Show, Eq, Generic, Ord) - -{-| 'DocumentParent' is used to specify a parent document. --} -newtype DocumentParent = DocumentParent DocId - deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'IndexDocumentSettings' are special settings supplied when indexing -a document. For the best backwards compatiblity when new fields are -added, you should probably prefer to start with 'defaultIndexDocumentSettings' --} -data IndexDocumentSettings = - IndexDocumentSettings { idsVersionControl :: VersionControl - , idsParent :: Maybe DocumentParent - } deriving (Eq, Read, Show, Generic, Typeable) - -{-| Reasonable default settings. Chooses no version control and no parent. --} -defaultIndexDocumentSettings :: IndexDocumentSettings -defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing - -{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order - dependent with later sorts acting as tie-breakers for earlier sorts. --} -type Sort = [SortSpec] - -{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and - 'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and - 'DistanceUnit' to express "nearness" to a single geographical point as a - sort specification. - - --} -data SortSpec = DefaultSortSpec DefaultSort - | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. 
There's a - 'mkSort' convenience function for when you want to specify only the most - common parameters. - - --} -data DefaultSort = - DefaultSort { sortFieldName :: FieldName - , sortOrder :: SortOrder - -- default False - , ignoreUnmapped :: Bool - , sortMode :: Maybe SortMode - , missingSort :: Maybe Missing - , nestedFilter :: Maybe Filter } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get - encoded into "asc" or "desc" when turned into JSON. - - --} -data SortOrder = Ascending - | Descending deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'Missing' prescribes how to handle missing fields. A missing field can be - sorted last, first, or using a custom value as a substitute. - - --} -data Missing = LastMissing - | FirstMissing - | CustomMissing Text deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields. - -http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option --} -data SortMode = SortMin - | SortMax - | SortSum - | SortAvg deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so - that you can concisely describe the usual kind of 'SortSpec's you want. --} -mkSort :: FieldName -> SortOrder -> DefaultSort -mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing - -{-| 'Cache' is for telling ES whether it should cache a 'Filter' not. - 'Query's cannot be cached. --} -type Cache = Bool -- caching on/off -defaultCache :: Cache -defaultCache = False - -{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component. --} -type PrefixValue = Text - -{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible - JSON encoding baked in. Used all over the place. --} -data BooleanOperator = And | Or deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'ShardCount' is part of 'IndexSettings' --} -newtype ShardCount = ShardCount Int deriving (Eq, Read, Show, Generic, ToJSON, Typeable) - -{-| 'ReplicaCount' is part of 'IndexSettings' --} -newtype ReplicaCount = ReplicaCount Int deriving (Eq, Read, Show, Generic, ToJSON, Typeable) - -{-| 'IndexName' is used to describe which index to query/create/delete --} -newtype IndexName = IndexName Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'IndexSelection' is used for APIs which take a single index, a list of - indexes, or the special @_all@ index. --} ---TODO: this does not fully support . It wouldn't be too hard to implement but you'd have to add the optional parameters (ignore_unavailable, allow_no_indices, expand_wildcards) to any APIs using it. Also would be a breaking API. -data IndexSelection = IndexList (NonEmpty IndexName) - | AllIndexes deriving (Eq, Generic, Show, Typeable) - -{-| 'NodeSelection' is used for most cluster APIs. See for more details. --} -data NodeSelection = LocalNode - -- ^ Whatever node receives this request - | NodeList (NonEmpty NodeSelector) - | AllNodes deriving (Eq, Generic, Show, Typeable) - - --- | An exact match or pattern to identify a node. Note that All of --- these options support wildcarding, so your node name, server, attr --- name can all contain * characters to be a fuzzy match. -data NodeSelector = NodeByName NodeName - | NodeByFullNodeId FullNodeId - | NodeByHost Server - -- ^ e.g. 
10.0.0.1 or even 10.0.0.* - | NodeByAttribute NodeAttrName Text - -- ^ NodeAttrName can be a pattern, e.g. rack*. The value can too. - deriving (Eq, Generic, Show, Typeable) - -{-| 'TemplateName' is used to describe which template to query/create/delete --} -newtype TemplateName = TemplateName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'TemplatePattern' represents a pattern which is matched against index names --} -newtype TemplatePattern = TemplatePattern Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'MappingName' is part of mappings which are how ES describes and schematizes - the data in the indices. --} -newtype MappingName = MappingName Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'DocId' is a generic wrapper value for expressing unique Document IDs. - Can be set by the user or created by ES itself. Often used in client - functions for poking at specific documents. --} -newtype DocId = DocId Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'QueryString' is used to wrap query text bodies, be they human written or not. --} -newtype QueryString = QueryString Text deriving (Eq, Generic, Read, Show, ToJSON, FromJSON, Typeable) - -{-| 'FieldName' is used all over the place wherever a specific field within - a document needs to be specified, usually in 'Query's or 'Filter's. --} -newtype FieldName = FieldName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - - -{-| 'Script' is often used in place of 'FieldName' to specify more -complex ways of extracting a value from a document. --} -newtype Script = Script { scriptText :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'CacheName' is used in 'RegexpFilter' for describing the - 'CacheKey' keyed caching behavior. --} -newtype CacheName = CacheName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching. --} -newtype CacheKey = - CacheKey Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Existence = - Existence Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype NullValue = - NullValue Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype CutoffFrequency = - CutoffFrequency Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Analyzer = - Analyzer Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxExpansions = - MaxExpansions Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'Lenient', if set to true, will cause format based failures to be - ignored. I don't know what the bloody default is, Elasticsearch - documentation didn't say what it was. Let me know if you figure it out. --} -newtype Lenient = - Lenient Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Tiebreaker = - Tiebreaker Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Boost = - Boost Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype BoostTerms = - BoostTerms Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'MinimumMatch' controls how many should clauses in the bool query should - match. Can be an absolute value (2) or a percentage (30%) or a - combination of both. 
--} -newtype MinimumMatch = - MinimumMatch Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype DisableCoord = - DisableCoord Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype IgnoreTermFrequency = - IgnoreTermFrequency Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinimumTermFrequency = - MinimumTermFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxQueryTerms = - MaxQueryTerms Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype Fuzziness = - Fuzziness Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -} -newtype PrefixLength = - PrefixLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype TypeName = - TypeName Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype PercentMatch = - PercentMatch Double deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype StopWord = - StopWord Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype QueryPath = - QueryPath Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly - heavy, because all terms in the index need to be examined, just in case - they match. Leading wildcards can be disabled by setting - 'AllowLeadingWildcard' to false. -} -newtype AllowLeadingWildcard = - AllowLeadingWildcard Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype LowercaseExpanded = - LowercaseExpanded Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype EnablePositionIncrements = - EnablePositionIncrements Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| By default, wildcard terms in a query are not analyzed. - Setting 'AnalyzeWildcard' to true enables best-effort analysis. --} -newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'GeneratePhraseQueries' defaults to false. --} -newtype GeneratePhraseQueries = - GeneratePhraseQueries Bool deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'Locale' is used for string conversions - defaults to ROOT. --} -newtype Locale = Locale Text deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxWordLength = MaxWordLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinWordLength = MinWordLength Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - -{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact - phrase matches. Default is 0. --} -newtype PhraseSlop = PhraseSlop Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) -newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) - --- | Newtype wrapper to parse ES's concerning tendency to in some APIs return a floating point number of milliseconds since epoch ಠ_ಠ -newtype POSIXMS = POSIXMS { posixMS :: UTCTime } - -{-| 'unpackId' is a silly convenience function that gets used once. 
--} -unpackId :: DocId -> Text -unpackId (DocId docId) = docId - type TrackSortScores = Bool -newtype From = From Int deriving (Eq, Read, Show, Generic, ToJSON) -newtype Size = Size Int deriving (Eq, Read, Show, Generic, ToJSON, FromJSON, Typeable) data Search = Search { queryBody :: Maybe Query , filterBody :: Maybe Filter @@ -921,7 +416,7 @@ data Search = Search { queryBody :: Maybe Query , fields :: Maybe [FieldName] , source :: Maybe Source , suggestBody :: Maybe Suggest -- ^ Only one Suggestion request / response per Search is supported. - } deriving (Eq, Read, Show, Generic, Typeable) + } deriving (Eq, Show) data SearchType = SearchTypeQueryThenFetch | SearchTypeDfsQueryThenFetch @@ -929,2215 +424,7 @@ data SearchType = SearchTypeQueryThenFetch | SearchTypeScan | SearchTypeQueryAndFetch | SearchTypeDfsQueryAndFetch - deriving (Eq, Read, Show, Generic, Typeable) - -data Source = - NoSource - | SourcePatterns PatternOrPatterns - | SourceIncludeExclude Include Exclude - deriving (Read, Show, Eq, Generic, Typeable) - -data PatternOrPatterns = - PopPattern Pattern - | PopPatterns [Pattern] - deriving (Eq, Read, Show, Generic, Typeable) - -data Include = Include [Pattern] deriving (Eq, Read, Show, Generic, Typeable) -data Exclude = Exclude [Pattern] deriving (Eq, Read, Show, Generic, Typeable) - -newtype Pattern = Pattern Text deriving (Eq, Read, Show, Generic, Typeable) - -data Highlights = Highlights { globalsettings :: Maybe HighlightSettings - , highlightFields :: [FieldHighlight] - } deriving (Read, Show, Eq, Generic, Typeable) - -data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings) - deriving (Read, Show, Eq, Generic, Typeable) - - -data HighlightSettings = Plain PlainHighlight - | Postings PostingsHighlight - | FastVector FastVectorHighlight - deriving (Read, Show, Eq, Generic, Typeable) -data PlainHighlight = - PlainHighlight { plainCommon :: Maybe CommonHighlight - , plainNonPost :: Maybe NonPostings } deriving (Read, Show, Eq, Generic, Typeable) - - -- This requires that index_options are set to 'offset' in the mapping. -data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Read, Show, Eq, Generic, Typeable) - --- This requires that term_vector is set to 'with_positions_offsets' in the mapping. -data FastVectorHighlight = - FastVectorHighlight { fvCommon :: Maybe CommonHighlight - , fvNonPostSettings :: Maybe NonPostings - , boundaryChars :: Maybe Text - , boundaryMaxScan :: Maybe Int - , fragmentOffset :: Maybe Int - , matchedFields :: [Text] - , phraseLimit :: Maybe Int - } deriving (Read, Show, Eq, Generic, Typeable) - -data CommonHighlight = - CommonHighlight { order :: Maybe Text - , forceSource :: Maybe Bool - , tag :: Maybe HighlightTag - , encoder :: Maybe HighlightEncoder - , noMatchSize :: Maybe Int - , highlightQuery :: Maybe Query - , requireFieldMatch :: Maybe Bool - } deriving (Read, Show, Eq, Generic, Typeable) - --- Settings that are only applicable to FastVector and Plain highlighters. -data NonPostings = - NonPostings { fragmentSize :: Maybe Int - , numberOfFragments :: Maybe Int} deriving (Read, Show, Eq, Generic, Typeable) - -data HighlightEncoder = DefaultEncoder - | HTMLEncoder - deriving (Read, Show, Eq, Generic, Typeable) - --- NOTE: Should the tags use some kind of HTML type, rather than Text? 
-data HighlightTag = TagSchema Text - | CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh - deriving (Read, Show, Eq, Generic, Typeable) - - -data Query = - TermQuery Term (Maybe Boost) - | TermsQuery Text (NonEmpty Text) - | QueryMatchQuery MatchQuery - | QueryMultiMatchQuery MultiMatchQuery - | QueryBoolQuery BoolQuery - | QueryBoostingQuery BoostingQuery - | QueryCommonTermsQuery CommonTermsQuery - | ConstantScoreFilter Filter Boost - | ConstantScoreQuery Query Boost - | QueryDisMaxQuery DisMaxQuery - | QueryFilteredQuery FilteredQuery - | QueryFuzzyLikeThisQuery FuzzyLikeThisQuery - | QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery - | QueryFuzzyQuery FuzzyQuery - | QueryHasChildQuery HasChildQuery - | QueryHasParentQuery HasParentQuery - | IdsQuery MappingName [DocId] - | QueryIndicesQuery IndicesQuery - | MatchAllQuery (Maybe Boost) - | QueryMoreLikeThisQuery MoreLikeThisQuery - | QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery - | QueryNestedQuery NestedQuery - | QueryPrefixQuery PrefixQuery - | QueryQueryStringQuery QueryStringQuery - | QuerySimpleQueryStringQuery SimpleQueryStringQuery - | QueryRangeQuery RangeQuery - | QueryRegexpQuery RegexpQuery - | QueryTemplateQueryInline TemplateQueryInline - deriving (Eq, Read, Show, Generic, Typeable) - -data RegexpQuery = - RegexpQuery { regexpQueryField :: FieldName - , regexpQuery :: Regexp - , regexpQueryFlags :: RegexpFlags - , regexpQueryBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data RangeQuery = - RangeQuery { rangeQueryField :: FieldName - , rangeQueryRange :: RangeValue - , rangeQueryBoost :: Boost } deriving (Eq, Read, Show, Generic, Typeable) - -mkRangeQuery :: FieldName -> RangeValue -> RangeQuery -mkRangeQuery f r = RangeQuery f r (Boost 1.0) - -data SimpleQueryStringQuery = - SimpleQueryStringQuery - { simpleQueryStringQuery :: QueryString - , simpleQueryStringField :: Maybe FieldOrFields - , simpleQueryStringOperator :: Maybe BooleanOperator - , simpleQueryStringAnalyzer :: Maybe Analyzer - , simpleQueryStringFlags :: Maybe (NonEmpty SimpleQueryFlag) - , simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded - , simpleQueryStringLocale :: Maybe Locale - } deriving (Eq, Read, Show, Generic, Typeable) - -data SimpleQueryFlag = - SimpleQueryAll - | SimpleQueryNone - | SimpleQueryAnd - | SimpleQueryOr - | SimpleQueryPrefix - | SimpleQueryPhrase - | SimpleQueryPrecedence - | SimpleQueryEscape - | SimpleQueryWhitespace - | SimpleQueryFuzzy - | SimpleQueryNear - | SimpleQuerySlop deriving (Eq, Read, Show, Generic, Typeable) - --- use_dis_max and tie_breaker when fields are plural? 
-data QueryStringQuery = - QueryStringQuery - { queryStringQuery :: QueryString - , queryStringDefaultField :: Maybe FieldName - , queryStringOperator :: Maybe BooleanOperator - , queryStringAnalyzer :: Maybe Analyzer - , queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard - , queryStringLowercaseExpanded :: Maybe LowercaseExpanded - , queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements - , queryStringFuzzyMaxExpansions :: Maybe MaxExpansions - , queryStringFuzziness :: Maybe Fuzziness - , queryStringFuzzyPrefixLength :: Maybe PrefixLength - , queryStringPhraseSlop :: Maybe PhraseSlop - , queryStringBoost :: Maybe Boost - , queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard - , queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries - , queryStringMinimumShouldMatch :: Maybe MinimumMatch - , queryStringLenient :: Maybe Lenient - , queryStringLocale :: Maybe Locale - } deriving (Eq, Read, Show, Generic, Typeable) - -mkQueryStringQuery :: QueryString -> QueryStringQuery -mkQueryStringQuery qs = - QueryStringQuery qs Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing - Nothing Nothing - -data FieldOrFields = FofField FieldName - | FofFields (NonEmpty FieldName) deriving (Eq, Read, Show, Generic, Typeable) - -data PrefixQuery = - PrefixQuery - { prefixQueryField :: FieldName - , prefixQueryPrefixValue :: Text - , prefixQueryBoost :: Maybe Boost } deriving (Eq, Read, Show, Generic, Typeable) - -data NestedQuery = - NestedQuery - { nestedQueryPath :: QueryPath - , nestedQueryScoreType :: ScoreType - , nestedQuery :: Query } deriving (Eq, Read, Show, Generic, Typeable) - -data MoreLikeThisFieldQuery = - MoreLikeThisFieldQuery - { moreLikeThisFieldText :: Text - , moreLikeThisFieldFields :: FieldName - -- default 0.3 (30%) - , moreLikeThisFieldPercentMatch :: Maybe PercentMatch - , moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency - , moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms - , moreLikeThisFieldStopWords :: Maybe (NonEmpty StopWord) - , moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency - , moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency - , moreLikeThisFieldMinWordLength :: Maybe MinWordLength - , moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength - , moreLikeThisFieldBoostTerms :: Maybe BoostTerms - , moreLikeThisFieldBoost :: Maybe Boost - , moreLikeThisFieldAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data MoreLikeThisQuery = - MoreLikeThisQuery - { moreLikeThisText :: Text - , moreLikeThisFields :: Maybe (NonEmpty FieldName) - -- default 0.3 (30%) - , moreLikeThisPercentMatch :: Maybe PercentMatch - , moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency - , moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms - , moreLikeThisStopWords :: Maybe (NonEmpty StopWord) - , moreLikeThisMinDocFrequency :: Maybe MinDocFrequency - , moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency - , moreLikeThisMinWordLength :: Maybe MinWordLength - , moreLikeThisMaxWordLength :: Maybe MaxWordLength - , moreLikeThisBoostTerms :: Maybe BoostTerms - , moreLikeThisBoost :: Maybe Boost - , moreLikeThisAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data IndicesQuery = - IndicesQuery - { indicesQueryIndices :: [IndexName] - , indicesQuery :: Query - -- default "all" - , indicesQueryNoMatch :: Maybe Query } deriving (Eq, Read, Show, Generic, Typeable) - -data HasParentQuery = - HasParentQuery - { 
hasParentQueryType :: TypeName - , hasParentQuery :: Query - , hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Read, Show, Generic, Typeable) - -data HasChildQuery = - HasChildQuery - { hasChildQueryType :: TypeName - , hasChildQuery :: Query - , hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Read, Show, Generic, Typeable) - -data ScoreType = - ScoreTypeMax - | ScoreTypeSum - | ScoreTypeAvg - | ScoreTypeNone deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyQuery = - FuzzyQuery { fuzzyQueryField :: FieldName - , fuzzyQueryValue :: Text - , fuzzyQueryPrefixLength :: PrefixLength - , fuzzyQueryMaxExpansions :: MaxExpansions - , fuzzyQueryFuzziness :: Fuzziness - , fuzzyQueryBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyLikeFieldQuery = - FuzzyLikeFieldQuery - { fuzzyLikeField :: FieldName - -- anaphora is good for the soul. - , fuzzyLikeFieldText :: Text - , fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms - , fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency - , fuzzyLikeFieldFuzziness :: Fuzziness - , fuzzyLikeFieldPrefixLength :: PrefixLength - , fuzzyLikeFieldBoost :: Boost - , fuzzyLikeFieldAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data FuzzyLikeThisQuery = - FuzzyLikeThisQuery - { fuzzyLikeFields :: [FieldName] - , fuzzyLikeText :: Text - , fuzzyLikeMaxQueryTerms :: MaxQueryTerms - , fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency - , fuzzyLikeFuzziness :: Fuzziness - , fuzzyLikePrefixLength :: PrefixLength - , fuzzyLikeBoost :: Boost - , fuzzyLikeAnalyzer :: Maybe Analyzer - } deriving (Eq, Read, Show, Generic, Typeable) - -data FilteredQuery = - FilteredQuery - { filteredQuery :: Query - , filteredFilter :: Filter } deriving (Eq, Read, Show, Generic, Typeable) - -data DisMaxQuery = - DisMaxQuery { disMaxQueries :: [Query] - -- default 0.0 - , disMaxTiebreaker :: Tiebreaker - , disMaxBoost :: Maybe Boost - } deriving (Eq, Read, Show, Generic, Typeable) - -data MatchQuery = - MatchQuery { matchQueryField :: FieldName - , matchQueryQueryString :: QueryString - , matchQueryOperator :: BooleanOperator - , matchQueryZeroTerms :: ZeroTermsQuery - , matchQueryCutoffFrequency :: Maybe CutoffFrequency - , matchQueryMatchType :: Maybe MatchQueryType - , matchQueryAnalyzer :: Maybe Analyzer - , matchQueryMaxExpansions :: Maybe MaxExpansions - , matchQueryLenient :: Maybe Lenient - , matchQueryBoost :: Maybe Boost } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters, - enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery' --} -mkMatchQuery :: FieldName -> QueryString -> MatchQuery -mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing - -data MatchQueryType = - MatchPhrase - | MatchPhrasePrefix deriving (Eq, Read, Show, Generic, Typeable) - -data MultiMatchQuery = - MultiMatchQuery { multiMatchQueryFields :: [FieldName] - , multiMatchQueryString :: QueryString - , multiMatchQueryOperator :: BooleanOperator - , multiMatchQueryZeroTerms :: ZeroTermsQuery - , multiMatchQueryTiebreaker :: Maybe Tiebreaker - , multiMatchQueryType :: Maybe MultiMatchQueryType - , multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency - , multiMatchQueryAnalyzer :: Maybe Analyzer - , multiMatchQueryMaxExpansions :: Maybe MaxExpansions - , multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Read, Show, Generic, Typeable) - -{-| 
'mkMultiMatchQuery' is a convenience function that defaults the less common parameters, - enabling you to provide only the list of 'FieldName's and 'QueryString' to - make a 'MultiMatchQuery'. --} - -mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery -mkMultiMatchQuery matchFields query = - MultiMatchQuery matchFields query - Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing - -data MultiMatchQueryType = - MultiMatchBestFields - | MultiMatchMostFields - | MultiMatchCrossFields - | MultiMatchPhrase - | MultiMatchPhrasePrefix deriving (Eq, Read, Show, Generic, Typeable) - -data BoolQuery = - BoolQuery { boolQueryMustMatch :: [Query] - , boolQueryMustNotMatch :: [Query] - , boolQueryShouldMatch :: [Query] - , boolQueryMinimumShouldMatch :: Maybe MinimumMatch - , boolQueryBoost :: Maybe Boost - , boolQueryDisableCoord :: Maybe DisableCoord - } deriving (Eq, Read, Show, Generic, Typeable) - -mkBoolQuery :: [Query] -> [Query] -> [Query] -> BoolQuery -mkBoolQuery must mustNot should = - BoolQuery must mustNot should Nothing Nothing Nothing - -data BoostingQuery = - BoostingQuery { positiveQuery :: Query - , negativeQuery :: Query - , negativeBoost :: Boost } deriving (Eq, Read, Show, Generic, Typeable) - -data CommonTermsQuery = - CommonTermsQuery { commonField :: FieldName - , commonQuery :: QueryString - , commonCutoffFrequency :: CutoffFrequency - , commonLowFreqOperator :: BooleanOperator - , commonHighFreqOperator :: BooleanOperator - , commonMinimumShouldMatch :: Maybe CommonMinimumMatch - , commonBoost :: Maybe Boost - , commonAnalyzer :: Maybe Analyzer - , commonDisableCoord :: Maybe DisableCoord - } deriving (Eq, Read, Show, Generic, Typeable) - -data CommonMinimumMatch = - CommonMinimumMatchHighLow MinimumMatchHighLow - | CommonMinimumMatch MinimumMatch - deriving (Eq, Read, Show, Generic, Typeable) - -data MinimumMatchHighLow = - MinimumMatchHighLow { lowFreq :: MinimumMatch - , highFreq :: MinimumMatch } deriving (Eq, Read, Show, Generic, Typeable) - -data Filter = AndFilter [Filter] Cache - | OrFilter [Filter] Cache - | NotFilter Filter Cache - | IdentityFilter - | BoolFilter BoolMatch - | ExistsFilter FieldName -- always cached - | GeoBoundingBoxFilter GeoBoundingBoxConstraint - | GeoDistanceFilter GeoPoint Distance DistanceType OptimizeBbox Cache - | GeoDistanceRangeFilter GeoPoint DistanceRange - | GeoPolygonFilter FieldName [LatLon] - | IdsFilter MappingName [DocId] - | LimitFilter Int - | MissingFilter FieldName Existence NullValue - | PrefixFilter FieldName PrefixValue Cache - | QueryFilter Query Cache - | RangeFilter FieldName RangeValue RangeExecution Cache - | RegexpFilter FieldName Regexp RegexpFlags CacheName Cache CacheKey - | TermFilter Term Cache - deriving (Eq, Read, Show, Generic, Typeable) - -data ZeroTermsQuery = ZeroTermsNone - | ZeroTermsAll deriving (Eq, Read, Show, Generic, Typeable) - -data RangeExecution = RangeExecutionIndex - | RangeExecutionFielddata deriving (Eq, Read, Show, Generic, Typeable) - -newtype Regexp = Regexp Text deriving (Eq, Read, Show, Generic, Typeable, FromJSON) - -data RegexpFlags = AllRegexpFlags - | NoRegexpFlags - | SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Read, Show, Generic, Typeable) - -data RegexpFlag = AnyString - | Automaton - | Complement - | Empty - | Intersection - | Interval deriving (Eq, Read, Show, Generic, Typeable) - -newtype LessThan = LessThan Double deriving (Eq, Read, Show, Generic, Typeable) -newtype LessThanEq = LessThanEq Double deriving (Eq, Read, Show, Generic, Typeable) 
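-- An illustrative sketch of the convenience constructors above (the field
-- names and query text are made up). 'mkBoolQuery' takes just the must,
-- must-not, and should clauses and defaults the optional knobs:
--
-- >>> let matchTitle = QueryMatchQuery (mkMatchQuery (FieldName "title") (QueryString "bloodhound"))
-- >>> let matchDraft = QueryMatchQuery (mkMatchQuery (FieldName "status") (QueryString "draft"))
-- >>> QueryBoolQuery (mkBoolQuery [matchTitle] [matchDraft] [])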
-newtype GreaterThan = GreaterThan Double deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Read, Show, Generic, Typeable) - -newtype LessThanD = LessThanD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Read, Show, Generic, Typeable) -newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Read, Show, Generic, Typeable) - -data RangeValue = RangeDateLte LessThanEqD - | RangeDateLt LessThanD - | RangeDateGte GreaterThanEqD - | RangeDateGt GreaterThanD - | RangeDateGtLt GreaterThanD LessThanD - | RangeDateGteLte GreaterThanEqD LessThanEqD - | RangeDateGteLt GreaterThanEqD LessThanD - | RangeDateGtLte GreaterThanD LessThanEqD - | RangeDoubleLte LessThanEq - | RangeDoubleLt LessThan - | RangeDoubleGte GreaterThanEq - | RangeDoubleGt GreaterThan - | RangeDoubleGtLt GreaterThan LessThan - | RangeDoubleGteLte GreaterThanEq LessThanEq - | RangeDoubleGteLt GreaterThanEq LessThan - | RangeDoubleGtLte GreaterThan LessThanEq - deriving (Eq, Read, Show, Generic, Typeable) - -rangeValueToPair :: RangeValue -> [Pair] -rangeValueToPair rv = case rv of - RangeDateLte (LessThanEqD t) -> ["lte" .= t] - RangeDateGte (GreaterThanEqD t) -> ["gte" .= t] - RangeDateLt (LessThanD t) -> ["lt" .= t] - RangeDateGt (GreaterThanD t) -> ["gt" .= t] - RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g] - RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g] - RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g] - RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g] - RangeDoubleLte (LessThanEq t) -> ["lte" .= t] - RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t] - RangeDoubleLt (LessThan t) -> ["lt" .= t] - RangeDoubleGt (GreaterThan t) -> ["gt" .= t] - RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g] - RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g] - RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g] - RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g] - -data Term = Term { termField :: Text - , termValue :: Text } deriving (Eq, Read, Show, Generic, Typeable) - -data BoolMatch = MustMatch Term Cache - | MustNotMatch Term Cache - | ShouldMatch [Term] Cache deriving (Eq, Read, Show, Generic, Typeable) - --- "memory" or "indexed" -data GeoFilterType = GeoFilterMemory - | GeoFilterIndexed deriving (Eq, Read, Show, Generic, Typeable) - -data LatLon = LatLon { lat :: Double - , lon :: Double } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoBoundingBox = - GeoBoundingBox { topLeft :: LatLon - , bottomRight :: LatLon } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoBoundingBoxConstraint = - GeoBoundingBoxConstraint { geoBBField :: FieldName - , constraintBox :: GeoBoundingBox - , bbConstraintcache :: Cache - , geoType :: GeoFilterType - } deriving (Eq, Read, Show, Generic, Typeable) - -data GeoPoint = - GeoPoint { geoField :: FieldName - , latLon :: LatLon} deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceUnit = Miles - | Yards - | Feet - | Inches - | Kilometers - | Meters - | Centimeters - | Millimeters - | NauticalMiles deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceType = Arc - | SloppyArc -- doesn't exist <1.0 - | Plane deriving (Eq, Read, Show, Generic, Typeable) - -data OptimizeBbox 
= OptimizeGeoFilterType GeoFilterType - | NoOptimizeBbox deriving (Eq, Read, Show, Generic, Typeable) - -data Distance = - Distance { coefficient :: Double - , unit :: DistanceUnit } deriving (Eq, Read, Show, Generic, Typeable) - -data DistanceRange = - DistanceRange { distanceFrom :: Distance - , distanceTo :: Distance } deriving (Eq, Read, Show, Generic, Typeable) - -type TemplateQueryKey = Text -type TemplateQueryValue = Text - -newtype TemplateQueryKeyValuePairs = TemplateQueryKeyValuePairs (HM.HashMap TemplateQueryKey TemplateQueryValue) - deriving (Eq, Read, Show, Generic, Typeable) - -instance ToJSON TemplateQueryKeyValuePairs where - toJSON (TemplateQueryKeyValuePairs x) = Object $ HM.map toJSON x - -instance FromJSON TemplateQueryKeyValuePairs where - parseJSON (Object o) = pure . TemplateQueryKeyValuePairs $ HM.mapMaybe getValue o - where getValue (String x) = Just x - getValue _ = Nothing - parseJSON _ = fail "error parsing TemplateQueryKeyValuePairs" - -data TemplateQueryInline = - TemplateQueryInline { inline :: Query - , params :: TemplateQueryKeyValuePairs - } - deriving (Eq, Read, Show, Generic, Typeable) - -instance ToJSON TemplateQueryInline where - toJSON TemplateQueryInline{..} = object [ "query" .= inline - , "params" .= params - ] - -instance FromJSON TemplateQueryInline where - parseJSON = withObject "TemplateQueryInline" parse - where parse o = TemplateQueryInline - <$> o .: "query" - <*> o .: "params" - -data SearchResult a = - SearchResult { took :: Int - , timedOut :: Bool - , shards :: ShardResult - , searchHits :: SearchHits a - , aggregations :: Maybe AggregationResults - , scrollId :: Maybe ScrollId - , suggest :: Maybe NamedSuggestionResponse -- ^ Only one Suggestion request / response per Search is supported. - } - deriving (Eq, Read, Show, Generic, Typeable) - -newtype ScrollId = ScrollId Text deriving (Eq, Read, Show, Generic, Ord, ToJSON, FromJSON) - -type Score = Maybe Double - -data SearchHits a = - SearchHits { hitsTotal :: Int - , maxScore :: Score - , hits :: [Hit a] } deriving (Eq, Read, Show, Generic, Typeable) - -instance Semigroup (SearchHits a) where - (SearchHits ta ma ha) <> (SearchHits tb mb hb) = SearchHits (ta + tb) (max ma mb) (ha <> hb) - -instance Monoid (SearchHits a) where - mempty = SearchHits 0 Nothing mempty - mappend = (<>) - -data Hit a = - Hit { hitIndex :: IndexName - , hitType :: MappingName - , hitDocId :: DocId - , hitScore :: Score - , hitSource :: Maybe a - , hitHighlight :: Maybe HitHighlight } deriving (Eq, Read, Show, Generic, Typeable) - -data ShardResult = - ShardResult { shardTotal :: Int - , shardsSuccessful :: Int - , shardsFailed :: Int } deriving (Eq, Read, Show, Generic, Typeable) - -type HitHighlight = M.Map Text [Text] - -showText :: Show a => a -> Text -showText = T.pack . show - -readMay :: Read a => String -> Maybe a -readMay s = case reads s of - (a, ""):_ -> Just a - _ -> Nothing - -parseReadText :: Read a => Text -> Parser a -parseReadText = maybe mzero return . readMay . 
T.unpack
-
-
-data TermOrder = TermOrder{ termSortField :: Text
-                          , termSortOrder :: SortOrder } deriving (Eq, Read, Show, Generic, Typeable)
-
-data TermInclusion = TermInclusion Text
-                   | TermPattern Text Text deriving (Eq, Read, Show, Generic, Typeable)
-
-data CollectionMode = BreadthFirst
-                    | DepthFirst deriving (Eq, Read, Show, Generic, Typeable)
-
-data ExecutionHint = Ordinals
-                   | GlobalOrdinals
-                   | GlobalOrdinalsHash
-                   | GlobalOrdinalsLowCardinality
-                   | Map deriving (Eq, Read, Show, Generic, Typeable)
-
-data TimeInterval = Weeks
-                  | Days
-                  | Hours
-                  | Minutes
-                  | Seconds deriving Eq
-
-data Interval = Year
-              | Quarter
-              | Month
-              | Week
-              | Day
-              | Hour
-              | Minute
-              | Second
-              | FractionalInterval Float TimeInterval deriving (Eq, Read, Show, Generic, Typeable)
-
-
--- | See the Elasticsearch date math documentation for more information.
-data DateMathExpr = DateMathExpr DateMathAnchor [DateMathModifier] deriving (Eq, Read, Show, Generic, Typeable)
-
-
--- | Starting point for a date range. This along with the 'DateMathModifier's gets you the date ES will start from.
-data DateMathAnchor = DMNow
-                    | DMDate Day deriving (Eq, Read, Show, Generic, Typeable)
-
-data DateMathModifier = AddTime Int DateMathUnit
-                      | SubtractTime Int DateMathUnit
-                      | RoundDownTo DateMathUnit deriving (Eq, Read, Show, Generic, Typeable)
-
-data DateMathUnit = DMYear
-                  | DMMonth
-                  | DMWeek
-                  | DMDay
-                  | DMHour
-                  | DMMinute
-                  | DMSecond deriving (Eq, Read, Show, Generic, Typeable)
-
-instance ToJSON Version where
-  toJSON Version {..} = object ["number" .= number
-                               ,"build_hash" .= build_hash
-                               ,"build_timestamp" .= build_timestamp
-                               ,"build_snapshot" .= build_snapshot
-                               ,"lucene_version" .= lucene_version]
-
-instance FromJSON Version where
-  parseJSON = withObject "Version" parse
-    where parse o = Version
-                    <$> o .: "number"
-                    <*> o .: "build_hash"
-                    <*> o .: "build_timestamp"
-                    <*> o .: "build_snapshot"
-                    <*> o .: "lucene_version"
-
-instance ToJSON VersionNumber where
-  toJSON = toJSON . Vers.showVersion . versionNumber
-
-instance FromJSON VersionNumber where
-  parseJSON = withText "VersionNumber" (parse . T.unpack)
-    where
-      parse s = case filter (null . snd) (RP.readP_to_S Vers.parseVersion s) of
-                  [(v, _)] -> pure (VersionNumber v)
-                  [] -> fail ("Invalid version string " ++ s)
-                  xs -> fail ("Ambiguous version string " ++ s ++ " (" ++ intercalate ", " (Vers.showVersion . 
fst <$> xs) ++ ")") - -instance ToJSON TermOrder where - toJSON (TermOrder termSortField termSortOrder) = object [termSortField .= termSortOrder] - -instance ToJSON TermInclusion where - toJSON (TermInclusion x) = toJSON x - toJSON (TermPattern pattern flags) = - omitNulls [ "pattern" .= pattern - , "flags" .= flags] - -instance ToJSON CollectionMode where - toJSON BreadthFirst = "breadth_first" - toJSON DepthFirst = "depth_first" - -instance ToJSON ExecutionHint where - toJSON Ordinals = "ordinals" - toJSON GlobalOrdinals = "global_ordinals" - toJSON GlobalOrdinalsHash = "global_ordinals_hash" - toJSON GlobalOrdinalsLowCardinality = "global_ordinals_low_cardinality" - toJSON Map = "map" - -instance ToJSON Interval where - toJSON Year = "year" - toJSON Quarter = "quarter" - toJSON Month = "month" - toJSON Week = "week" - toJSON Day = "day" - toJSON Hour = "hour" - toJSON Minute = "minute" - toJSON Second = "second" - toJSON (FractionalInterval fraction interval) = toJSON $ show fraction ++ show interval - -instance Show TimeInterval where - show Weeks = "w" - show Days = "d" - show Hours = "h" - show Minutes = "m" - show Seconds = "s" - -instance Read TimeInterval where - readPrec = f =<< TR.get - where - f 'w' = return Weeks - f 'd' = return Days - f 'h' = return Hours - f 'm' = return Minutes - f 's' = return Seconds - f _ = fail "TimeInterval expected one of w, d, h, m, s" - - -instance ToJSON DateRangeAggregation where - toJSON DateRangeAggregation {..} = - omitNulls [ "field" .= draField - , "format" .= draFormat - , "ranges" .= toList draRanges - ] - -instance ToJSON DateRangeAggRange where - toJSON (DateRangeFrom e) = object [ "from" .= e ] - toJSON (DateRangeTo e) = object [ "to" .= e ] - toJSON (DateRangeFromAndTo f t) = object [ "from" .= f, "to" .= t ] - -instance ToJSON DateMathExpr where - toJSON (DateMathExpr a mods) = String (fmtA a <> mconcat (fmtMod <$> mods)) - where fmtA DMNow = "now" - fmtA (DMDate date) = (T.pack $ showGregorian date) <> "||" - fmtMod (AddTime n u) = "+" <> showText n <> fmtU u - fmtMod (SubtractTime n u) = "-" <> showText n <> fmtU u - fmtMod (RoundDownTo u) = "/" <> fmtU u - fmtU DMYear = "y" - fmtU DMMonth = "M" - fmtU DMWeek = "w" - fmtU DMDay = "d" - fmtU DMHour = "h" - fmtU DMMinute = "m" - fmtU DMSecond = "s" - - -type AggregationResults = M.Map Text Value - -class BucketAggregation a where - key :: a -> BucketValue - docCount :: a -> Int - aggs :: a -> Maybe AggregationResults - - -data Bucket a = Bucket { buckets :: [a]} deriving (Read, Show) - -data BucketValue = TextValue Text - | ScientificValue Scientific - | BoolValue Bool deriving (Read, Show) - -data MissingResult = MissingResult { missingDocCount :: Int } deriving (Show) - -data TopHitResult a = TopHitResult { tarHits :: (SearchHits a) - } deriving Show - -data TermsResult = TermsResult { termKey :: BucketValue - , termsDocCount :: Int - , termsAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateHistogramResult = DateHistogramResult { dateKey :: Int - , dateKeyStr :: Maybe Text - , dateDocCount :: Int - , dateHistogramAggs :: Maybe AggregationResults } deriving (Read, Show) - -data DateRangeResult = DateRangeResult { dateRangeKey :: Text - , dateRangeFrom :: Maybe UTCTime - , dateRangeFromAsString :: Maybe Text - , dateRangeTo :: Maybe UTCTime - , dateRangeToAsString :: Maybe Text - , dateRangeDocCount :: Int - , dateRangeAggs :: Maybe AggregationResults } deriving (Read, Show, Eq, Generic, Typeable) - -toTerms :: Text -> AggregationResults -> Maybe (Bucket 
TermsResult) -toTerms = toAggResult - -toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult) -toDateHistogram = toAggResult - -toMissing :: Text -> AggregationResults -> Maybe MissingResult -toMissing = toAggResult - -toTopHits :: (FromJSON a) => Text -> AggregationResults -> Maybe (TopHitResult a) -toTopHits = toAggResult - -toAggResult :: (FromJSON a) => Text -> AggregationResults -> Maybe a -toAggResult t a = M.lookup t a >>= deserialize - where deserialize = parseMaybe parseJSON - -instance BucketAggregation TermsResult where - key = termKey - docCount = termsDocCount - aggs = termsAggs - -instance BucketAggregation DateHistogramResult where - key = TextValue . showText . dateKey - docCount = dateDocCount - aggs = dateHistogramAggs - -instance BucketAggregation DateRangeResult where - key = TextValue . dateRangeKey - docCount = dateRangeDocCount - aggs = dateRangeAggs - -instance (FromJSON a) => FromJSON (Bucket a) where - parseJSON (Object v) = Bucket <$> - v .: "buckets" - parseJSON _ = mempty - -instance FromJSON BucketValue where - parseJSON (String t) = return $ TextValue t - parseJSON (Number s) = return $ ScientificValue s - parseJSON (Bool b) = return $ BoolValue b - parseJSON _ = mempty - -instance FromJSON MissingResult where - parseJSON = withObject "MissingResult" parse - where parse v = MissingResult <$> v .: "doc_count" - -instance FromJSON TermsResult where - parseJSON (Object v) = TermsResult <$> - v .: "key" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v ["key", "doc_count"]) - parseJSON _ = mempty - -instance FromJSON DateHistogramResult where - parseJSON (Object v) = DateHistogramResult <$> - v .: "key" <*> - v .:? "key_as_string" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v [ "key" - , "doc_count" - , "key_as_string" - ] - ) - parseJSON _ = mempty - -instance FromJSON DateRangeResult where - parseJSON = withObject "DateRangeResult" parse - where parse v = DateRangeResult <$> - v .: "key" <*> - (fmap posixMS <$> v .:? "from") <*> - v .:? "from_as_string" <*> - (fmap posixMS <$> v .:? "to") <*> - v .:? "to_as_string" <*> - v .: "doc_count" <*> - (pure $ getNamedSubAgg v [ "key" - , "from" - , "from_as_string" - , "to" - , "to_as_string" - , "doc_count" - ] - ) - -instance FromJSON POSIXMS where - parseJSON = withScientific "POSIXMS" (return . 
parse) - where parse n = let n' = truncate n :: Integer - in POSIXMS (posixSecondsToUTCTime (fromInteger (n' `div` 1000))) - -instance (FromJSON a) => FromJSON (TopHitResult a) where - parseJSON (Object v) = TopHitResult <$> - v .: "hits" - parseJSON _ = fail "Failure in FromJSON (TopHitResult a)" - -instance Semigroup Filter where - a <> b = AndFilter [a, b] defaultCache - -instance Monoid Filter where - mempty = IdentityFilter - mappend = (<>) - -instance Seminearring Filter where - a <||> b = OrFilter [a, b] defaultCache - -instance ToJSON Filter where - toJSON (AndFilter filters cache) = - object ["and" .= - object [ "filters" .= fmap toJSON filters - , "_cache" .= cache]] - - toJSON (OrFilter filters cache) = - object ["or" .= - object [ "filters" .= fmap toJSON filters - , "_cache" .= cache]] - - toJSON (NotFilter notFilter cache) = - object ["not" .= - object ["filter" .= notFilter - , "_cache" .= cache]] - - toJSON (IdentityFilter) = - object ["match_all" .= object []] - - toJSON (TermFilter (Term termFilterField termFilterValue) cache) = - object ["term" .= object base] - where base = [termFilterField .= termFilterValue, - "_cache" .= cache] - - toJSON (ExistsFilter (FieldName fieldName)) = - object ["exists" .= object - ["field" .= fieldName]] - - toJSON (BoolFilter boolMatch) = - object ["bool" .= boolMatch] - - toJSON (GeoBoundingBoxFilter bbConstraint) = - object ["geo_bounding_box" .= bbConstraint] - - toJSON (GeoDistanceFilter (GeoPoint (FieldName distanceGeoField) geoDistLatLon) - distance distanceType optimizeBbox cache) = - object ["geo_distance" .= - object ["distance" .= distance - , "distance_type" .= distanceType - , "optimize_bbox" .= optimizeBbox - , distanceGeoField .= geoDistLatLon - , "_cache" .= cache]] - - toJSON (GeoDistanceRangeFilter (GeoPoint (FieldName gddrField) drLatLon) - (DistanceRange geoDistRangeDistFrom drDistanceTo)) = - object ["geo_distance_range" .= - object ["from" .= geoDistRangeDistFrom - , "to" .= drDistanceTo - , gddrField .= drLatLon]] - - toJSON (GeoPolygonFilter (FieldName geoPolygonFilterField) latLons) = - object ["geo_polygon" .= - object [geoPolygonFilterField .= - object ["points" .= fmap toJSON latLons]]] - - toJSON (IdsFilter (MappingName mappingName) values) = - object ["ids" .= - object ["type" .= mappingName - , "values" .= fmap unpackId values]] - - toJSON (LimitFilter limit) = - object ["limit" .= object ["value" .= limit]] - - toJSON (MissingFilter (FieldName fieldName) (Existence existence) (NullValue nullValue)) = - object ["missing" .= - object [ "field" .= fieldName - , "existence" .= existence - , "null_value" .= nullValue]] - - toJSON (PrefixFilter (FieldName fieldName) fieldValue cache) = - object ["prefix" .= - object [fieldName .= fieldValue - , "_cache" .= cache]] - - toJSON (QueryFilter query False) = - object ["query" .= toJSON query ] - toJSON (QueryFilter query True) = - object ["fquery" .= - object [ "query" .= toJSON query - , "_cache" .= True ]] - - toJSON (RangeFilter (FieldName fieldName) rangeValue rangeExecution cache) = - object ["range" .= - object [ fieldName .= object (rangeValueToPair rangeValue) - , "execution" .= rangeExecution - , "_cache" .= cache]] - - toJSON (RegexpFilter (FieldName fieldName) - (Regexp regexText) flags (CacheName cacheName) cache (CacheKey cacheKey)) = - object ["regexp" .= - object [fieldName .= - object ["value" .= regexText - , "flags" .= flags] - , "_name" .= cacheName - , "_cache" .= cache - , "_cache_key" .= cacheKey]] - -instance FromJSON Filter where - parseJSON = 
withObject "Filter" parse - where parse o = andFilter `taggedWith` "and" - <|> orFilter `taggedWith` "or" - <|> notFilter `taggedWith` "not" - <|> identityFilter `taggedWith` "match_all" - <|> boolFilter `taggedWith` "bool" - <|> existsFilter `taggedWith` "exists" - <|> geoBoundingBoxFilter `taggedWith` "geo_bounding_box" - <|> geoDistanceFilter `taggedWith` "geo_distance" - <|> geoDistanceRangeFilter `taggedWith` "geo_distance_range" - <|> geoPolygonFilter `taggedWith` "geo_polygon" - <|> idsFilter `taggedWith` "ids" - <|> limitFilter `taggedWith` "limit" - <|> missingFilter `taggedWith` "missing" - <|> prefixFilter `taggedWith` "prefix" - <|> queryFilter `taggedWith` "query" - <|> fqueryFilter `taggedWith` "fquery" - <|> rangeFilter `taggedWith` "range" - <|> regexpFilter `taggedWith` "regexp" - <|> termFilter `taggedWith` "term" - where taggedWith parser k = parser =<< o .: k - andFilter o = AndFilter <$> o .: "filters" - <*> o .:? "_cache" .!= defaultCache - orFilter o = OrFilter <$> o .: "filters" - <*> o .:? "_cache" .!= defaultCache - notFilter o = NotFilter <$> o .: "filter" - <*> o .: "_cache" .!= defaultCache - identityFilter :: Object -> Parser Filter - identityFilter m - | HM.null m = pure IdentityFilter - | otherwise = fail ("Identityfilter expected empty object but got " <> show m) - boolFilter = pure . BoolFilter - existsFilter o = ExistsFilter <$> o .: "field" - geoBoundingBoxFilter = pure . GeoBoundingBoxFilter - geoDistanceFilter o = do - case HM.toList (deleteSeveral ["distance", "distance_type", "optimize_bbox", "_cache"] o) of - [(fn, v)] -> do - gp <- GeoPoint (FieldName fn) <$> parseJSON v - GeoDistanceFilter gp <$> o .: "distance" - <*> o .: "distance_type" - <*> o .: "optimize_bbox" - <*> o .:? "_cache" .!= defaultCache - _ -> fail "Could not find GeoDistanceFilter field name" - geoDistanceRangeFilter o = do - case HM.toList (deleteSeveral ["from", "to"] o) of - [(fn, v)] -> do - gp <- GeoPoint (FieldName fn) <$> parseJSON v - rng <- DistanceRange <$> o .: "from" <*> o .: "to" - return (GeoDistanceRangeFilter gp rng) - _ -> fail "Could not find GeoDistanceRangeFilter field name" - geoPolygonFilter = fieldTagged $ \fn o -> GeoPolygonFilter fn <$> o .: "points" - idsFilter o = IdsFilter <$> o .: "type" - <*> o .: "values" - limitFilter o = LimitFilter <$> o .: "value" - missingFilter o = MissingFilter <$> o .: "field" - <*> o .: "existence" - <*> o .: "null_value" - prefixFilter o = case HM.toList (HM.delete "_cache" o) of - [(fn, String v)] -> PrefixFilter (FieldName fn) v <$> o .:? "_cache" .!= defaultCache - _ -> fail "Could not parse PrefixFilter" - - queryFilter q = pure (QueryFilter q False) - fqueryFilter o = QueryFilter <$> o .: "query" <*> pure True - rangeFilter o = case HM.toList (deleteSeveral ["execution", "_cache"] o) of - [(fn, v)] -> RangeFilter (FieldName fn) - <$> parseJSON v - <*> o .: "execution" - <*> o .:? "_cache" .!= defaultCache - _ -> fail "Could not find field name for RangeFilter" - regexpFilter o = case HM.toList (deleteSeveral ["_name", "_cache", "_cache_key"] o) of - [(fn, Object o')] -> RegexpFilter (FieldName fn) - <$> o' .: "value" - <*> o' .: "flags" - <*> o .: "_name" - <*> o .:? "_cache" .!= defaultCache - <*> o .: "_cache_key" - _ -> fail "Could not find field name for RegexpFilter" - termFilter o = case HM.toList (HM.delete "_cache" o) of - [(termField, String termVal)] -> TermFilter (Term termField termVal) - <$> o .:? 
"_cache" .!= defaultCache - _ -> fail "Could not find term field for TermFilter" - -fieldTagged :: Monad m => (FieldName -> Object -> m a) -> Object -> m a -fieldTagged f o = case HM.toList o of - [(k, Object o')] -> f (FieldName k) o' - _ -> fail "Expected object with 1 field-named key" - --- Try to get an AggregationResults when we don't know the --- field name. We filter out the known keys to try to minimize the noise. -getNamedSubAgg :: Object -> [Text] -> Maybe AggregationResults -getNamedSubAgg o knownKeys = maggRes - where unknownKeys = HM.filterWithKey (\k _ -> k `notElem` knownKeys) o - maggRes - | HM.null unknownKeys = Nothing - | otherwise = Just . M.fromList $ HM.toList unknownKeys - -instance ToJSON GeoPoint where - toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) = - object [ geoPointField .= geoPointLatLon ] - - -instance ToJSON Query where - toJSON (TermQuery (Term termQueryField termQueryValue) boost) = - object [ "term" .= - object [termQueryField .= object merged]] - where - base = [ "value" .= termQueryValue ] - boosted = maybe [] (return . ("boost" .=)) boost - merged = mappend base boosted - - toJSON (TermsQuery fieldName terms) = - object [ "terms" .= object conjoined ] - where conjoined = [fieldName .= terms] - - toJSON (IdsQuery idsQueryMappingName docIds) = - object [ "ids" .= object conjoined ] - where conjoined = [ "type" .= idsQueryMappingName - , "values" .= fmap toJSON docIds ] - - toJSON (QueryQueryStringQuery qQueryStringQuery) = - object [ "query_string" .= qQueryStringQuery ] - - toJSON (QueryMatchQuery matchQuery) = - object [ "match" .= matchQuery ] - - toJSON (QueryMultiMatchQuery multiMatchQuery) = - toJSON multiMatchQuery - - toJSON (QueryBoolQuery boolQuery) = - object [ "bool" .= boolQuery ] - - toJSON (QueryBoostingQuery boostingQuery) = - object [ "boosting" .= boostingQuery ] - - toJSON (QueryCommonTermsQuery commonTermsQuery) = - object [ "common" .= commonTermsQuery ] - - toJSON (ConstantScoreFilter csFilter boost) = - object ["constant_score" .= object ["filter" .= csFilter - , "boost" .= boost]] - - toJSON (ConstantScoreQuery query boost) = - object ["constant_score" .= object ["query" .= query - , "boost" .= boost]] - - toJSON (QueryDisMaxQuery disMaxQuery) = - object [ "dis_max" .= disMaxQuery ] - - toJSON (QueryFilteredQuery qFilteredQuery) = - object [ "filtered" .= qFilteredQuery ] - - toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) = - object [ "fuzzy_like_this" .= fuzzyQuery ] - - toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) = - object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ] - - toJSON (QueryFuzzyQuery fuzzyQuery) = - object [ "fuzzy" .= fuzzyQuery ] - - toJSON (QueryHasChildQuery childQuery) = - object [ "has_child" .= childQuery ] - - toJSON (QueryHasParentQuery parentQuery) = - object [ "has_parent" .= parentQuery ] - - toJSON (QueryIndicesQuery qIndicesQuery) = - object [ "indices" .= qIndicesQuery ] - - toJSON (MatchAllQuery boost) = - object [ "match_all" .= omitNulls [ "boost" .= boost ] ] - - toJSON (QueryMoreLikeThisQuery query) = - object [ "more_like_this" .= query ] - - toJSON (QueryMoreLikeThisFieldQuery query) = - object [ "more_like_this_field" .= query ] - - toJSON (QueryNestedQuery query) = - object [ "nested" .= query ] - - toJSON (QueryPrefixQuery query) = - object [ "prefix" .= query ] - - toJSON (QueryRangeQuery query) = - object [ "range" .= query ] - - toJSON (QueryRegexpQuery query) = - object [ "regexp" .= query ] - - toJSON (QuerySimpleQueryStringQuery query) = - object [ 
"simple_query_string" .= query ] - - toJSON (QueryTemplateQueryInline templateQuery) = - object [ "template" .= templateQuery ] - -instance FromJSON Query where - parseJSON v = withObject "Query" parse v - where parse o = termQuery `taggedWith` "term" - <|> termsQuery `taggedWith` "terms" - <|> idsQuery `taggedWith` "ids" - <|> queryQueryStringQuery `taggedWith` "query_string" - <|> queryMatchQuery `taggedWith` "match" - <|> queryMultiMatchQuery - <|> queryBoolQuery `taggedWith` "bool" - <|> queryBoostingQuery `taggedWith` "boosting" - <|> queryCommonTermsQuery `taggedWith` "common" - <|> constantScoreFilter `taggedWith` "constant_score" - <|> constantScoreQuery `taggedWith` "constant_score" - <|> queryDisMaxQuery `taggedWith` "dis_max" - <|> queryFilteredQuery `taggedWith` "filtered" - <|> queryFuzzyLikeThisQuery `taggedWith` "fuzzy_like_this" - <|> queryFuzzyLikeFieldQuery `taggedWith` "fuzzy_like_this_field" - <|> queryFuzzyQuery `taggedWith` "fuzzy" - <|> queryHasChildQuery `taggedWith` "has_child" - <|> queryHasParentQuery `taggedWith` "has_parent" - <|> queryIndicesQuery `taggedWith` "indices" - <|> matchAllQuery `taggedWith` "match_all" - <|> queryMoreLikeThisQuery `taggedWith` "more_like_this" - <|> queryMoreLikeThisFieldQuery `taggedWith` "more_like_this_field" - <|> queryNestedQuery `taggedWith` "nested" - <|> queryPrefixQuery `taggedWith` "prefix" - <|> queryRangeQuery `taggedWith` "range" - <|> queryRegexpQuery `taggedWith` "regexp" - <|> querySimpleQueryStringQuery `taggedWith` "simple_query_string" - <|> queryTemplateQueryInline `taggedWith` "template" - where taggedWith parser k = parser =<< o .: k - termQuery = fieldTagged $ \(FieldName fn) o -> - TermQuery <$> (Term fn <$> o .: "value") <*> o .:? "boost" - termsQuery o = case HM.toList o of - [(fn, vs)] -> do vals <- parseJSON vs - case vals of - x:xs -> return (TermsQuery fn (x :| xs)) - _ -> fail "Expected non empty list of values" - _ -> fail "Expected object with 1 field-named key" - idsQuery o = IdsQuery <$> o .: "type" - <*> o .: "values" - queryQueryStringQuery = pure . QueryQueryStringQuery - queryMatchQuery = pure . QueryMatchQuery - queryMultiMatchQuery = QueryMultiMatchQuery <$> parseJSON v - queryBoolQuery = pure . QueryBoolQuery - queryBoostingQuery = pure . QueryBoostingQuery - queryCommonTermsQuery = pure . QueryCommonTermsQuery - constantScoreFilter o = case HM.lookup "filter" o of - Just x -> ConstantScoreFilter <$> parseJSON x - <*> o .: "boost" - _ -> fail "Does not appear to be a ConstantScoreFilter" - constantScoreQuery o = case HM.lookup "query" o of - Just x -> ConstantScoreQuery <$> parseJSON x - <*> o .: "boost" - _ -> fail "Does not appear to be a ConstantScoreQuery" - queryDisMaxQuery = pure . QueryDisMaxQuery - queryFilteredQuery = pure . QueryFilteredQuery - queryFuzzyLikeThisQuery = pure . QueryFuzzyLikeThisQuery - queryFuzzyLikeFieldQuery = pure . QueryFuzzyLikeFieldQuery - queryFuzzyQuery = pure . QueryFuzzyQuery - queryHasChildQuery = pure . QueryHasChildQuery - queryHasParentQuery = pure . QueryHasParentQuery - queryIndicesQuery = pure . QueryIndicesQuery - matchAllQuery o = MatchAllQuery <$> o .:? "boost" - queryMoreLikeThisQuery = pure . QueryMoreLikeThisQuery - queryMoreLikeThisFieldQuery = pure . QueryMoreLikeThisFieldQuery - queryNestedQuery = pure . QueryNestedQuery - queryPrefixQuery = pure . QueryPrefixQuery - queryRangeQuery = pure . QueryRangeQuery - queryRegexpQuery = pure . QueryRegexpQuery - querySimpleQueryStringQuery = pure . 
QuerySimpleQueryStringQuery - queryTemplateQueryInline = pure . QueryTemplateQueryInline - - -omitNulls :: [(Text, Value)] -> Value -omitNulls = object . filter notNull where - notNull (_, Null) = False - notNull (_, Array a) = (not . V.null) a - notNull _ = True - - -instance ToJSON SimpleQueryStringQuery where - toJSON SimpleQueryStringQuery {..} = - omitNulls (base ++ maybeAdd) - where base = [ "query" .= simpleQueryStringQuery ] - maybeAdd = [ "fields" .= simpleQueryStringField - , "default_operator" .= simpleQueryStringOperator - , "analyzer" .= simpleQueryStringAnalyzer - , "flags" .= simpleQueryStringFlags - , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded - , "locale" .= simpleQueryStringLocale ] - -instance FromJSON SimpleQueryStringQuery where - parseJSON = withObject "SimpleQueryStringQuery" parse - where parse o = SimpleQueryStringQuery <$> o .: "query" - <*> o .:? "fields" - <*> o .:? "default_operator" - <*> o .:? "analyzer" - <*> (parseFlags <$> o .:? "flags") - <*> o .:? "lowercase_expanded_terms" - <*> o .:? "locale" - parseFlags (Just (x:xs)) = Just (x :| xs) - parseFlags _ = Nothing - -instance ToJSON FieldOrFields where - toJSON (FofField fieldName) = - toJSON fieldName - toJSON (FofFields fieldNames) = - toJSON fieldNames - -instance FromJSON FieldOrFields where - parseJSON v = FofField <$> parseJSON v - <|> FofFields <$> (parseNEJSON =<< parseJSON v) - -instance ToJSON SimpleQueryFlag where - toJSON SimpleQueryAll = "ALL" - toJSON SimpleQueryNone = "NONE" - toJSON SimpleQueryAnd = "AND" - toJSON SimpleQueryOr = "OR" - toJSON SimpleQueryPrefix = "PREFIX" - toJSON SimpleQueryPhrase = "PHRASE" - toJSON SimpleQueryPrecedence = "PRECEDENCE" - toJSON SimpleQueryEscape = "ESCAPE" - toJSON SimpleQueryWhitespace = "WHITESPACE" - toJSON SimpleQueryFuzzy = "FUZZY" - toJSON SimpleQueryNear = "NEAR" - toJSON SimpleQuerySlop = "SLOP" - -instance FromJSON SimpleQueryFlag where - parseJSON = withText "SimpleQueryFlag" parse - where parse "ALL" = pure SimpleQueryAll - parse "NONE" = pure SimpleQueryNone - parse "AND" = pure SimpleQueryAnd - parse "OR" = pure SimpleQueryOr - parse "PREFIX" = pure SimpleQueryPrefix - parse "PHRASE" = pure SimpleQueryPhrase - parse "PRECEDENCE" = pure SimpleQueryPrecedence - parse "ESCAPE" = pure SimpleQueryEscape - parse "WHITESPACE" = pure SimpleQueryWhitespace - parse "FUZZY" = pure SimpleQueryFuzzy - parse "NEAR" = pure SimpleQueryNear - parse "SLOP" = pure SimpleQuerySlop - parse f = fail ("Unexpected SimpleQueryFlag: " <> show f) - -instance ToJSON RegexpQuery where - toJSON (RegexpQuery (FieldName rqQueryField) - (Regexp regexpQueryQuery) rqQueryFlags - rqQueryBoost) = - object [ rqQueryField .= omitNulls base ] - where base = [ "value" .= regexpQueryQuery - , "flags" .= rqQueryFlags - , "boost" .= rqQueryBoost ] - -instance FromJSON RegexpQuery where - parseJSON = withObject "RegexpQuery" parse - where parse = fieldTagged $ \fn o -> - RegexpQuery fn - <$> o .: "value" - <*> o .: "flags" - <*> o .:? 
"boost" - -instance ToJSON QueryStringQuery where - toJSON (QueryStringQuery qsQueryString - qsDefaultField qsOperator - qsAnalyzer qsAllowWildcard - qsLowercaseExpanded qsEnablePositionIncrements - qsFuzzyMaxExpansions qsFuzziness - qsFuzzyPrefixLength qsPhraseSlop - qsBoost qsAnalyzeWildcard - qsGeneratePhraseQueries qsMinimumShouldMatch - qsLenient qsLocale) = - omitNulls base - where - base = [ "query" .= qsQueryString - , "default_field" .= qsDefaultField - , "default_operator" .= qsOperator - , "analyzer" .= qsAnalyzer - , "allow_leading_wildcard" .= qsAllowWildcard - , "lowercase_expanded_terms" .= qsLowercaseExpanded - , "enable_position_increments" .= qsEnablePositionIncrements - , "fuzzy_max_expansions" .= qsFuzzyMaxExpansions - , "fuzziness" .= qsFuzziness - , "fuzzy_prefix_length" .= qsFuzzyPrefixLength - , "phrase_slop" .= qsPhraseSlop - , "boost" .= qsBoost - , "analyze_wildcard" .= qsAnalyzeWildcard - , "auto_generate_phrase_queries" .= qsGeneratePhraseQueries - , "minimum_should_match" .= qsMinimumShouldMatch - , "lenient" .= qsLenient - , "locale" .= qsLocale ] - -instance FromJSON QueryStringQuery where - parseJSON = withObject "QueryStringQuery" parse - where parse o = QueryStringQuery - <$> o .: "query" - <*> o .:? "default_field" - <*> o .:? "default_operator" - <*> o .:? "analyzer" - <*> o .:? "allow_leading_wildcard" - <*> o .:? "lowercase_expanded_terms" - <*> o .:? "enable_position_increments" - <*> o .:? "fuzzy_max_expansions" - <*> o .:? "fuzziness" - <*> o .:? "fuzzy_prefix_length" - <*> o .:? "phrase_slop" - <*> o .:? "boost" - <*> o .:? "analyze_wildcard" - <*> o .:? "auto_generate_phrase_queries" - <*> o .:? "minimum_should_match" - <*> o .:? "lenient" - <*> o .:? "locale" - -instance ToJSON RangeQuery where - toJSON (RangeQuery (FieldName fieldName) range boost) = - object [ fieldName .= object conjoined ] - where conjoined = [ "boost" .= boost ] ++ (rangeValueToPair range) - -instance FromJSON RangeQuery where - parseJSON = withObject "RangeQuery" parse - where parse = fieldTagged $ \fn o -> - RangeQuery fn - <$> parseJSON (Object o) - <*> o .: "boost" - -instance FromJSON RangeValue where - parseJSON = withObject "RangeValue" parse - where parse o = parseDate o - <|> parseDouble o - parseDate o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? "gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDateGtLt (GreaterThanD b) (LessThanD a)) - (Just a, _, _, Just b)-> return (RangeDateGteLt (GreaterThanEqD b) (LessThanD a)) - (_, Just a, Just b, _)-> return (RangeDateGtLte (GreaterThanD b) (LessThanEqD a)) - (_, Just a, _, Just b)-> return (RangeDateGteLte (GreaterThanEqD b) (LessThanEqD a)) - (_, _, Just a, _)-> return (RangeDateGt (GreaterThanD a)) - (Just a, _, _, _)-> return (RangeDateLt (LessThanD a)) - (_, _, _, Just a)-> return (RangeDateGte (GreaterThanEqD a)) - (_, Just a, _, _)-> return (RangeDateLte (LessThanEqD a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - parseDouble o = do lt <- o .:? "lt" - lte <- o .:? "lte" - gt <- o .:? "gt" - gte <- o .:? 
"gte" - case (lt, lte, gt, gte) of - (Just a, _, Just b, _) -> return (RangeDoubleGtLt (GreaterThan b) (LessThan a)) - (Just a, _, _, Just b)-> return (RangeDoubleGteLt (GreaterThanEq b) (LessThan a)) - (_, Just a, Just b, _)-> return (RangeDoubleGtLte (GreaterThan b) (LessThanEq a)) - (_, Just a, _, Just b)-> return (RangeDoubleGteLte (GreaterThanEq b) (LessThanEq a)) - (_, _, Just a, _)-> return (RangeDoubleGt (GreaterThan a)) - (Just a, _, _, _)-> return (RangeDoubleLt (LessThan a)) - (_, _, _, Just a)-> return (RangeDoubleGte (GreaterThanEq a)) - (_, Just a, _, _)-> return (RangeDoubleLte (LessThanEq a)) - (Nothing, Nothing, Nothing, Nothing) -> mzero - -instance ToJSON PrefixQuery where - toJSON (PrefixQuery (FieldName fieldName) queryValue boost) = - object [ fieldName .= omitNulls base ] - where base = [ "value" .= queryValue - , "boost" .= boost ] - -instance FromJSON PrefixQuery where - parseJSON = withObject "PrefixQuery" parse - where parse = fieldTagged $ \fn o -> - PrefixQuery fn - <$> o .: "value" - <*> o .:? "boost" - -instance ToJSON NestedQuery where - toJSON (NestedQuery nqPath nqScoreType nqQuery) = - object [ "path" .= nqPath - , "score_mode" .= nqScoreType - , "query" .= nqQuery ] - -instance FromJSON NestedQuery where - parseJSON = withObject "NestedQuery" parse - where parse o = NestedQuery - <$> o .: "path" - <*> o .: "score_mode" - <*> o .: "query" - -instance ToJSON MoreLikeThisFieldQuery where - toJSON (MoreLikeThisFieldQuery text (FieldName fieldName) - percent mtf mqt stopwords mindf maxdf - minwl maxwl boostTerms boost analyzer) = - object [ fieldName .= omitNulls base ] - where base = [ "like_text" .= text - , "percent_terms_to_match" .= percent - , "min_term_freq" .= mtf - , "max_query_terms" .= mqt - , "stop_words" .= stopwords - , "min_doc_freq" .= mindf - , "max_doc_freq" .= maxdf - , "min_word_length" .= minwl - , "max_word_length" .= maxwl - , "boost_terms" .= boostTerms - , "boost" .= boost - , "analyzer" .= analyzer ] - -instance FromJSON MoreLikeThisFieldQuery where - parseJSON = withObject "MoreLikeThisFieldQuery" parse - where parse = fieldTagged $ \fn o -> - MoreLikeThisFieldQuery - <$> o .: "like_text" - <*> pure fn - <*> o .:? "percent_terms_to_match" - <*> o .:? "min_term_freq" - <*> o .:? "max_query_terms" - -- <*> (optionalNE =<< o .:? "stop_words") - <*> o .:? "stop_words" - <*> o .:? "min_doc_freq" - <*> o .:? "max_doc_freq" - <*> o .:? "min_word_length" - <*> o .:? "max_word_length" - <*> o .:? "boost_terms" - <*> o .:? "boost" - <*> o .:? "analyzer" - -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) - -instance ToJSON MoreLikeThisQuery where - toJSON (MoreLikeThisQuery text fields percent - mtf mqt stopwords mindf maxdf - minwl maxwl boostTerms boost analyzer) = - omitNulls base - where base = [ "like_text" .= text - , "fields" .= fields - , "percent_terms_to_match" .= percent - , "min_term_freq" .= mtf - , "max_query_terms" .= mqt - , "stop_words" .= stopwords - , "min_doc_freq" .= mindf - , "max_doc_freq" .= maxdf - , "min_word_length" .= minwl - , "max_word_length" .= maxwl - , "boost_terms" .= boostTerms - , "boost" .= boost - , "analyzer" .= analyzer ] - -instance FromJSON MoreLikeThisQuery where - parseJSON = withObject "MoreLikeThisQuery" parse - where parse o = MoreLikeThisQuery - <$> o .: "like_text" - -- <*> (optionalNE =<< o .:? "fields") - <*> o .:? "fields" - <*> o .:? "percent_terms_to_match" - <*> o .:? "min_term_freq" - <*> o .:? "max_query_terms" - -- <*> (optionalNE =<< o .:? "stop_words") - <*> o .:? 
"stop_words" - <*> o .:? "min_doc_freq" - <*> o .:? "max_doc_freq" - <*> o .:? "min_word_length" - <*> o .:? "max_word_length" - <*> o .:? "boost_terms" - <*> o .:? "boost" - <*> o .:? "analyzer" - -- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON) - -instance ToJSON IndicesQuery where - toJSON (IndicesQuery indices query noMatch) = - omitNulls [ "indices" .= indices - , "no_match_query" .= noMatch - , "query" .= query ] - -instance FromJSON IndicesQuery where - parseJSON = withObject "IndicesQuery" parse - where parse o = IndicesQuery - <$> o .:? "indices" .!= [] - <*> o .: "query" - <*> o .:? "no_match_query" - -instance ToJSON HasParentQuery where - toJSON (HasParentQuery queryType query scoreType) = - omitNulls [ "parent_type" .= queryType - , "score_type" .= scoreType - , "query" .= query ] - -instance FromJSON HasParentQuery where - parseJSON = withObject "HasParentQuery" parse - where parse o = HasParentQuery - <$> o .: "parent_type" - <*> o .: "query" - <*> o .:? "score_type" - -instance ToJSON HasChildQuery where - toJSON (HasChildQuery queryType query scoreType) = - omitNulls [ "query" .= query - , "score_type" .= scoreType - , "type" .= queryType ] - -instance FromJSON HasChildQuery where - parseJSON = withObject "HasChildQuery" parse - where parse o = HasChildQuery - <$> o .: "type" - <*> o .: "query" - <*> o .:? "score_type" - -instance ToJSON FuzzyQuery where - toJSON (FuzzyQuery (FieldName fieldName) queryText - prefixLength maxEx fuzziness boost) = - object [ fieldName .= omitNulls base ] - where base = [ "value" .= queryText - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "boost" .= boost - , "max_expansions" .= maxEx ] - -instance FromJSON FuzzyQuery where - parseJSON = withObject "FuzzyQuery" parse - where parse = fieldTagged $ \fn o -> - FuzzyQuery fn - <$> o .: "value" - <*> o .: "prefix_length" - <*> o .: "max_expansions" - <*> o .: "fuzziness" - <*> o .:? "boost" - -instance ToJSON FuzzyLikeFieldQuery where - toJSON (FuzzyLikeFieldQuery (FieldName fieldName) - fieldText maxTerms ignoreFreq fuzziness prefixLength - boost analyzer) = - object [ fieldName .= - omitNulls [ "like_text" .= fieldText - , "max_query_terms" .= maxTerms - , "ignore_tf" .= ignoreFreq - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "analyzer" .= analyzer - , "boost" .= boost ]] - -instance FromJSON FuzzyLikeFieldQuery where - parseJSON = withObject "FuzzyLikeFieldQuery" parse - where parse = fieldTagged $ \fn o -> - FuzzyLikeFieldQuery fn - <$> o .: "like_text" - <*> o .: "max_query_terms" - <*> o .: "ignore_tf" - <*> o .: "fuzziness" - <*> o .: "prefix_length" - <*> o .: "boost" - <*> o .:? "analyzer" - -instance ToJSON FuzzyLikeThisQuery where - toJSON (FuzzyLikeThisQuery fields text maxTerms - ignoreFreq fuzziness prefixLength boost analyzer) = - omitNulls base - where base = [ "fields" .= fields - , "like_text" .= text - , "max_query_terms" .= maxTerms - , "ignore_tf" .= ignoreFreq - , "fuzziness" .= fuzziness - , "prefix_length" .= prefixLength - , "analyzer" .= analyzer - , "boost" .= boost ] - -instance FromJSON FuzzyLikeThisQuery where - parseJSON = withObject "FuzzyLikeThisQuery" parse - where parse o = FuzzyLikeThisQuery - <$> o .:? "fields" .!= [] - <*> o .: "like_text" - <*> o .: "max_query_terms" - <*> o .: "ignore_tf" - <*> o .: "fuzziness" - <*> o .: "prefix_length" - <*> o .: "boost" - <*> o .:? 
"analyzer" - -instance ToJSON FilteredQuery where - toJSON (FilteredQuery query fFilter) = - object [ "query" .= query - , "filter" .= fFilter ] - -instance FromJSON FilteredQuery where - parseJSON = withObject "FilteredQuery" parse - where parse o = FilteredQuery - <$> o .: "query" - <*> o .: "filter" - -instance ToJSON DisMaxQuery where - toJSON (DisMaxQuery queries tiebreaker boost) = - omitNulls base - where base = [ "queries" .= queries - , "boost" .= boost - , "tie_breaker" .= tiebreaker ] - -instance FromJSON DisMaxQuery where - parseJSON = withObject "DisMaxQuery" parse - where parse o = DisMaxQuery - <$> o .:? "queries" .!= [] - <*> o .: "tie_breaker" - <*> o .:? "boost" - -instance ToJSON CommonTermsQuery where - toJSON (CommonTermsQuery (FieldName fieldName) - (QueryString query) cf lfo hfo msm - boost analyzer disableCoord) = - object [fieldName .= omitNulls base ] - where base = [ "query" .= query - , "cutoff_frequency" .= cf - , "low_freq_operator" .= lfo - , "minimum_should_match" .= msm - , "boost" .= boost - , "analyzer" .= analyzer - , "disable_coord" .= disableCoord - , "high_freq_operator" .= hfo ] - -instance FromJSON CommonTermsQuery where - parseJSON = withObject "CommonTermsQuery" parse - where parse = fieldTagged $ \fn o -> - CommonTermsQuery fn - <$> o .: "query" - <*> o .: "cutoff_frequency" - <*> o .: "low_freq_operator" - <*> o .: "high_freq_operator" - <*> o .:? "minimum_should_match" - <*> o .:? "boost" - <*> o .:? "analyzer" - <*> o .:? "disable_coord" - -instance ToJSON CommonMinimumMatch where - toJSON (CommonMinimumMatch mm) = toJSON mm - toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) = - object [ "low_freq" .= lowF - , "high_freq" .= highF ] - -instance FromJSON CommonMinimumMatch where - parseJSON v = parseMinimum v - <|> parseMinimumHighLow v - where parseMinimum = fmap CommonMinimumMatch . parseJSON - parseMinimumHighLow = fmap CommonMinimumMatchHighLow . withObject "CommonMinimumMatchHighLow" (\o -> - MinimumMatchHighLow - <$> o .: "low_freq" - <*> o .: "high_freq") - - -instance ToJSON BoostingQuery where - toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) = - object [ "positive" .= bqPositiveQuery - , "negative" .= bqNegativeQuery - , "negative_boost" .= bqNegativeBoost ] - -instance FromJSON BoostingQuery where - parseJSON = withObject "BoostingQuery" parse - where parse o = BoostingQuery - <$> o .: "positive" - <*> o .: "negative" - <*> o .: "negative_boost" - -instance ToJSON BoolQuery where - toJSON (BoolQuery mustM notM shouldM bqMin boost disableCoord) = - omitNulls base - where base = [ "must" .= mustM - , "must_not" .= notM - , "should" .= shouldM - , "minimum_should_match" .= bqMin - , "boost" .= boost - , "disable_coord" .= disableCoord ] - -instance FromJSON BoolQuery where - parseJSON = withObject "BoolQuery" parse - where parse o = BoolQuery - <$> o .:? "must" .!= [] - <*> o .:? "must_not" .!= [] - <*> o .:? "should" .!= [] - <*> o .:? "minimum_should_match" - <*> o .:? "boost" - <*> o .:? 
"disable_coord" - -instance ToJSON MatchQuery where - toJSON (MatchQuery (FieldName fieldName) - (QueryString mqQueryString) booleanOperator - zeroTermsQuery cutoffFrequency matchQueryType - analyzer maxExpansions lenient boost) = - object [ fieldName .= omitNulls base ] - where base = [ "query" .= mqQueryString - , "operator" .= booleanOperator - , "zero_terms_query" .= zeroTermsQuery - , "cutoff_frequency" .= cutoffFrequency - , "type" .= matchQueryType - , "analyzer" .= analyzer - , "max_expansions" .= maxExpansions - , "lenient" .= lenient - , "boost" .= boost ] - -instance FromJSON MatchQuery where - parseJSON = withObject "MatchQuery" parse - where parse = fieldTagged $ \fn o -> - MatchQuery fn - <$> o .: "query" - <*> o .: "operator" - <*> o .: "zero_terms_query" - <*> o .:? "cutoff_frequency" - <*> o .:? "type" - <*> o .:? "analyzer" - <*> o .:? "max_expansions" - <*> o .:? "lenient" - <*> o .:? "boost" - -instance ToJSON MultiMatchQuery where - toJSON (MultiMatchQuery fields (QueryString query) boolOp - ztQ tb mmqt cf analyzer maxEx lenient) = - object ["multi_match" .= omitNulls base] - where base = [ "fields" .= fmap toJSON fields - , "query" .= query - , "operator" .= boolOp - , "zero_terms_query" .= ztQ - , "tie_breaker" .= tb - , "type" .= mmqt - , "cutoff_frequency" .= cf - , "analyzer" .= analyzer - , "max_expansions" .= maxEx - , "lenient" .= lenient ] - -instance FromJSON MultiMatchQuery where - parseJSON = withObject "MultiMatchQuery" parse - where parse raw = do o <- raw .: "multi_match" - MultiMatchQuery - <$> o .:? "fields" .!= [] - <*> o .: "query" - <*> o .: "operator" - <*> o .: "zero_terms_query" - <*> o .:? "tie_breaker" - <*> o .:? "type" - <*> o .:? "cutoff_frequency" - <*> o .:? "analyzer" - <*> o .:? "max_expansions" - <*> o .:? 
"lenient" - -instance ToJSON MultiMatchQueryType where - toJSON MultiMatchBestFields = "best_fields" - toJSON MultiMatchMostFields = "most_fields" - toJSON MultiMatchCrossFields = "cross_fields" - toJSON MultiMatchPhrase = "phrase" - toJSON MultiMatchPhrasePrefix = "phrase_prefix" - -instance FromJSON MultiMatchQueryType where - parseJSON = withText "MultiMatchPhrasePrefix" parse - where parse "best_fields" = pure MultiMatchBestFields - parse "most_fields" = pure MultiMatchMostFields - parse "cross_fields" = pure MultiMatchCrossFields - parse "phrase" = pure MultiMatchPhrase - parse "phrase_prefix" = pure MultiMatchPhrasePrefix - parse t = fail ("Unexpected MultiMatchPhrasePrefix: " <> show t) - -instance ToJSON BooleanOperator where - toJSON And = String "and" - toJSON Or = String "or" - -instance FromJSON BooleanOperator where - parseJSON = withText "BooleanOperator" parse - where parse "and" = pure And - parse "or" = pure Or - parse o = fail ("Unexpected BooleanOperator: " <> show o) - -instance ToJSON ZeroTermsQuery where - toJSON ZeroTermsNone = String "none" - toJSON ZeroTermsAll = String "all" - -instance FromJSON ZeroTermsQuery where - parseJSON = withText "ZeroTermsQuery" parse - where parse "none" = pure ZeroTermsNone - parse "all" = pure ZeroTermsAll - parse q = fail ("Unexpected ZeroTermsQuery: " <> show q) - -instance ToJSON MatchQueryType where - toJSON MatchPhrase = "phrase" - toJSON MatchPhrasePrefix = "phrase_prefix" - -instance FromJSON MatchQueryType where - parseJSON = withText "MatchQueryType" parse - where parse "phrase" = pure MatchPhrase - parse "phrase_prefix" = pure MatchPhrasePrefix - parse t = fail ("Unexpected MatchQueryType: " <> show t) - -instance FromJSON Status where - parseJSON (Object v) = Status <$> - v .:? "ok" <*> - (v .:? 
"status" .!= 200) <*> - v .: "name" <*> - v .: "version" <*> - v .: "tagline" - parseJSON _ = empty - - -instance ToJSON IndexSettings where - toJSON (IndexSettings s r) = object ["settings" .= - object ["index" .= - object ["number_of_shards" .= s, "number_of_replicas" .= r] - ] - ] - -instance FromJSON IndexSettings where - parseJSON = withObject "IndexSettings" parse - where parse o = do s <- o .: "settings" - i <- s .: "index" - IndexSettings <$> i .: "number_of_shards" - <*> i .: "number_of_replicas" - -instance ToJSON UpdatableIndexSetting where - toJSON (NumberOfReplicas x) = oPath ("index" :| ["number_of_replicas"]) x - toJSON (AutoExpandReplicas x) = oPath ("index" :| ["auto_expand_replicas"]) x - toJSON (RefreshInterval x) = oPath ("index" :| ["refresh_interval"]) (NominalDiffTimeJSON x) - toJSON (IndexConcurrency x) = oPath ("index" :| ["concurrency"]) x - toJSON (FailOnMergeFailure x) = oPath ("index" :| ["fail_on_merge_failure"]) x - toJSON (TranslogFlushThresholdOps x) = oPath ("index" :| ["translog", "flush_threshold_ops"]) x - toJSON (TranslogFlushThresholdSize x) = oPath ("index" :| ["translog", "flush_threshold_size"]) x - toJSON (TranslogFlushThresholdPeriod x) = oPath ("index" :| ["translog", "flush_threshold_period"]) (NominalDiffTimeJSON x) - toJSON (TranslogDisableFlush x) = oPath ("index" :| ["translog", "disable_flush"]) x - toJSON (CacheFilterMaxSize x) = oPath ("index" :| ["cache", "filter", "max_size"]) x - toJSON (CacheFilterExpire x) = oPath ("index" :| ["cache", "filter", "expire"]) (NominalDiffTimeJSON <$> x) - toJSON (GatewaySnapshotInterval x) = oPath ("index" :| ["gateway", "snapshot_interval"]) (NominalDiffTimeJSON x) - toJSON (RoutingAllocationInclude fs) = oPath ("index" :| ["routing", "allocation", "include"]) (attrFilterJSON fs) - toJSON (RoutingAllocationExclude fs) = oPath ("index" :| ["routing", "allocation", "exclude"]) (attrFilterJSON fs) - toJSON (RoutingAllocationRequire fs) = oPath ("index" :| ["routing", "allocation", "require"]) (attrFilterJSON fs) - toJSON (RoutingAllocationEnable x) = oPath ("index" :| ["routing", "allocation", "enable"]) x - toJSON (RoutingAllocationShardsPerNode x) = oPath ("index" :| ["routing", "allocation", "total_shards_per_node"]) x - toJSON (RecoveryInitialShards x) = oPath ("index" :| ["recovery", "initial_shards"]) x - toJSON (GCDeletes x) = oPath ("index" :| ["gc_deletes"]) (NominalDiffTimeJSON x) - toJSON (TTLDisablePurge x) = oPath ("index" :| ["ttl", "disable_purge"]) x - toJSON (TranslogFSType x) = oPath ("index" :| ["translog", "fs", "type"]) x - toJSON (IndexCompoundFormat x) = oPath ("index" :| ["compound_format"]) x - toJSON (IndexCompoundOnFlush x) = oPath ("index" :| ["compound_on_flush"]) x - toJSON (WarmerEnabled x) = oPath ("index" :| ["warmer", "enabled"]) x - toJSON (BlocksReadOnly x) = oPath ("blocks" :| ["read_only"]) x - toJSON (BlocksRead x) = oPath ("blocks" :| ["read"]) x - toJSON (BlocksWrite x) = oPath ("blocks" :| ["write"]) x - toJSON (BlocksMetaData x) = oPath ("blocks" :| ["metadata"]) x - -instance FromJSON UpdatableIndexSetting where - parseJSON = withObject "UpdatableIndexSetting" parse - where parse o = numberOfReplicas `taggedAt` ["index", "number_of_replicas"] - <|> autoExpandReplicas `taggedAt` ["index", "auto_expand_replicas"] - <|> refreshInterval `taggedAt` ["index", "refresh_interval"] - <|> indexConcurrency `taggedAt` ["index", "concurrency"] - <|> failOnMergeFailure `taggedAt` ["index", "fail_on_merge_failure"] - <|> translogFlushThresholdOps `taggedAt` ["index", 
"translog", "flush_threshold_ops"] - <|> translogFlushThresholdSize `taggedAt` ["index", "translog", "flush_threshold_size"] - <|> translogFlushThresholdPeriod `taggedAt` ["index", "translog", "flush_threshold_period"] - <|> translogDisableFlush `taggedAt` ["index", "translog", "disable_flush"] - <|> cacheFilterMaxSize `taggedAt` ["index", "cache", "filter", "max_size"] - <|> cacheFilterExpire `taggedAt` ["index", "cache", "filter", "expire"] - <|> gatewaySnapshotInterval `taggedAt` ["index", "gateway", "snapshot_interval"] - <|> routingAllocationInclude `taggedAt` ["index", "routing", "allocation", "include"] - <|> routingAllocationExclude `taggedAt` ["index", "routing", "allocation", "exclude"] - <|> routingAllocationRequire `taggedAt` ["index", "routing", "allocation", "require"] - <|> routingAllocationEnable `taggedAt` ["index", "routing", "allocation", "enable"] - <|> routingAllocationShardsPerNode `taggedAt` ["index", "routing", "allocation", "total_shards_per_node"] - <|> recoveryInitialShards `taggedAt` ["index", "recovery", "initial_shards"] - <|> gcDeletes `taggedAt` ["index", "gc_deletes"] - <|> ttlDisablePurge `taggedAt` ["index", "ttl", "disable_purge"] - <|> translogFSType `taggedAt` ["index", "translog", "fs", "type"] - <|> compoundFormat `taggedAt` ["index", "compound_format"] - <|> compoundOnFlush `taggedAt` ["index", "compound_on_flush"] - <|> warmerEnabled `taggedAt` ["index", "warmer", "enabled"] - <|> blocksReadOnly `taggedAt` ["blocks", "read_only"] - <|> blocksRead `taggedAt` ["blocks", "read"] - <|> blocksWrite `taggedAt` ["blocks", "write"] - <|> blocksMetaData `taggedAt` ["blocks", "metadata"] - where taggedAt f ks = taggedAt' f (Object o) ks - taggedAt' f v [] = f =<< (parseJSON v <|> (parseJSON (unStringlyTypeJSON v))) - taggedAt' f v (k:ks) = withObject "Object" (\o -> do v' <- o .: k - taggedAt' f v' ks) v - numberOfReplicas = pure . NumberOfReplicas - autoExpandReplicas = pure . AutoExpandReplicas - refreshInterval = pure . RefreshInterval . ndtJSON - indexConcurrency = pure . IndexConcurrency - failOnMergeFailure = pure . FailOnMergeFailure - translogFlushThresholdOps = pure . TranslogFlushThresholdOps - translogFlushThresholdSize = pure . TranslogFlushThresholdSize - translogFlushThresholdPeriod = pure . TranslogFlushThresholdPeriod . ndtJSON - translogDisableFlush = pure . TranslogDisableFlush - cacheFilterMaxSize = pure . CacheFilterMaxSize - cacheFilterExpire = pure . CacheFilterExpire . fmap ndtJSON - gatewaySnapshotInterval = pure . GatewaySnapshotInterval . ndtJSON - routingAllocationInclude = fmap RoutingAllocationInclude . parseAttrFilter - routingAllocationExclude = fmap RoutingAllocationExclude . parseAttrFilter - routingAllocationRequire = fmap RoutingAllocationRequire . parseAttrFilter - routingAllocationEnable = pure . RoutingAllocationEnable - routingAllocationShardsPerNode = pure . RoutingAllocationShardsPerNode - recoveryInitialShards = pure . RecoveryInitialShards - gcDeletes = pure . GCDeletes . ndtJSON - ttlDisablePurge = pure . TTLDisablePurge - translogFSType = pure . TranslogFSType - compoundFormat = pure . IndexCompoundFormat - compoundOnFlush = pure . IndexCompoundOnFlush - warmerEnabled = pure . WarmerEnabled - blocksReadOnly = pure . BlocksReadOnly - blocksRead = pure . BlocksRead - blocksWrite = pure . BlocksWrite - blocksMetaData = pure . 
BlocksMetaData
-
-instance FromJSON IndexSettingsSummary where
-  parseJSON = withObject "IndexSettingsSummary" parse
-    where parse o = case HM.toList o of
-                      [(ixn, v@(Object o'))] -> IndexSettingsSummary (IndexName ixn)
-                                                <$> parseJSON v
-                                                <*> (fmap (filter (not . redundant)) . parseSettings =<< o' .: "settings")
-                      _ -> fail "Expected single-key object with index name"
-          redundant (NumberOfReplicas _) = True
-          redundant _ = False
-
--- | For some reason in several settings APIs, all leaf values get returned
--- as strings. This function attempts to recover from this for all
--- non-recursive JSON types. If nothing can be done, the value is left alone.
-unStringlyTypeJSON :: Value -> Value
-unStringlyTypeJSON (String "true") = Bool True
-unStringlyTypeJSON (String "false") = Bool False
-unStringlyTypeJSON (String "null") = Null
-unStringlyTypeJSON v@(String t) = case readMay (T.unpack t) of
-  Just n  -> Number n
-  Nothing -> v
-unStringlyTypeJSON v = v
-
-
-parseSettings :: Object -> Parser [UpdatableIndexSetting]
-parseSettings o = do
-  o' <- o .: "index"
-  -- slice the index object into singleton hashmaps and try to parse each
-  parses <- forM (HM.toList o') $ \(k, v) -> do
-    -- blocks are now nested into the "index" key, which is not how they're serialized
-    let atRoot = Object (HM.singleton k v)
-    let atIndex = Object (HM.singleton "index" atRoot)
-    optional (parseJSON atRoot <|> parseJSON atIndex)
-  return (catMaybes parses)
-
-oPath :: ToJSON a => NonEmpty Text -> a -> Value
-oPath (k :| [])    v = object [k .= v]
-oPath (k :| (h:t)) v = object [k .= oPath (h :| t) v]
-
-attrFilterJSON :: NonEmpty NodeAttrFilter -> Value
-attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs)
-                           | NodeAttrFilter (NodeAttrName n) vs <- toList fs]
-
-parseAttrFilter :: Value -> Parser (NonEmpty NodeAttrFilter)
-parseAttrFilter = withObject "NonEmpty NodeAttrFilter" parse
-  where parse o = case HM.toList o of
-                    []   -> fail "Expected non-empty list of NodeAttrFilters"
-                    x:xs -> DT.mapM (uncurry parse') (x :| xs)
-        parse' n = withText "Text" $ \t ->
-          case T.splitOn "," t of
-            fv:fvs -> return (NodeAttrFilter (NodeAttrName n) (fv :| fvs))
-            []     -> fail "Expected non-empty list of filter values"
-
-instance ToJSON ReplicaBounds where
-  toJSON (ReplicasBounded a b)    = String (showText a <> "-" <> showText b)
-  toJSON (ReplicasLowerBounded a) = String (showText a <> "-all")
-  toJSON ReplicasUnbounded        = Bool False
-
-instance FromJSON ReplicaBounds where
-  parseJSON v = withText "ReplicaBounds" parseText v
-            <|> withBool "ReplicaBounds" parseBool v
-    where parseText t = case T.splitOn "-" t of
-                          [a, "all"] -> ReplicasLowerBounded <$> parseReadText a
-                          [a, b]     -> ReplicasBounded <$> parseReadText a
-                                                        <*> parseReadText b
-                          _ -> fail ("Could not parse ReplicaBounds: " <> show t)
-          parseBool False = pure ReplicasUnbounded
-          parseBool _     = fail "ReplicasUnbounded cannot be represented with True"
-
-instance ToJSON AllocationPolicy where
-  toJSON AllocAll = String "all"
-  toJSON AllocPrimaries = String "primaries"
-  toJSON AllocNewPrimaries = String "new_primaries"
-  toJSON AllocNone = String "none"
-
-instance FromJSON AllocationPolicy where
-  parseJSON = withText "AllocationPolicy" parse
-    where parse "all" = pure AllocAll
-          parse "primaries" = pure AllocPrimaries
-          parse "new_primaries" = pure AllocNewPrimaries
-          parse "none" = pure AllocNone
-          parse t = fail ("Invalid AllocationPolicy: " <> show t)
-
-instance ToJSON InitialShardCount where
-  toJSON QuorumShards = String "quorum"
-  toJSON QuorumMinus1Shards =
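The conservatism of unStringlyTypeJSON is easy to check from GHCi: only scalar strings it can positively reinterpret get rewritten, and everything else passes through untouched:

-- >>> unStringlyTypeJSON (String "true")
-- Bool True
-- >>> unStringlyTypeJSON (String "1.5")
-- Number 1.5
-- >>> unStringlyTypeJSON (String "banana")
-- String "banana"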
String "quorum-1" - toJSON FullShards = String "full" - toJSON FullMinus1Shards = String "full-1" - toJSON (ExplicitShards x) = toJSON x - -instance FromJSON InitialShardCount where - parseJSON v = withText "InitialShardCount" parseText v - <|> ExplicitShards <$> parseJSON v - where parseText "quorum" = pure QuorumShards - parseText "quorum-1" = pure QuorumMinus1Shards - parseText "full" = pure FullShards - parseText "full-1" = pure FullMinus1Shards - parseText _ = mzero - -instance ToJSON FSType where - toJSON FSSimple = "simple" - toJSON FSBuffered = "buffered" - -instance FromJSON FSType where - parseJSON = withText "FSType" parse - where parse "simple" = pure FSSimple - parse "buffered" = pure FSBuffered - parse t = fail ("Invalid FSType: " <> show t) - -instance ToJSON CompoundFormat where - toJSON (CompoundFileFormat x) = Bool x - toJSON (MergeSegmentVsTotalIndex x) = toJSON x - -instance FromJSON CompoundFormat where - parseJSON v = CompoundFileFormat <$> parseJSON v - <|> MergeSegmentVsTotalIndex <$> parseJSON v - -instance ToJSON NominalDiffTimeJSON where - toJSON (NominalDiffTimeJSON t) = String (showText (round t :: Integer) <> "s") - -instance FromJSON NominalDiffTimeJSON where - parseJSON = withText "NominalDiffTime" parse - where parse t = case T.takeEnd 1 t of - "s" -> NominalDiffTimeJSON . fromInteger <$> parseReadText (T.dropEnd 1 t) - _ -> fail "Invalid or missing NominalDiffTime unit (expected s)" - -instance ToJSON IndexTemplate where - toJSON (IndexTemplate p s m) = merge - (object [ "template" .= p - , "mappings" .= foldl' merge (object []) m - ]) - (toJSON s) - where - merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2 - merge o Null = o - merge _ _ = undefined - -instance (FromJSON a) => FromJSON (EsResult a) where - parseJSON jsonVal@(Object v) = do - found <- v .:? "found" .!= False - fr <- if found - then parseJSON jsonVal - else return Nothing - EsResult <$> v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - pure fr - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (EsResultFound a) where - parseJSON (Object v) = EsResultFound <$> - v .: "_version" <*> - v .: "_source" - parseJSON _ = empty - -instance FromJSON EsError where - parseJSON (Object v) = EsError <$> - v .: "status" <*> - (v .: "error" <|> (v .: "error" >>= (.: "reason"))) - parseJSON _ = empty - -instance FromJSON IndexAliasesSummary where - parseJSON = withObject "IndexAliasesSummary" parse - where parse o = IndexAliasesSummary . mconcat <$> mapM (uncurry go) (HM.toList o) - go ixn = withObject "index aliases" $ \ia -> do - aliases <- ia .:? "aliases" .!= mempty - forM (HM.toList aliases) $ \(aName, v) -> do - let indexAlias = IndexAlias (IndexName ixn) (IndexAliasName (IndexName aName)) - IndexAliasSummary indexAlias <$> parseJSON v - - -instance ToJSON IndexAliasAction where - toJSON (AddAlias ia opts) = object ["add" .= (iaObj <> optsObj)] - where Object iaObj = toJSON ia - Object optsObj = toJSON opts - toJSON (RemoveAlias ia) = object ["remove" .= iaObj] - where Object iaObj = toJSON ia - -instance ToJSON IndexAlias where - toJSON IndexAlias {..} = object ["index" .= srcIndex - , "alias" .= indexAlias - ] - -instance ToJSON IndexAliasCreate where - toJSON IndexAliasCreate {..} = Object (filterObj <> routingObj) - where filterObj = maybe mempty (HM.singleton "filter" . 
toJSON) aliasCreateFilter - Object routingObj = maybe (Object mempty) toJSON aliasCreateRouting - -instance ToJSON AliasRouting where - toJSON (AllAliasRouting v) = object ["routing" .= v] - toJSON (GranularAliasRouting srch idx) = object (catMaybes prs) - where prs = [("search_routing" .=) <$> srch - ,("index_routing" .=) <$> idx] - -instance FromJSON AliasRouting where - parseJSON = withObject "AliasRouting" parse - where parse o = parseAll o <|> parseGranular o - parseAll o = AllAliasRouting <$> o .: "routing" - parseGranular o = do - sr <- o .:? "search_routing" - ir <- o .:? "index_routing" - if isNothing sr && isNothing ir - then fail "Both search_routing and index_routing can't be blank" - else return (GranularAliasRouting sr ir) - -instance FromJSON IndexAliasCreate where - parseJSON v = withObject "IndexAliasCreate" parse v - where parse o = IndexAliasCreate <$> optional (parseJSON v) - <*> o .:? "filter" - -instance ToJSON SearchAliasRouting where - toJSON (SearchAliasRouting rvs) = toJSON (T.intercalate "," (routingValue <$> toList rvs)) - -instance FromJSON SearchAliasRouting where - parseJSON = withText "SearchAliasRouting" parse - where parse t = SearchAliasRouting <$> parseNEJSON (String <$> T.splitOn "," t) + deriving (Eq, Show) instance ToJSON Search where toJSON (Search query sFilter sort searchAggs highlight sTrackSortScores sFrom sSize _ sFields sSource sSuggest) = @@ -3154,346 +441,6 @@ instance ToJSON Search where , "suggest" .= sSuggest] -instance ToJSON Source where - toJSON NoSource = toJSON False - toJSON (SourcePatterns patterns) = toJSON patterns - toJSON (SourceIncludeExclude incl excl) = object [ "include" .= incl, "exclude" .= excl ] - -instance ToJSON PatternOrPatterns where - toJSON (PopPattern pattern) = toJSON pattern - toJSON (PopPatterns patterns) = toJSON patterns - -instance ToJSON Include where - toJSON (Include patterns) = toJSON patterns - -instance ToJSON Exclude where - toJSON (Exclude patterns) = toJSON patterns - -instance ToJSON Pattern where - toJSON (Pattern pattern) = toJSON pattern - - -instance ToJSON FieldHighlight where - toJSON (FieldHighlight (FieldName fName) (Just fSettings)) = - object [ fName .= fSettings ] - toJSON (FieldHighlight (FieldName fName) Nothing) = - object [ fName .= emptyObject ] - -instance ToJSON Highlights where - toJSON (Highlights global fields) = - omitNulls (("fields" .= fields) - : highlightSettingsPairs global) - -instance ToJSON HighlightSettings where - toJSON hs = omitNulls (highlightSettingsPairs (Just hs)) - -highlightSettingsPairs :: Maybe HighlightSettings -> [Pair] -highlightSettingsPairs Nothing = [] -highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh) -highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph) -highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh) - - -plainHighPairs :: Maybe PlainHighlight -> [Pair] -plainHighPairs Nothing = [] -plainHighPairs (Just (PlainHighlight plCom plNonPost)) = - [ "type" .= String "plain"] - ++ commonHighlightPairs plCom - ++ nonPostingsToPairs plNonPost - -postHighPairs :: Maybe PostingsHighlight -> [Pair] -postHighPairs Nothing = [] -postHighPairs (Just (PostingsHighlight pCom)) = - ("type" .= String "postings") - : commonHighlightPairs pCom - -fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair] -fastVectorHighPairs Nothing = [] -fastVectorHighPairs (Just - (FastVectorHighlight fvCom fvNonPostSettings fvBoundChars - fvBoundMaxScan fvFragOff fvMatchedFields - fvPhraseLim)) = - [ "type" .= 
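SearchAliasRouting collapses to a single comma-joined string on the wire and is split back apart on the way in, so multi-valued routing survives a round trip. A sketch, assuming the RoutingValue newtype that routingValue unwraps:

-- >>> toJSON (SearchAliasRouting (RoutingValue "a" :| [RoutingValue "b"]))
-- String "a,b"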
String "fvh" - , "boundary_chars" .= fvBoundChars - , "boundary_max_scan" .= fvBoundMaxScan - , "fragment_offset" .= fvFragOff - , "matched_fields" .= fvMatchedFields - , "phraseLimit" .= fvPhraseLim] - ++ commonHighlightPairs fvCom - ++ nonPostingsToPairs fvNonPostSettings - -deleteSeveral :: (Eq k, Hashable k) => [k] -> HM.HashMap k v -> HM.HashMap k v -deleteSeveral ks hm = foldr HM.delete hm ks - -commonHighlightPairs :: Maybe CommonHighlight -> [Pair] -commonHighlightPairs Nothing = [] -commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder - chNoMatchSize chHighlightQuery - chRequireFieldMatch)) = - [ "order" .= chScore - , "force_source" .= chForceSource - , "encoder" .= chEncoder - , "no_match_size" .= chNoMatchSize - , "highlight_query" .= chHighlightQuery - , "require_fieldMatch" .= chRequireFieldMatch] - ++ highlightTagToPairs chTag - - -nonPostingsToPairs :: Maybe NonPostings -> [Pair] -nonPostingsToPairs Nothing = [] -nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) = - [ "fragment_size" .= npFragSize - , "number_of_fragments" .= npNumOfFrags] - -parseNEJSON :: (FromJSON a) => [Value] -> Parser (NonEmpty a) -parseNEJSON [] = fail "Expected non-empty list" -parseNEJSON (x:xs) = DT.mapM parseJSON (x :| xs) - - -instance ToJSON HighlightEncoder where - toJSON DefaultEncoder = String "default" - toJSON HTMLEncoder = String "html" - -highlightTagToPairs :: Maybe HighlightTag -> [Pair] -highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"] -highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre - , "post_tags" .= post] -highlightTagToPairs Nothing = [] - -instance ToJSON SortSpec where - toJSON (DefaultSortSpec - (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped - dsSortMode dsMissingSort dsNestedFilter)) = - object [dsSortFieldName .= omitNulls base] where - base = [ "order" .= dsSortOrder - , "ignore_unmapped" .= dsIgnoreUnmapped - , "mode" .= dsSortMode - , "missing" .= dsMissingSort - , "nested_filter" .= dsNestedFilter ] - - toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) = - object [ "unit" .= units - , field .= gdsLatLon - , "order" .= gdsSortOrder ] - - -instance ToJSON SortOrder where - toJSON Ascending = String "asc" - toJSON Descending = String "desc" - - -instance ToJSON SortMode where - toJSON SortMin = String "min" - toJSON SortMax = String "max" - toJSON SortSum = String "sum" - toJSON SortAvg = String "avg" - - -instance ToJSON Missing where - toJSON LastMissing = String "_last" - toJSON FirstMissing = String "_first" - toJSON (CustomMissing txt) = String txt - - -instance ToJSON ScoreType where - toJSON ScoreTypeMax = "max" - toJSON ScoreTypeAvg = "avg" - toJSON ScoreTypeSum = "sum" - toJSON ScoreTypeNone = "none" - -instance FromJSON ScoreType where - parseJSON = withText "ScoreType" parse - where parse "max" = pure ScoreTypeMax - parse "avg" = pure ScoreTypeAvg - parse "sum" = pure ScoreTypeSum - parse "none" = pure ScoreTypeNone - parse t = fail ("Unexpected ScoreType: " <> show t) - -instance ToJSON Distance where - toJSON (Distance dCoefficient dUnit) = - String boltedTogether where - coefText = showText dCoefficient - (String unitText) = toJSON dUnit - boltedTogether = mappend coefText unitText - -instance FromJSON Distance where - parseJSON = withText "Distance" parse - where parse t = Distance <$> parseCoeff nT - <*> parseJSON (String unitT) - where (nT, unitT) = T.span validForNumber t - -- may be a better way to do 
this - validForNumber '-' = True - validForNumber '.' = True - validForNumber 'e' = True - validForNumber c = isNumber c - parseCoeff "" = fail "Empty string cannot be parsed as number" - parseCoeff s = return (read (T.unpack s)) - -instance ToJSON DistanceUnit where - toJSON Miles = String "mi" - toJSON Yards = String "yd" - toJSON Feet = String "ft" - toJSON Inches = String "in" - toJSON Kilometers = String "km" - toJSON Meters = String "m" - toJSON Centimeters = String "cm" - toJSON Millimeters = String "mm" - toJSON NauticalMiles = String "nmi" - - -instance FromJSON DistanceUnit where - parseJSON = withText "DistanceUnit" parse - where parse "mi" = pure Miles - parse "yd" = pure Yards - parse "ft" = pure Feet - parse "in" = pure Inches - parse "km" = pure Kilometers - parse "m" = pure Meters - parse "cm" = pure Centimeters - parse "mm" = pure Millimeters - parse "nmi" = pure NauticalMiles - parse u = fail ("Unrecognized DistanceUnit: " <> show u) - -instance ToJSON DistanceType where - toJSON Arc = String "arc" - toJSON SloppyArc = String "sloppy_arc" - toJSON Plane = String "plane" - -instance FromJSON DistanceType where - parseJSON = withText "DistanceType" parse - where parse "arc" = pure Arc - parse "sloppy_arc" = pure SloppyArc - parse "plane" = pure Plane - parse t = fail ("Unrecognized DistanceType: " <> show t) - - -instance ToJSON OptimizeBbox where - toJSON NoOptimizeBbox = String "none" - toJSON (OptimizeGeoFilterType gft) = toJSON gft - -instance FromJSON OptimizeBbox where - parseJSON v = withText "NoOptimizeBbox" parseNoOptimize v - <|> parseOptimize v - where parseNoOptimize "none" = pure NoOptimizeBbox - parseNoOptimize _ = mzero - parseOptimize = fmap OptimizeGeoFilterType . parseJSON - -instance ToJSON GeoBoundingBoxConstraint where - toJSON (GeoBoundingBoxConstraint - (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') = - object [gbbcGeoBBField .= gbbcConstraintBox - , "_cache" .= cache - , "type" .= type'] - -instance FromJSON GeoBoundingBoxConstraint where - parseJSON = withObject "GeoBoundingBoxConstraint" parse - where parse o = case HM.toList (deleteSeveral ["type", "_cache"] o) of - [(fn, v)] -> GeoBoundingBoxConstraint (FieldName fn) - <$> parseJSON v - <*> o .:? 
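Distance uses the same bolt-two-strings-together trick: showText renders the coefficient and the unit is appended, while the parser peels numeric characters off the front with T.span. A GHCi sketch, assuming the usual coefficient and unit field names:

-- >>> toJSON (Distance 5.0 Kilometers)
-- String "5.0km"
-- >>> parseMaybe parseJSON (String "5.0km") :: Maybe Distance
-- Just (Distance {coefficient = 5.0, unit = Kilometers})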
"_cache" .!= defaultCache - <*> o .: "type" - _ -> fail "Could not find field name for GeoBoundingBoxConstraint" - -instance ToJSON GeoFilterType where - toJSON GeoFilterMemory = String "memory" - toJSON GeoFilterIndexed = String "indexed" - -instance FromJSON GeoFilterType where - parseJSON = withText "GeoFilterType" parse - where parse "memory" = pure GeoFilterMemory - parse "indexed" = pure GeoFilterIndexed - parse t = fail ("Unrecognized GeoFilterType: " <> show t) - -instance ToJSON GeoBoundingBox where - toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) = - object ["top_left" .= gbbTopLeft - , "bottom_right" .= gbbBottomRight] - -instance FromJSON GeoBoundingBox where - parseJSON = withObject "GeoBoundingBox" parse - where parse o = GeoBoundingBox - <$> o .: "top_left" - <*> o .: "bottom_right" - -instance ToJSON LatLon where - toJSON (LatLon lLat lLon) = - object ["lat" .= lLat - , "lon" .= lLon] - -instance FromJSON LatLon where - parseJSON = withObject "LatLon" parse - where parse o = LatLon <$> o .: "lat" - <*> o .: "lon" - --- index for smaller ranges, fielddata for longer ranges -instance ToJSON RangeExecution where - toJSON RangeExecutionIndex = "index" - toJSON RangeExecutionFielddata = "fielddata" - - -instance FromJSON RangeExecution where - parseJSON = withText "RangeExecution" parse - where parse "index" = pure RangeExecutionIndex - parse "fielddata" = pure RangeExecutionFielddata - parse t = error ("Unrecognized RangeExecution " <> show t) - -instance ToJSON RegexpFlags where - toJSON AllRegexpFlags = String "ALL" - toJSON NoRegexpFlags = String "NONE" - toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs - where flagStrs = map flagStr . nub $ h:fs - flagStr AnyString = "ANYSTRING" - flagStr Automaton = "AUTOMATON" - flagStr Complement = "COMPLEMENT" - flagStr Empty = "EMPTY" - flagStr Intersection = "INTERSECTION" - flagStr Interval = "INTERVAL" - -instance FromJSON RegexpFlags where - parseJSON = withText "RegexpFlags" parse - where parse "ALL" = pure AllRegexpFlags - parse "NONE" = pure NoRegexpFlags - parse t = SomeRegexpFlags <$> parseNEJSON (String <$> T.splitOn "|" t) - -instance FromJSON RegexpFlag where - parseJSON = withText "RegexpFlag" parse - where parse "ANYSTRING" = pure AnyString - parse "AUTOMATON" = pure Automaton - parse "COMPLEMENT" = pure Complement - parse "EMPTY" = pure Empty - parse "INTERSECTION" = pure Intersection - parse "INTERVAL" = pure Interval - parse f = fail ("Unknown RegexpFlag: " <> show f) - -instance ToJSON Term where - toJSON (Term field value) = object ["term" .= object - [field .= value]] - -instance FromJSON Term where - parseJSON = withObject "Term" parse - where parse o = do termObj <- o .: "term" - case HM.toList termObj of - [(fn, v)] -> Term fn <$> parseJSON v - _ -> fail "Expected object with 1 field-named key" - -instance ToJSON BoolMatch where - toJSON (MustMatch term cache) = object ["must" .= term, - "_cache" .= cache] - toJSON (MustNotMatch term cache) = object ["must_not" .= term, - "_cache" .= cache] - toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms, - "_cache" .= cache] - -instance FromJSON BoolMatch where - parseJSON = withObject "BoolMatch" parse - where parse o = mustMatch `taggedWith` "must" - <|> mustNotMatch `taggedWith` "must_not" - <|> shouldMatch `taggedWith` "should" - where taggedWith parser k = parser =<< o .: k - mustMatch t = MustMatch t <$> o .:? "_cache" .!= defaultCache - mustNotMatch t = MustNotMatch t <$> o .:? 
"_cache" .!= defaultCache - shouldMatch t = ShouldMatch t <$> o .:? "_cache" .!= defaultCache - instance (FromJSON a) => FromJSON (SearchResult a) where parseJSON (Object v) = SearchResult <$> v .: "took" <*> @@ -3504,1747 +451,3 @@ instance (FromJSON a) => FromJSON (SearchResult a) where v .:? "_scroll_id" <*> v .:? "suggest" parseJSON _ = empty - -instance (FromJSON a) => FromJSON (SearchHits a) where - parseJSON (Object v) = SearchHits <$> - v .: "total" <*> - v .: "max_score" <*> - v .: "hits" - parseJSON _ = empty - -instance (FromJSON a) => FromJSON (Hit a) where - parseJSON (Object v) = Hit <$> - v .: "_index" <*> - v .: "_type" <*> - v .: "_id" <*> - v .: "_score" <*> - v .:? "_source" <*> - v .:? "highlight" - parseJSON _ = empty - -instance FromJSON ShardResult where - parseJSON (Object v) = ShardResult <$> - v .: "total" <*> - v .: "successful" <*> - v .: "failed" - parseJSON _ = empty - - -instance FromJSON DocVersion where - parseJSON v = do - i <- parseJSON v - maybe (fail "DocVersion out of range") return $ mkDocVersion i - --- This insanity is because ES *sometimes* returns Replica/Shard counts as strings -instance FromJSON ReplicaCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ReplicaCount . parseJSON - parseAsString = withText "ReplicaCount" (fmap ReplicaCount . parseReadText) - -instance FromJSON ShardCount where - parseJSON v = parseAsInt v - <|> parseAsString v - where parseAsInt = fmap ShardCount . parseJSON - parseAsString = withText "ShardCount" (fmap ShardCount . parseReadText) - -instance Bounded DocVersion where - minBound = DocVersion 1 - maxBound = DocVersion 9200000000000000000 -- 9.2e+18 - -instance Enum DocVersion where - succ x - | x /= maxBound = DocVersion (succ $ docVersionNumber x) - | otherwise = succError "DocVersion" - pred x - | x /= minBound = DocVersion (pred $ docVersionNumber x) - | otherwise = predError "DocVersion" - toEnum i = - fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i - fromEnum = docVersionNumber - enumFrom = boundedEnumFrom - enumFromThen = boundedEnumFromThen - --- | Username type used for HTTP Basic authentication. See 'basicAuthHook'. -newtype EsUsername = EsUsername { esUsername :: Text } deriving (Read, Show, Eq) - --- | Password type used for HTTP Basic authentication. See 'basicAuthHook'. -newtype EsPassword = EsPassword { esPassword :: Text } deriving (Read, Show, Eq) - - -data SnapshotRepoSelection = SnapshotRepoList (NonEmpty SnapshotRepoPattern) - | AllSnapshotRepos deriving (Eq, Generic, Show, Typeable) - - --- | Either specifies an exact repo name or one with globs in it, --- e.g. @RepoPattern "foo*"@ __NOTE__: Patterns are not supported on ES < 1.7 -data SnapshotRepoPattern = ExactRepo SnapshotRepoName - | RepoPattern Text - deriving (Eq, Generic, Show, Typeable) - --- | The unique name of a snapshot repository. -newtype SnapshotRepoName = SnapshotRepoName { snapshotRepoName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, ToJSON, FromJSON) - - --- | A generic representation of a snapshot repo. This is what gets --- sent to and parsed from the server. For repo types enabled by --- plugins that aren't exported by this library, consider making a --- custom type which implements 'SnapshotRepo'. 
If it is a common repo --- type, consider submitting a pull request to have it included in the --- library proper -data GenericSnapshotRepo = GenericSnapshotRepo { - gSnapshotRepoName :: SnapshotRepoName - , gSnapshotRepoType :: SnapshotRepoType - , gSnapshotRepoSettings :: GenericSnapshotRepoSettings - } deriving (Eq, Generic, Show, Typeable) - - -instance SnapshotRepo GenericSnapshotRepo where - toGSnapshotRepo = id - fromGSnapshotRepo = Right - - -newtype SnapshotRepoType = SnapshotRepoType { snapshotRepoType :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, ToJSON, FromJSON) - - --- | Opaque representation of snapshot repo settings. Instances of --- 'SnapshotRepo' will produce this. -newtype GenericSnapshotRepoSettings = GenericSnapshotRepoSettings { gSnapshotRepoSettingsObject :: Object } - deriving (Eq, Generic, Show, Typeable, ToJSON) - - - -- Regardless of whether you send strongly typed json, my version of - -- ES sends back stringly typed json in the settings, e.g. booleans - -- as strings, so we'll try to convert them. -instance FromJSON GenericSnapshotRepoSettings where - parseJSON = fmap (GenericSnapshotRepoSettings . fmap unStringlyTypeJSON). parseJSON - --- | The result of running 'verifySnapshotRepo'. -newtype SnapshotVerification = SnapshotVerification { snapshotNodeVerifications :: [SnapshotNodeVerification] } - deriving (Eq, Generic, Show, Typeable) - - -instance FromJSON SnapshotVerification where - parseJSON = withObject "SnapshotVerification" parse - where - parse o = do - o2 <- o .: "nodes" - SnapshotVerification <$> mapM (uncurry parse') (HM.toList o2) - parse' rawFullId = withObject "SnapshotNodeVerification" $ \o -> - SnapshotNodeVerification (FullNodeId rawFullId) <$> o .: "name" - - --- | A node that has verified a snapshot -data SnapshotNodeVerification = SnapshotNodeVerification { - snvFullId :: FullNodeId - , snvNodeName :: NodeName - } deriving (Eq, Generic, Show, Typeable) - - --- | Unique, automatically-generated name assigned to nodes that are --- usually returned in node-oriented APIs. -newtype FullNodeId = FullNodeId { fullNodeId :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - - --- | A human-readable node name that is supplied by the user in the --- node config or automatically generated by ElasticSearch. 
-newtype NodeName = NodeName { nodeName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -newtype ClusterName = ClusterName { clusterName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodesInfo = NodesInfo { - nodesInfo :: [NodeInfo] - , nodesClusterName :: ClusterName - } deriving (Eq, Show, Generic, Typeable) - -data NodesStats = NodesStats { - nodesStats :: [NodeStats] - , nodesStatsClusterName :: ClusterName - } deriving (Eq, Show, Generic, Typeable) - -data NodeStats = NodeStats { - nodeStatsName :: NodeName - , nodeStatsFullId :: FullNodeId - , nodeStatsBreakersStats :: Maybe NodeBreakersStats - , nodeStatsHTTP :: NodeHTTPStats - , nodeStatsTransport :: NodeTransportStats - , nodeStatsFS :: NodeFSStats - , nodeStatsNetwork :: NodeNetworkStats - , nodeStatsThreadPool :: NodeThreadPoolsStats - , nodeStatsJVM :: NodeJVMStats - , nodeStatsProcess :: NodeProcessStats - , nodeStatsOS :: NodeOSStats - , nodeStatsIndices :: NodeIndicesStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeBreakersStats = NodeBreakersStats { - nodeStatsParentBreaker :: NodeBreakerStats - , nodeStatsRequestBreaker :: NodeBreakerStats - , nodeStatsFieldDataBreaker :: NodeBreakerStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeBreakerStats = NodeBreakerStats { - nodeBreakersTripped :: Int - , nodeBreakersOverhead :: Double - , nodeBreakersEstSize :: Bytes - , nodeBreakersLimitSize :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeHTTPStats = NodeHTTPStats { - nodeHTTPTotalOpened :: Int - , nodeHTTPCurrentOpen :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeTransportStats = NodeTransportStats { - nodeTransportTXSize :: Bytes - , nodeTransportCount :: Int - , nodeTransportRXSize :: Bytes - , nodeTransportRXCount :: Int - , nodeTransportServerOpen :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeFSStats = NodeFSStats { - nodeFSDataPaths :: [NodeDataPathStats] - , nodeFSTotal :: NodeFSTotalStats - , nodeFSTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data NodeDataPathStats = NodeDataPathStats { - nodeDataPathDiskServiceTime :: Maybe Double - , nodeDataPathDiskQueue :: Maybe Double - , nodeDataPathIOSize :: Maybe Bytes - , nodeDataPathWriteSize :: Maybe Bytes - , nodeDataPathReadSize :: Maybe Bytes - , nodeDataPathIOOps :: Maybe Int - , nodeDataPathWrites :: Maybe Int - , nodeDataPathReads :: Maybe Int - , nodeDataPathAvailable :: Bytes - , nodeDataPathFree :: Bytes - , nodeDataPathTotal :: Bytes - , nodeDataPathType :: Maybe Text - , nodeDataPathDevice :: Text - , nodeDataPathMount :: Text - , nodeDataPathPath :: Text - } deriving (Eq, Show, Generic, Typeable) - -data NodeFSTotalStats = NodeFSTotalStats { - nodeFSTotalDiskServiceTime :: Maybe Double - , nodeFSTotalDiskQueue :: Maybe Double - , nodeFSTotalIOSize :: Maybe Bytes - , nodeFSTotalWriteSize :: Maybe Bytes - , nodeFSTotalReadSize :: Maybe Bytes - , nodeFSTotalIOOps :: Maybe Int - , nodeFSTotalWrites :: Maybe Int - , nodeFSTotalReads :: Maybe Int - , nodeFSTotalAvailable :: Bytes - , nodeFSTotalFree :: Bytes - , nodeFSTotalTotal :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeNetworkStats = NodeNetworkStats { - nodeNetTCPOutRSTs :: Int - , nodeNetTCPInErrs :: Int - , nodeNetTCPAttemptFails :: Int - , nodeNetTCPEstabResets :: Int - , nodeNetTCPRetransSegs :: Int - , nodeNetTCPOutSegs :: Int - , nodeNetTCPInSegs :: Int - , nodeNetTCPCurrEstab :: Int - , nodeNetTCPPassiveOpens :: Int - , nodeNetTCPActiveOpens :: Int - } 
deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolsStats = NodeThreadPoolsStats { - nodeThreadPoolsStatsSnapshot :: NodeThreadPoolStats - , nodeThreadPoolsStatsBulk :: NodeThreadPoolStats - , nodeThreadPoolsStatsMerge :: NodeThreadPoolStats - , nodeThreadPoolsStatsGet :: NodeThreadPoolStats - , nodeThreadPoolsStatsManagement :: NodeThreadPoolStats - , nodeThreadPoolsStatsFetchShardStore :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsOptimize :: NodeThreadPoolStats - , nodeThreadPoolsStatsFlush :: NodeThreadPoolStats - , nodeThreadPoolsStatsSearch :: NodeThreadPoolStats - , nodeThreadPoolsStatsWarmer :: NodeThreadPoolStats - , nodeThreadPoolsStatsGeneric :: NodeThreadPoolStats - , nodeThreadPoolsStatsSuggest :: NodeThreadPoolStats - , nodeThreadPoolsStatsRefresh :: NodeThreadPoolStats - , nodeThreadPoolsStatsIndex :: NodeThreadPoolStats - , nodeThreadPoolsStatsListener :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsFetchShardStarted :: Maybe NodeThreadPoolStats - , nodeThreadPoolsStatsPercolate :: NodeThreadPoolStats - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolStats = NodeThreadPoolStats { - nodeThreadPoolCompleted :: Int - , nodeThreadPoolLargest :: Int - , nodeThreadPoolRejected :: Int - , nodeThreadPoolActive :: Int - , nodeThreadPoolQueue :: Int - , nodeThreadPoolThreads :: Int - } deriving (Eq, Show, Generic, Typeable) - -data NodeJVMStats = NodeJVMStats { - nodeJVMStatsMappedBufferPool :: JVMBufferPoolStats - , nodeJVMStatsDirectBufferPool :: JVMBufferPoolStats - , nodeJVMStatsGCOldCollector :: JVMGCStats - , nodeJVMStatsGCYoungCollector :: JVMGCStats - , nodeJVMStatsPeakThreadsCount :: Int - , nodeJVMStatsThreadsCount :: Int - , nodeJVMStatsOldPool :: JVMPoolStats - , nodeJVMStatsSurvivorPool :: JVMPoolStats - , nodeJVMStatsYoungPool :: JVMPoolStats - , nodeJVMStatsNonHeapCommitted :: Bytes - , nodeJVMStatsNonHeapUsed :: Bytes - , nodeJVMStatsHeapMax :: Bytes - , nodeJVMStatsHeapCommitted :: Bytes - , nodeJVMStatsHeapUsedPercent :: Int - , nodeJVMStatsHeapUsed :: Bytes - , nodeJVMStatsUptime :: NominalDiffTime - , nodeJVMStatsTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data JVMBufferPoolStats = JVMBufferPoolStats { - jvmBufferPoolStatsTotalCapacity :: Bytes - , jvmBufferPoolStatsUsed :: Bytes - , jvmBufferPoolStatsCount :: Int - } deriving (Eq, Show, Generic, Typeable) - -data JVMGCStats = JVMGCStats { - jvmGCStatsCollectionTime :: NominalDiffTime - , jvmGCStatsCollectionCount :: Int - } deriving (Eq, Show, Generic, Typeable) - -data JVMPoolStats = JVMPoolStats { - jvmPoolStatsPeakMax :: Bytes - , jvmPoolStatsPeakUsed :: Bytes - , jvmPoolStatsMax :: Bytes - , jvmPoolStatsUsed :: Bytes - } deriving (Eq, Show, Generic, Typeable) - -data NodeProcessStats = NodeProcessStats { - nodeProcessMemTotalVirtual :: Bytes - , nodeProcessMemShare :: Bytes - , nodeProcessMemResident :: Bytes - , nodeProcessCPUTotal :: NominalDiffTime - , nodeProcessCPUUser :: NominalDiffTime - , nodeProcessCPUSys :: NominalDiffTime - , nodeProcessCPUPercent :: Int - , nodeProcessOpenFDs :: Int - , nodeProcessTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data NodeOSStats = NodeOSStats { - nodeOSSwapFree :: Bytes - , nodeOSSwapUsed :: Bytes - , nodeOSMemActualUsed :: Bytes - , nodeOSMemActualFree :: Bytes - , nodeOSMemUsedPercent :: Int - , nodeOSMemFreePercent :: Int - , nodeOSMemUsed :: Bytes - , nodeOSMemFree :: Bytes - , nodeOSCPUStolen :: Int - , nodeOSCPUUsage :: Int - , nodeOSCPUIdle :: Int - , nodeOSCPUUser :: Int - , 
nodeOSCPUSys :: Int - , nodeOSLoad :: Maybe LoadAvgs - , nodeOSUptime :: NominalDiffTime - , nodeOSTimestamp :: UTCTime - } deriving (Eq, Show, Generic, Typeable) - -data LoadAvgs = LoadAvgs { - loadAvg1Min :: Double - , loadAvg5Min :: Double - , loadAvg15Min :: Double - } deriving (Eq, Show, Generic, Typeable) - -data NodeIndicesStats = NodeIndicesStats { - nodeIndicesStatsRecoveryThrottleTime :: Maybe NominalDiffTime - , nodeIndicesStatsRecoveryCurrentAsTarget :: Maybe Int - , nodeIndicesStatsRecoveryCurrentAsSource :: Maybe Int - , nodeIndicesStatsQueryCacheMisses :: Maybe Int - , nodeIndicesStatsQueryCacheHits :: Maybe Int - , nodeIndicesStatsQueryCacheEvictions :: Maybe Int - , nodeIndicesStatsQueryCacheSize :: Maybe Bytes - , nodeIndicesStatsSuggestCurrent :: Int - , nodeIndicesStatsSuggestTime :: NominalDiffTime - , nodeIndicesStatsSuggestTotal :: Int - , nodeIndicesStatsTranslogSize :: Bytes - , nodeIndicesStatsTranslogOps :: Int - , nodeIndicesStatsSegFixedBitSetMemory :: Maybe Bytes - , nodeIndicesStatsSegVersionMapMemory :: Bytes - , nodeIndicesStatsSegIndexWriterMaxMemory :: Maybe Bytes - , nodeIndicesStatsSegIndexWriterMemory :: Bytes - , nodeIndicesStatsSegMemory :: Bytes - , nodeIndicesStatsSegCount :: Int - , nodeIndicesStatsCompletionSize :: Bytes - , nodeIndicesStatsPercolateQueries :: Int - , nodeIndicesStatsPercolateMemory :: Bytes - , nodeIndicesStatsPercolateCurrent :: Int - , nodeIndicesStatsPercolateTime :: NominalDiffTime - , nodeIndicesStatsPercolateTotal :: Int - , nodeIndicesStatsFieldDataEvictions :: Int - , nodeIndicesStatsFieldDataMemory :: Bytes - , nodeIndicesStatsIDCacheMemory :: Bytes - , nodeIndicesStatsFilterCacheEvictions :: Int - , nodeIndicesStatsFilterCacheMemory :: Bytes - , nodeIndicesStatsWarmerTotalTime :: NominalDiffTime - , nodeIndicesStatsWarmerTotal :: Int - , nodeIndicesStatsWarmerCurrent :: Int - , nodeIndicesStatsFlushTotalTime :: NominalDiffTime - , nodeIndicesStatsFlushTotal :: Int - , nodeIndicesStatsRefreshTotalTime :: NominalDiffTime - , nodeIndicesStatsRefreshTotal :: Int - , nodeIndicesStatsMergesTotalSize :: Bytes - , nodeIndicesStatsMergesTotalDocs :: Int - , nodeIndicesStatsMergesTotalTime :: NominalDiffTime - , nodeIndicesStatsMergesTotal :: Int - , nodeIndicesStatsMergesCurrentSize :: Bytes - , nodeIndicesStatsMergesCurrentDocs :: Int - , nodeIndicesStatsMergesCurrent :: Int - , nodeIndicesStatsSearchFetchCurrent :: Int - , nodeIndicesStatsSearchFetchTime :: NominalDiffTime - , nodeIndicesStatsSearchFetchTotal :: Int - , nodeIndicesStatsSearchQueryCurrent :: Int - , nodeIndicesStatsSearchQueryTime :: NominalDiffTime - , nodeIndicesStatsSearchQueryTotal :: Int - , nodeIndicesStatsSearchOpenContexts :: Int - , nodeIndicesStatsGetCurrent :: Int - , nodeIndicesStatsGetMissingTime :: NominalDiffTime - , nodeIndicesStatsGetMissingTotal :: Int - , nodeIndicesStatsGetExistsTime :: NominalDiffTime - , nodeIndicesStatsGetExistsTotal :: Int - , nodeIndicesStatsGetTime :: NominalDiffTime - , nodeIndicesStatsGetTotal :: Int - , nodeIndicesStatsIndexingThrottleTime :: Maybe NominalDiffTime - , nodeIndicesStatsIndexingIsThrottled :: Maybe Bool - , nodeIndicesStatsIndexingNoopUpdateTotal :: Maybe Int - , nodeIndicesStatsIndexingDeleteCurrent :: Int - , nodeIndicesStatsIndexingDeleteTime :: NominalDiffTime - , nodeIndicesStatsIndexingDeleteTotal :: Int - , nodeIndicesStatsIndexingIndexCurrent :: Int - , nodeIndicesStatsIndexingIndexTime :: NominalDiffTime - , nodeIndicesStatsIndexingTotal :: Int - , nodeIndicesStatsStoreThrottleTime :: 
NominalDiffTime - , nodeIndicesStatsStoreSize :: Bytes - , nodeIndicesStatsDocsDeleted :: Int - , nodeIndicesStatsDocsCount :: Int - } deriving (Eq, Show, Generic, Typeable) - --- | A quirky address format used throughout ElasticSearch. An example --- would be inet[/1.1.1.1:9200]. inet may be a placeholder for a --- . -newtype EsAddress = EsAddress { esAddress :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - --- | Typically a 7 character hex string. -newtype BuildHash = BuildHash { buildHash :: Text } - deriving (Eq, Ord, Generic, Read, Show, Typeable, FromJSON, ToJSON) - -newtype PluginName = PluginName { pluginName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodeInfo = NodeInfo { - nodeInfoHTTPAddress :: EsAddress - , nodeInfoBuild :: BuildHash - , nodeInfoESVersion :: VersionNumber - , nodeInfoIP :: Server - , nodeInfoHost :: Server - , nodeInfoTransportAddress :: EsAddress - , nodeInfoName :: NodeName - , nodeInfoFullId :: FullNodeId - , nodeInfoPlugins :: [NodePluginInfo] - , nodeInfoHTTP :: NodeHTTPInfo - , nodeInfoTransport :: NodeTransportInfo - , nodeInfoNetwork :: NodeNetworkInfo - , nodeInfoThreadPool :: NodeThreadPoolsInfo - , nodeInfoJVM :: NodeJVMInfo - , nodeInfoProcess :: NodeProcessInfo - , nodeInfoOS :: NodeOSInfo - , nodeInfoSettings :: Object - -- ^ The members of the settings objects are not consistent, - -- dependent on plugins, etc. - } deriving (Eq, Show, Generic, Typeable) - -data NodePluginInfo = NodePluginInfo { - nodePluginSite :: Bool - -- ^ Is this a site plugin? - , nodePluginJVM :: Bool - -- ^ Is this plugin running on the JVM - , nodePluginDescription :: Text - , nodePluginVersion :: MaybeNA VersionNumber - , nodePluginName :: PluginName - } deriving (Eq, Show, Generic, Typeable) - -data NodeHTTPInfo = NodeHTTPInfo { - nodeHTTPMaxContentLength :: Bytes - , nodeHTTPTransportAddress :: BoundTransportAddress - } deriving (Eq, Show, Generic, Typeable) - -data NodeTransportInfo = NodeTransportInfo { - nodeTransportProfiles :: [BoundTransportAddress] - , nodeTransportAddress :: BoundTransportAddress - } deriving (Eq, Show, Generic, Typeable) - -data BoundTransportAddress = BoundTransportAddress { - publishAddress :: EsAddress - , boundAddress :: EsAddress - } deriving (Eq, Show, Generic, Typeable) - -data NodeNetworkInfo = NodeNetworkInfo { - nodeNetworkPrimaryInterface :: NodeNetworkInterface - , nodeNetworkRefreshInterval :: NominalDiffTime - } deriving (Eq, Show, Generic, Typeable) - -newtype MacAddress = MacAddress { macAddress :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -newtype NetworkInterfaceName = NetworkInterfaceName { networkInterfaceName :: Text } - deriving (Eq, Ord, Generic, Show, Typeable, FromJSON) - -data NodeNetworkInterface = NodeNetworkInterface { - nodeNetIfaceMacAddress :: MacAddress - , nodeNetIfaceName :: NetworkInterfaceName - , nodeNetIfaceAddress :: Server - } deriving (Eq, Show, Generic, Typeable) - -data NodeThreadPoolsInfo = NodeThreadPoolsInfo { - nodeThreadPoolsRefresh :: NodeThreadPoolInfo - , nodeThreadPoolsManagement :: NodeThreadPoolInfo - , nodeThreadPoolsPercolate :: NodeThreadPoolInfo - , nodeThreadPoolsListener :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsFetchShardStarted :: Maybe NodeThreadPoolInfo - , nodeThreadPoolsSearch :: NodeThreadPoolInfo - , nodeThreadPoolsFlush :: NodeThreadPoolInfo - , nodeThreadPoolsWarmer :: NodeThreadPoolInfo - , nodeThreadPoolsOptimize :: NodeThreadPoolInfo - , nodeThreadPoolsBulk :: NodeThreadPoolInfo - , 
nodeThreadPoolsSuggest :: NodeThreadPoolInfo
-  , nodeThreadPoolsMerge :: NodeThreadPoolInfo
-  , nodeThreadPoolsSnapshot :: NodeThreadPoolInfo
-  , nodeThreadPoolsGet :: NodeThreadPoolInfo
-  , nodeThreadPoolsFetchShardStore :: Maybe NodeThreadPoolInfo
-  , nodeThreadPoolsIndex :: NodeThreadPoolInfo
-  , nodeThreadPoolsGeneric :: NodeThreadPoolInfo
-  } deriving (Eq, Show, Generic, Typeable)
-
-data NodeThreadPoolInfo = NodeThreadPoolInfo {
-    nodeThreadPoolQueueSize :: ThreadPoolSize
-  , nodeThreadPoolKeepalive :: Maybe NominalDiffTime
-  , nodeThreadPoolMin :: Maybe Int
-  , nodeThreadPoolMax :: Maybe Int
-  , nodeThreadPoolType :: ThreadPoolType
-  } deriving (Eq, Show, Generic, Typeable)
-
-data ThreadPoolSize = ThreadPoolBounded Int
-                    | ThreadPoolUnbounded
-                    deriving (Eq, Show, Generic, Typeable)
-
-data ThreadPoolType = ThreadPoolScaling
-                    | ThreadPoolFixed
-                    | ThreadPoolCached
-                    deriving (Eq, Show, Generic, Typeable)
-
-data NodeJVMInfo = NodeJVMInfo {
-    nodeJVMInfoMemoryPools :: [JVMMemoryPool]
-  , nodeJVMInfoMemoryPoolsGCCollectors :: [JVMGCCollector]
-  , nodeJVMInfoMemoryInfo :: JVMMemoryInfo
-  , nodeJVMInfoStartTime :: UTCTime
-  , nodeJVMInfoVMVendor :: Text
-  , nodeJVMVMVersion :: VersionNumber
-  -- ^ JVM doesn't seem to follow normal version conventions
-  , nodeJVMVMName :: Text
-  , nodeJVMVersion :: VersionNumber
-  , nodeJVMPID :: PID
-  } deriving (Eq, Show, Generic, Typeable)
-
--- | Handles quirks in the way JVM versions are rendered (1.7.0_101 -> 1.7.0.101)
-newtype JVMVersion = JVMVersion { unJVMVersion :: VersionNumber }
-
-data JVMMemoryInfo = JVMMemoryInfo {
-    jvmMemoryInfoDirectMax :: Bytes
-  , jvmMemoryInfoNonHeapMax :: Bytes
-  , jvmMemoryInfoNonHeapInit :: Bytes
-  , jvmMemoryInfoHeapMax :: Bytes
-  , jvmMemoryInfoHeapInit :: Bytes
-  } deriving (Eq, Show, Generic, Typeable)
-
-newtype JVMMemoryPool = JVMMemoryPool {
-    jvmMemoryPool :: Text
-  } deriving (Eq, Show, Generic, Typeable, FromJSON)
-
-newtype JVMGCCollector = JVMGCCollector {
-    jvmGCCollector :: Text
-  } deriving (Eq, Show, Generic, Typeable, FromJSON)
-
-newtype PID = PID {
-    pid :: Int
-  } deriving (Eq, Show, Generic, Typeable, FromJSON)
-
-data NodeOSInfo = NodeOSInfo {
-    nodeOSSwap :: Bytes
-  , nodeOSMem :: Bytes
-  , nodeOSCPUInfo :: CPUInfo
-  , nodeOSAvailableProcessors :: Int
-  , nodeOSRefreshInterval :: NominalDiffTime
-  } deriving (Eq, Show, Generic, Typeable)
-
-data CPUInfo = CPUInfo {
-    cpuCacheSize :: Bytes
-  , cpuCoresPerSocket :: Int
-  , cpuTotalSockets :: Int
-  , cpuTotalCores :: Int
-  , cpuMHZ :: Int
-  , cpuModel :: Text
-  , cpuVendor :: Text
-  } deriving (Eq, Show, Generic, Typeable)
-
-data NodeProcessInfo = NodeProcessInfo {
-    nodeProcessMLockAll :: Bool
-  -- ^ See
-  , nodeProcessMaxFileDescriptors :: Int
-  , nodeProcessId :: PID
-  , nodeProcessRefreshInterval :: NominalDiffTime
-  } deriving (Eq, Show, Generic, Typeable)
-
-data SnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings {
-    repoUpdateVerify :: Bool
-    -- ^ After creation/update, synchronously check that nodes can
-    -- write to this repo. Defaults to True. You may use False if you
-    -- need a faster response and plan on verifying manually later
-    -- with 'verifySnapshotRepo'.
-  } deriving (Eq, Show, Generic, Typeable)
-
-
--- | Reasonable defaults for repo creation/update
---
--- * repoUpdateVerify True
-defaultSnapshotRepoUpdateSettings :: SnapshotRepoUpdateSettings
-defaultSnapshotRepoUpdateSettings = SnapshotRepoUpdateSettings True
-
-
--- | A filesystem-based snapshot repo that ships with
This is an instance of 'SnapshotRepo' so it can be --- used with 'updateSnapshotRepo' -data FsSnapshotRepo = FsSnapshotRepo { - fsrName :: SnapshotRepoName - , fsrLocation :: FilePath - , fsrCompressMetadata :: Bool - , fsrChunkSize :: Maybe Bytes - -- ^ Size by which to split large files during snapshotting. - , fsrMaxRestoreBytesPerSec :: Maybe Bytes - -- ^ Throttle node restore rate. If not supplied, defaults to 40mb/sec - , fsrMaxSnapshotBytesPerSec :: Maybe Bytes - -- ^ Throttle node snapshot rate. If not supplied, defaults to 40mb/sec - } deriving (Eq, Generic, Show, Typeable) - - -instance SnapshotRepo FsSnapshotRepo where - toGSnapshotRepo FsSnapshotRepo {..} = - GenericSnapshotRepo fsrName fsRepoType (GenericSnapshotRepoSettings settings) - where - Object settings = object $ [ "location" .= fsrLocation - , "compress" .= fsrCompressMetadata - ] ++ optionalPairs - optionalPairs = catMaybes [ ("chunk_size" .=) <$> fsrChunkSize - , ("max_restore_bytes_per_sec" .=) <$> fsrMaxRestoreBytesPerSec - , ("max_snapshot_bytes_per_sec" .=) <$> fsrMaxSnapshotBytesPerSec - ] - fromGSnapshotRepo GenericSnapshotRepo {..} - | gSnapshotRepoType == fsRepoType = do - let o = gSnapshotRepoSettingsObject gSnapshotRepoSettings - parseRepo $ do - FsSnapshotRepo gSnapshotRepoName <$> o .: "location" - <*> o .:? "compress" .!= False - <*> o .:? "chunk_size" - <*> o .:? "max_restore_bytes_per_sec" - <*> o .:? "max_snapshot_bytes_per_sec" - | otherwise = Left (RepoTypeMismatch fsRepoType gSnapshotRepoType) - - -parseRepo :: Parser a -> Either SnapshotRepoConversionError a -parseRepo parser = case parseEither (const parser) () of - Left e -> Left (OtherRepoConversionError (T.pack e)) - Right a -> Right a - - -fsRepoType :: SnapshotRepoType -fsRepoType = SnapshotRepoType "fs" - --- | Law: fromGSnapshotRepo (toGSnapshotRepo r) == Right r -class SnapshotRepo r where - toGSnapshotRepo :: r -> GenericSnapshotRepo - fromGSnapshotRepo :: GenericSnapshotRepo -> Either SnapshotRepoConversionError r - - -data SnapshotRepoConversionError = RepoTypeMismatch SnapshotRepoType SnapshotRepoType - -- ^ Expected type and actual type - | OtherRepoConversionError Text - deriving (Show, Eq, Generic, Typeable) - - -instance Exception SnapshotRepoConversionError - - -data SnapshotCreateSettings = SnapshotCreateSettings { - snapWaitForCompletion :: Bool - -- ^ Should the API call return immediately after initializing - -- the snapshot or wait until completed? Note that if this is - -- enabled it could wait a long time, so you should adjust your - -- 'ManagerSettings' accordingly to set long timeouts or - -- explicitly handle timeouts. - , snapIndices :: Maybe IndexSelection - -- ^ Nothing will snapshot all indices. Just [] is permissible and - -- will essentially be a no-op snapshot. - , snapIgnoreUnavailable :: Bool - -- ^ If set to True, any matched indices that don't exist will be - -- ignored. Otherwise it will be an error and fail. - , snapIncludeGlobalState :: Bool - , snapPartial :: Bool - -- ^ If some indices failed to snapshot (e.g. if not all primary - -- shards are available), should the process proceed? 
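The round-trip law for 'SnapshotRepo' above is easiest to see with a concrete repo. A minimal sketch, assuming the V1 snapshot types in scope; the repo name and location are made up for illustration:

demoRepo :: FsSnapshotRepo
demoRepo =
  FsSnapshotRepo
    (SnapshotRepoName "backups") -- hypothetical repo name
    "/mnt/backups"               -- hypothetical filesystem location
    True                         -- compress metadata
    Nothing                      -- chunk_size: use the server default
    Nothing                      -- restore throttle: default 40mb/sec
    Nothing                      -- snapshot throttle: default 40mb/sec

-- Converting to the generic representation and parsing back should be
-- the identity, modulo Right:
fsRoundTrips :: Bool
fsRoundTrips = fromGSnapshotRepo (toGSnapshotRepo demoRepo) == Right demoRepo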
- } deriving (Eq, Generic, Show, Typeable) - - --- | Reasonable defaults for snapshot creation --- --- * snapWaitForCompletion False --- * snapIndices Nothing --- * snapIgnoreUnavailable False --- * snapIncludeGlobalState True --- * snapPartial False -defaultSnapshotCreateSettings :: SnapshotCreateSettings -defaultSnapshotCreateSettings = SnapshotCreateSettings { - snapWaitForCompletion = False - , snapIndices = Nothing - , snapIgnoreUnavailable = False - , snapIncludeGlobalState = True - , snapPartial = False - } - - -data SnapshotSelection = SnapshotList (NonEmpty SnapshotPattern) - | AllSnapshots deriving (Eq, Generic, Show, Typeable) - - --- | Either specifies an exact snapshot name or one with globs in it, --- e.g. @SnapPattern "foo*"@ __NOTE__: Patterns are not supported on --- ES < 1.7 -data SnapshotPattern = ExactSnap SnapshotName - | SnapPattern Text - deriving (Eq, Generic, Show, Typeable) - - --- | General information about the state of a snapshot. Has some --- redundancies with 'SnapshotStatus' -data SnapshotInfo = SnapshotInfo { - snapInfoShards :: ShardResult - , snapInfoFailures :: [SnapshotShardFailure] - , snapInfoDuration :: NominalDiffTime - , snapInfoEndTime :: UTCTime - , snapInfoStartTime :: UTCTime - , snapInfoState :: SnapshotState - , snapInfoIndices :: [IndexName] - , snapInfoName :: SnapshotName - } deriving (Eq, Generic, Show, Typeable) - - -instance FromJSON SnapshotInfo where - parseJSON = withObject "SnapshotInfo" parse - where - parse o = SnapshotInfo <$> o .: "shards" - <*> o .: "failures" - <*> (unMS <$> o .: "duration_in_millis") - <*> (posixMS <$> o .: "end_time_in_millis") - <*> (posixMS <$> o .: "start_time_in_millis") - <*> o .: "state" - <*> o .: "indices" - <*> o .: "snapshot" - -data SnapshotShardFailure = SnapshotShardFailure { - snapShardFailureIndex :: IndexName - , snapShardFailureNodeId :: Maybe NodeName -- I'm not 100% sure this isn't actually 'FullNodeId' - , snapShardFailureReason :: Text - , snapShardFailureShardId :: ShardId - } deriving (Eq, Show, Generic, Typeable) - - -instance FromJSON SnapshotShardFailure where - parseJSON = withObject "SnapshotShardFailure" parse - where - parse o = SnapshotShardFailure <$> o .: "index" - <*> o .:? "node_id" - <*> o .: "reason" - <*> o .: "shard_id" - - -newtype ShardId = ShardId { shardId :: Int } - deriving (Eq, Show, Generic, Typeable, FromJSON) - --- | Milliseconds -newtype MS = MS NominalDiffTime - - --- keeps the unexported constructor warnings at bay -unMS :: MS -> NominalDiffTime -unMS (MS t) = t - - -instance FromJSON MS where - parseJSON = withScientific "MS" (return . MS . 
parse) - where - -- duration_in_millis is in milliseconds; NominalDiffTime is in seconds - parse n = fromInteger (truncate n) / 1000 - - -data SnapshotState = SnapshotInit - | SnapshotStarted - | SnapshotSuccess - | SnapshotFailed - | SnapshotAborted - | SnapshotMissing - | SnapshotWaiting - deriving (Show, Eq, Generic, Typeable) - -instance FromJSON SnapshotState where - parseJSON = withText "SnapshotState" parse - where - parse "INIT" = return SnapshotInit - parse "STARTED" = return SnapshotStarted - parse "SUCCESS" = return SnapshotSuccess - parse "FAILED" = return SnapshotFailed - parse "ABORTED" = return SnapshotAborted - parse "MISSING" = return SnapshotMissing - parse "WAITING" = return SnapshotWaiting - parse t = fail ("Invalid snapshot state " <> T.unpack t) - - -newtype SnapshotName = SnapshotName { snapshotName :: Text } - deriving (Show, Eq, Ord, Generic, Typeable, ToJSON, FromJSON) - - -data SnapshotRestoreSettings = SnapshotRestoreSettings { - snapRestoreWaitForCompletion :: Bool - -- ^ Should the API call return immediately after initializing - -- the restore or wait until completed? Note that if this is - -- enabled, it could wait a long time, so you should adjust your - -- 'ManagerSettings' accordingly to set long timeouts or - -- explicitly handle timeouts. - , snapRestoreIndices :: Maybe IndexSelection - -- ^ Nothing will restore all indices in the snapshot. Just [] is - -- permissible and will essentially be a no-op restore. - , snapRestoreIgnoreUnavailable :: Bool - -- ^ If set to True, any indices that do not exist in the snapshot - -- will be ignored rather than failing the restore. - , snapRestoreIncludeGlobalState :: Bool - -- ^ If set to false, will ignore any global state in the snapshot - -- and will not restore it. - , snapRestoreRenamePattern :: Maybe RestoreRenamePattern - -- ^ A regex pattern for matching indices. Used with - -- 'snapRestoreRenameReplacement', the restore can reference the - -- matched index and create a new index name upon restore. - , snapRestoreRenameReplacement :: Maybe (NonEmpty RestoreRenameToken) - -- ^ Expression of how index renames should be constructed. - , snapRestorePartial :: Bool - -- ^ If some indices fail to restore, should the process proceed? - , snapRestoreIncludeAliases :: Bool - -- ^ Should the restore also restore the aliases captured in the - -- snapshot? - , snapRestoreIndexSettingsOverrides :: Maybe RestoreIndexSettings - -- ^ Settings to apply during the restore process. __NOTE:__ This - -- option is not supported in ES < 1.5 and should be set to - -- Nothing in that case. - , snapRestoreIgnoreIndexSettings :: Maybe (NonEmpty Text) - -- ^ This type could be more rich but it isn't clear which - -- settings are allowed to be ignored during restore, so we're - -- going with including this feature in a basic form rather than - -- omitting it. One example here would be - -- "index.refresh_interval". Any setting specified here will - -- revert back to the server default during the restore process. - } deriving (Eq, Generic, Show, Typeable) - --- | Regex-style pattern, e.g. "index_(.+)" to match index names -newtype RestoreRenamePattern = RestoreRenamePattern { rrPattern :: Text } - deriving (Show, Eq, Generic, Typeable, Ord, ToJSON) - - --- | A single token in an index renaming scheme for a restore. These --- are concatenated into a string before being sent to --- ElasticSearch. Check out the Java regex documentation --- to find out more if you're into that sort of thing. -data RestoreRenameToken = RRTLit Text - -- ^ Just a literal string of characters - | RRSubWholeMatch - -- ^ Equivalent to $0. 
The entire matched pattern, not any subgroup - | RRSubGroup RRGroupRefNum - -- ^ A specific reference to a group number - deriving (Show, Eq, Generic, Typeable) - - --- | A group number for regex matching. Only values from 1-9 are --- supported. Construct with 'mkRRGroupRefNum' -newtype RRGroupRefNum = RRGroupRefNum { rrGroupRefNum :: Int } - deriving (Show, Eq, Generic, Typeable, Ord) - -instance Bounded RRGroupRefNum where - minBound = RRGroupRefNum 1 - maxBound = RRGroupRefNum 9 - - --- | Only allows valid group number references (1-9). -mkRRGroupRefNum :: Int -> Maybe RRGroupRefNum -mkRRGroupRefNum i - | i >= (rrGroupRefNum minBound) && i <= (rrGroupRefNum maxBound) = - Just $ RRGroupRefNum i - | otherwise = Nothing - - --- | Reasonable defaults for snapshot restores --- --- * snapRestoreWaitForCompletion False --- * snapRestoreIndices Nothing --- * snapRestoreIgnoreUnavailable False --- * snapRestoreIncludeGlobalState True --- * snapRestoreRenamePattern Nothing --- * snapRestoreRenameReplacement Nothing --- * snapRestorePartial False --- * snapRestoreIncludeAliases True --- * snapRestoreIndexSettingsOverrides Nothing --- * snapRestoreIgnoreIndexSettings Nothing -defaultSnapshotRestoreSettings :: SnapshotRestoreSettings -defaultSnapshotRestoreSettings = SnapshotRestoreSettings { - snapRestoreWaitForCompletion = False - , snapRestoreIndices = Nothing - , snapRestoreIgnoreUnavailable = False - , snapRestoreIncludeGlobalState = True - , snapRestoreRenamePattern = Nothing - , snapRestoreRenameReplacement = Nothing - , snapRestorePartial = False - , snapRestoreIncludeAliases = True - , snapRestoreIndexSettingsOverrides = Nothing - , snapRestoreIgnoreIndexSettings = Nothing - } - - --- | Index settings that can be overridden. The docs only mention you --- can update number of replicas, but there may be more. You --- definitely cannot override shard count. 
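The rename pattern and token list combine much like Java's regex appendReplacement: the pattern captures groups from each index name, and the tokens spell out the new name. A minimal sketch using the defaults above; the "restored_" prefix is invented for illustration:

-- Restore every index under a hypothetical "restored_" prefix:
-- "(.+)" captures the whole index name, replacement is "restored_" <> $1.
renamedRestore :: Maybe SnapshotRestoreSettings
renamedRestore = do
  grp <- mkRRGroupRefNum 1 -- validated smart constructor; Nothing outside 1-9
  pure defaultSnapshotRestoreSettings
    { snapRestoreRenamePattern = Just (RestoreRenamePattern "(.+)")
    , snapRestoreRenameReplacement = Just (RRTLit "restored_" :| [RRSubGroup grp])
    }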
-data RestoreIndexSettings = RestoreIndexSettings { - restoreOverrideReplicas :: Maybe ReplicaCount - } deriving (Show, Eq, Generic, Typeable) - - -instance ToJSON RestoreIndexSettings where - toJSON RestoreIndexSettings {..} = object prs - where - prs = catMaybes [("index.number_of_replicas" .=) <$> restoreOverrideReplicas] - - -instance FromJSON NodesInfo where - parseJSON = withObject "NodesInfo" parse - where - parse o = do - nodes <- o .: "nodes" - infos <- forM (HM.toList nodes) $ \(fullNID, v) -> do - node <- parseJSON v - parseNodeInfo (FullNodeId fullNID) node - cn <- o .: "cluster_name" - return (NodesInfo infos cn) - -instance FromJSON NodesStats where - parseJSON = withObject "NodesStats" parse - where - parse o = do - nodes <- o .: "nodes" - stats <- forM (HM.toList nodes) $ \(fullNID, v) -> do - node <- parseJSON v - parseNodeStats (FullNodeId fullNID) node - cn <- o .: "cluster_name" - return (NodesStats stats cn) - -instance FromJSON NodeBreakerStats where - parseJSON = withObject "NodeBreakerStats" parse - where - parse o = NodeBreakerStats <$> o .: "tripped" - <*> o .: "overhead" - <*> o .: "estimated_size_in_bytes" - <*> o .: "limit_size_in_bytes" - -instance FromJSON NodeHTTPStats where - parseJSON = withObject "NodeHTTPStats" parse - where - parse o = NodeHTTPStats <$> o .: "total_opened" - <*> o .: "current_open" - -instance FromJSON NodeTransportStats where - parseJSON = withObject "NodeTransportStats" parse - where - parse o = NodeTransportStats <$> o .: "tx_size_in_bytes" - <*> o .: "tx_count" - <*> o .: "rx_size_in_bytes" - <*> o .: "rx_count" - <*> o .: "server_open" - -instance FromJSON NodeFSStats where - parseJSON = withObject "NodeFSStats" parse - where - parse o = NodeFSStats <$> o .: "data" - <*> o .: "total" - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON NodeDataPathStats where - parseJSON = withObject "NodeDataPathStats" parse - where - parse o = - NodeDataPathStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") - <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") - <*> o .:? "disk_io_size_in_bytes" - <*> o .:? "disk_write_size_in_bytes" - <*> o .:? "disk_read_size_in_bytes" - <*> o .:? "disk_io_op" - <*> o .:? "disk_writes" - <*> o .:? "disk_reads" - <*> o .: "available_in_bytes" - <*> o .: "free_in_bytes" - <*> o .: "total_in_bytes" - <*> o .:? "type" - <*> o .: "dev" - <*> o .: "mount" - <*> o .: "path" - -newtype StringlyTypedDouble = StringlyTypedDouble { unStringlyTypedDouble :: Double } - - -instance FromJSON StringlyTypedDouble where - parseJSON = fmap StringlyTypedDouble . parseJSON . unStringlyTypeJSON - - -instance FromJSON NodeFSTotalStats where - parseJSON = withObject "NodeFSTotalStats" parse - where - parse o = NodeFSTotalStats <$> (fmap unStringlyTypedDouble <$> o .:? "disk_service_time") - <*> (fmap unStringlyTypedDouble <$> o .:? "disk_queue") - <*> o .:? "disk_io_size_in_bytes" - <*> o .:? "disk_write_size_in_bytes" - <*> o .:? "disk_read_size_in_bytes" - <*> o .:? "disk_io_op" - <*> o .:? "disk_writes" - <*> o .:? 
"disk_reads" - <*> o .: "available_in_bytes" - <*> o .: "free_in_bytes" - <*> o .: "total_in_bytes" - -instance FromJSON NodeNetworkStats where - parseJSON = withObject "NodeNetworkStats" parse - where - parse o = do - tcp <- o .: "tcp" - NodeNetworkStats <$> tcp .: "out_rsts" - <*> tcp .: "in_errs" - <*> tcp .: "attempt_fails" - <*> tcp .: "estab_resets" - <*> tcp .: "retrans_segs" - <*> tcp .: "out_segs" - <*> tcp .: "in_segs" - <*> tcp .: "curr_estab" - <*> tcp .: "passive_opens" - <*> tcp .: "active_opens" - -instance FromJSON NodeThreadPoolsStats where - parseJSON = withObject "NodeThreadPoolsStats" parse - where - parse o = NodeThreadPoolsStats <$> o .: "snapshot" - <*> o .: "bulk" - <*> o .: "merge" - <*> o .: "get" - <*> o .: "management" - <*> o .:? "fetch_shard_store" - <*> o .: "optimize" - <*> o .: "flush" - <*> o .: "search" - <*> o .: "warmer" - <*> o .: "generic" - <*> o .: "suggest" - <*> o .: "refresh" - <*> o .: "index" - <*> o .:? "listener" - <*> o .:? "fetch_shard_started" - <*> o .: "percolate" -instance FromJSON NodeThreadPoolStats where - parseJSON = withObject "NodeThreadPoolStats" parse - where - parse o = NodeThreadPoolStats <$> o .: "completed" - <*> o .: "largest" - <*> o .: "rejected" - <*> o .: "active" - <*> o .: "queue" - <*> o .: "threads" - -instance FromJSON NodeJVMStats where - parseJSON = withObject "NodeJVMStats" parse - where - parse o = do - bufferPools <- o .: "buffer_pools" - mapped <- bufferPools .: "mapped" - direct <- bufferPools .: "direct" - gc <- o .: "gc" - collectors <- gc .: "collectors" - oldC <- collectors .: "old" - youngC <- collectors .: "young" - threads <- o .: "threads" - mem <- o .: "mem" - pools <- mem .: "pools" - oldM <- pools .: "old" - survivorM <- pools .: "survivor" - youngM <- pools .: "young" - NodeJVMStats <$> pure mapped - <*> pure direct - <*> pure oldC - <*> pure youngC - <*> threads .: "peak_count" - <*> threads .: "count" - <*> pure oldM - <*> pure survivorM - <*> pure youngM - <*> mem .: "non_heap_committed_in_bytes" - <*> mem .: "non_heap_used_in_bytes" - <*> mem .: "heap_max_in_bytes" - <*> mem .: "heap_committed_in_bytes" - <*> mem .: "heap_used_percent" - <*> mem .: "heap_used_in_bytes" - <*> (unMS <$> o .: "uptime_in_millis") - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON JVMBufferPoolStats where - parseJSON = withObject "JVMBufferPoolStats" parse - where - parse o = JVMBufferPoolStats <$> o .: "total_capacity_in_bytes" - <*> o .: "used_in_bytes" - <*> o .: "count" - -instance FromJSON JVMGCStats where - parseJSON = withObject "JVMGCStats" parse - where - parse o = JVMGCStats <$> (unMS <$> o .: "collection_time_in_millis") - <*> o .: "collection_count" - -instance FromJSON JVMPoolStats where - parseJSON = withObject "JVMPoolStats" parse - where - parse o = JVMPoolStats <$> o .: "peak_max_in_bytes" - <*> o .: "peak_used_in_bytes" - <*> o .: "max_in_bytes" - <*> o .: "used_in_bytes" - -instance FromJSON NodeProcessStats where - parseJSON = withObject "NodeProcessStats" parse - where - parse o = do - mem <- o .: "mem" - cpu <- o .: "cpu" - NodeProcessStats <$> mem .: "total_virtual_in_bytes" - <*> mem .: "share_in_bytes" - <*> mem .: "resident_in_bytes" - <*> (unMS <$> cpu .: "total_in_millis") - <*> (unMS <$> cpu .: "user_in_millis") - <*> (unMS <$> cpu .: "sys_in_millis") - <*> cpu .: "percent" - <*> o .: "open_file_descriptors" - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON NodeOSStats where - parseJSON = withObject "NodeOSStats" parse - where - parse o = do - swap <- o .: "swap" - mem 
<- o .: "mem" - cpu <- o .: "cpu" - load <- o .:? "load_average" - NodeOSStats <$> swap .: "free_in_bytes" - <*> swap .: "used_in_bytes" - <*> mem .: "actual_used_in_bytes" - <*> mem .: "actual_free_in_bytes" - <*> mem .: "used_percent" - <*> mem .: "free_percent" - <*> mem .: "used_in_bytes" - <*> mem .: "free_in_bytes" - <*> cpu .: "stolen" - <*> cpu .: "usage" - <*> cpu .: "idle" - <*> cpu .: "user" - <*> cpu .: "sys" - <*> pure load - <*> (unMS <$> o .: "uptime_in_millis") - <*> (posixMS <$> o .: "timestamp") - -instance FromJSON LoadAvgs where - parseJSON = withArray "LoadAvgs" parse - where - parse v = case V.toList v of - [one, five, fifteen] -> LoadAvgs <$> parseJSON one - <*> parseJSON five - <*> parseJSON fifteen - _ -> fail "Expecting a triple of Doubles" - -instance FromJSON NodeIndicesStats where - parseJSON = withObject "NodeIndicesStats" parse - where - parse o = do - let (.::) mv k = case mv of - Just v -> Just <$> v .: k - Nothing -> pure Nothing - mRecovery <- o .:? "recovery" - mQueryCache <- o .:? "query_cache" - suggest <- o .: "suggest" - translog <- o .: "translog" - segments <- o .: "segments" - completion <- o .: "completion" - percolate <- o .: "percolate" - fielddata <- o .: "fielddata" - idCache <- o .: "id_cache" - filterCache <- o .: "filter_cache" - warmer <- o .: "warmer" - flush <- o .: "flush" - refresh <- o .: "refresh" - merges <- o .: "merges" - search <- o .: "search" - getStats <- o .: "get" - indexing <- o .: "indexing" - store <- o .: "store" - docs <- o .: "docs" - NodeIndicesStats <$> (fmap unMS <$> mRecovery .:: "throttle_time_in_millis") - <*> mRecovery .:: "current_as_target" - <*> mRecovery .:: "current_as_source" - <*> mQueryCache .:: "miss_count" - <*> mQueryCache .:: "hit_count" - <*> mQueryCache .:: "evictions" - <*> mQueryCache .:: "memory_size_in_bytes" - <*> suggest .: "current" - <*> (unMS <$> suggest .: "time_in_millis") - <*> suggest .: "total" - <*> translog .: "size_in_bytes" - <*> translog .: "operations" - <*> segments .:? "fixed_bit_set_memory_in_bytes" - <*> segments .: "version_map_memory_in_bytes" - <*> segments .:? 
"index_writer_max_memory_in_bytes" - <*> segments .: "index_writer_memory_in_bytes" - <*> segments .: "memory_in_bytes" - <*> segments .: "count" - <*> completion .: "size_in_bytes" - <*> percolate .: "queries" - <*> percolate .: "memory_size_in_bytes" - <*> percolate .: "current" - <*> (unMS <$> percolate .: "time_in_millis") - <*> percolate .: "total" - <*> fielddata .: "evictions" - <*> fielddata .: "memory_size_in_bytes" - <*> idCache .: "memory_size_in_bytes" - <*> filterCache .: "evictions" - <*> filterCache .: "memory_size_in_bytes" - <*> (unMS <$> warmer .: "total_time_in_millis") - <*> warmer .: "total" - <*> warmer .: "current" - <*> (unMS <$> flush .: "total_time_in_millis") - <*> flush .: "total" - <*> (unMS <$> refresh .: "total_time_in_millis") - <*> refresh .: "total" - <*> merges .: "total_size_in_bytes" - <*> merges .: "total_docs" - <*> (unMS <$> merges .: "total_time_in_millis") - <*> merges .: "total" - <*> merges .: "current_size_in_bytes" - <*> merges .: "current_docs" - <*> merges .: "current" - <*> search .: "fetch_current" - <*> (unMS <$> search .: "fetch_time_in_millis") - <*> search .: "fetch_total" - <*> search .: "query_current" - <*> (unMS <$> search .: "query_time_in_millis") - <*> search .: "query_total" - <*> search .: "open_contexts" - <*> getStats .: "current" - <*> (unMS <$> getStats .: "missing_time_in_millis") - <*> getStats .: "missing_total" - <*> (unMS <$> getStats .: "exists_time_in_millis") - <*> getStats .: "exists_total" - <*> (unMS <$> getStats .: "time_in_millis") - <*> getStats .: "total" - <*> (fmap unMS <$> indexing .:? "throttle_time_in_millis") - <*> indexing .:? "is_throttled" - <*> indexing .:? "noop_update_total" - <*> indexing .: "delete_current" - <*> (unMS <$> indexing .: "delete_time_in_millis") - <*> indexing .: "delete_total" - <*> indexing .: "index_current" - <*> (unMS <$> indexing .: "index_time_in_millis") - <*> indexing .: "index_total" - <*> (unMS <$> store .: "throttle_time_in_millis") - <*> store .: "size_in_bytes" - <*> docs .: "deleted" - <*> docs .: "count" - -instance FromJSON NodeBreakersStats where - parseJSON = withObject "NodeBreakersStats" parse - where - parse o = NodeBreakersStats <$> o .: "parent" - <*> o .: "request" - <*> o .: "fielddata" - -parseNodeStats :: FullNodeId -> Object -> Parser NodeStats -parseNodeStats fnid o = do - NodeStats <$> o .: "name" - <*> pure fnid - <*> o .:? 
"breakers" - <*> o .: "http" - <*> o .: "transport" - <*> o .: "fs" - <*> o .: "network" - <*> o .: "thread_pool" - <*> o .: "jvm" - <*> o .: "process" - <*> o .: "os" - <*> o .: "indices" - -parseNodeInfo :: FullNodeId -> Object -> Parser NodeInfo -parseNodeInfo nid o = - NodeInfo <$> o .: "http_address" - <*> o .: "build" - <*> o .: "version" - <*> o .: "ip" - <*> o .: "host" - <*> o .: "transport_address" - <*> o .: "name" - <*> pure nid - <*> o .: "plugins" - <*> o .: "http" - <*> o .: "transport" - <*> o .: "network" - <*> o .: "thread_pool" - <*> o .: "jvm" - <*> o .: "process" - <*> o .: "os" - <*> o .: "settings" - -instance FromJSON NodePluginInfo where - parseJSON = withObject "NodePluginInfo" parse - where - parse o = NodePluginInfo <$> o .: "site" - <*> o .: "jvm" - <*> o .: "description" - <*> o .: "version" - <*> o .: "name" - -instance FromJSON NodeHTTPInfo where - parseJSON = withObject "NodeHTTPInfo" parse - where - parse o = NodeHTTPInfo <$> o .: "max_content_length_in_bytes" - <*> parseJSON (Object o) - -instance FromJSON BoundTransportAddress where - parseJSON = withObject "BoundTransportAddress" parse - where - parse o = BoundTransportAddress <$> o .: "publish_address" - <*> o .: "bound_address" - -instance FromJSON NodeOSInfo where - parseJSON = withObject "NodeOSInfo" parse - where - parse o = do - swap <- o .: "swap" - mem <- o .: "mem" - NodeOSInfo <$> swap .: "total_in_bytes" - <*> mem .: "total_in_bytes" - <*> o .: "cpu" - <*> o .: "available_processors" - <*> (unMS <$> o .: "refresh_interval_in_millis") - - -instance FromJSON CPUInfo where - parseJSON = withObject "CPUInfo" parse - where - parse o = CPUInfo <$> o .: "cache_size_in_bytes" - <*> o .: "cores_per_socket" - <*> o .: "total_sockets" - <*> o .: "total_cores" - <*> o .: "mhz" - <*> o .: "model" - <*> o .: "vendor" - -instance FromJSON NodeProcessInfo where - parseJSON = withObject "NodeProcessInfo" parse - where - parse o = NodeProcessInfo <$> o .: "mlockall" - <*> o .: "max_file_descriptors" - <*> o .: "id" - <*> (unMS <$> o .: "refresh_interval_in_millis") - -instance FromJSON NodeJVMInfo where - parseJSON = withObject "NodeJVMInfo" parse - where - parse o = NodeJVMInfo <$> o .: "memory_pools" - <*> o .: "gc_collectors" - <*> o .: "mem" - <*> (posixMS <$> o .: "start_time_in_millis") - <*> o .: "vm_vendor" - <*> o .: "vm_version" - <*> o .: "vm_name" - <*> (unJVMVersion <$> o .: "version") - <*> o .: "pid" - -instance FromJSON JVMVersion where - parseJSON (String t) = - JVMVersion <$> parseJSON (String (T.replace "_" "." t)) - parseJSON v = JVMVersion <$> parseJSON v - -instance FromJSON JVMMemoryInfo where - parseJSON = withObject "JVMMemoryInfo" parse - where - parse o = JVMMemoryInfo <$> o .: "direct_max_in_bytes" - <*> o .: "non_heap_max_in_bytes" - <*> o .: "non_heap_init_in_bytes" - <*> o .: "heap_max_in_bytes" - <*> o .: "heap_init_in_bytes" - -instance FromJSON NodeThreadPoolsInfo where - parseJSON = withObject "NodeThreadPoolsInfo" parse - where - parse o = NodeThreadPoolsInfo <$> o .: "refresh" - <*> o .: "management" - <*> o .: "percolate" - <*> o .:? "listener" - <*> o .:? "fetch_shard_started" - <*> o .: "search" - <*> o .: "flush" - <*> o .: "warmer" - <*> o .: "optimize" - <*> o .: "bulk" - <*> o .: "suggest" - <*> o .: "merge" - <*> o .: "snapshot" - <*> o .: "get" - <*> o .:? 
"fetch_shard_store" - <*> o .: "index" - <*> o .: "generic" - -instance FromJSON NodeThreadPoolInfo where - parseJSON = withObject "NodeThreadPoolInfo" parse - where - parse o = do - ka <- maybe (return Nothing) (fmap Just . parseStringInterval) =<< o .:? "keep_alive" - NodeThreadPoolInfo <$> (parseJSON . unStringlyTypeJSON =<< o .: "queue_size") - <*> pure ka - <*> o .:? "min" - <*> o .:? "max" - <*> o .: "type" - -parseStringInterval :: (Monad m) => String -> m NominalDiffTime -parseStringInterval s = case span isNumber s of - ("", _) -> fail "Invalid interval" - (nS, unitS) -> case (readMay nS, readMay unitS) of - (Just n, Just unit) -> return (fromInteger (n * unitNDT unit)) - (Nothing, _) -> fail "Invalid interval number" - (_, Nothing) -> fail "Invalid interval unit" - where - unitNDT Seconds = 1 - unitNDT Minutes = 60 - unitNDT Hours = 60 * 60 - unitNDT Days = 24 * 60 * 60 - unitNDT Weeks = 7 * 24 * 60 * 60 - -instance FromJSON ThreadPoolSize where - parseJSON v = parseAsNumber v <|> parseAsString v - where - parseAsNumber = parseAsInt <=< parseJSON - parseAsInt (-1) = return ThreadPoolUnbounded - parseAsInt n - | n >= 0 = return (ThreadPoolBounded n) - | otherwise = fail "Thread pool size must be >= -1." - parseAsString = withText "ThreadPoolSize" $ \t -> - case first (readMay . T.unpack) (T.span isNumber t) of - (Just n, "k") -> return (ThreadPoolBounded (n * 1000)) - (Just n, "") -> return (ThreadPoolBounded n) - _ -> fail ("Invalid thread pool size " <> T.unpack t) - -instance FromJSON ThreadPoolType where - parseJSON = withText "ThreadPoolType" parse - where - parse "scaling" = return ThreadPoolScaling - parse "fixed" = return ThreadPoolFixed - parse "cached" = return ThreadPoolCached - parse e = fail ("Unexpected thread pool type" <> T.unpack e) - -instance FromJSON NodeTransportInfo where - parseJSON = withObject "NodeTransportInfo" parse - where - parse o = NodeTransportInfo <$> (maybe (return mempty) parseProfiles =<< o .:? "profiles") - <*> parseJSON (Object o) - parseProfiles (Object o) | HM.null o = return [] - parseProfiles v@(Array _) = parseJSON v - parseProfiles Null = return [] - parseProfiles _ = fail "Could not parse profiles" - -instance FromJSON NodeNetworkInfo where - parseJSON = withObject "NodeNetworkInfo" parse - where - parse o = NodeNetworkInfo <$> o .: "primary_interface" - <*> (unMS <$> o .: "refresh_interval_in_millis") - - -instance FromJSON NodeNetworkInterface where - parseJSON = withObject "NodeNetworkInterface" parse - where - parse o = NodeNetworkInterface <$> o .: "mac_address" - <*> o .: "name" - <*> o .: "address" - -newtype MaybeNA a = MaybeNA { unMaybeNA :: Maybe a } - deriving (Show, Eq) - -instance FromJSON a => FromJSON (MaybeNA a) where - parseJSON (String "NA") = pure $ MaybeNA Nothing - parseJSON o = MaybeNA . 
Just <$> parseJSON o - - -data Suggest = Suggest { suggestText :: Text - , suggestName :: Text - , suggestType :: SuggestType - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON Suggest where - toJSON Suggest{..} = object [ "text" .= suggestText - , suggestName .= suggestType - ] - -instance FromJSON Suggest where - parseJSON (Object o) = do - suggestText' <- o .: "text" - let dropTextList = HM.toList $ HM.filterWithKey (\x _ -> x /= "text") o - suggestName' <- case dropTextList of - [(x, _)] -> return x - _ -> fail "error parsing Suggest field name" - suggestType' <- o .: suggestName' - return $ Suggest suggestText' suggestName' suggestType' - parseJSON x = typeMismatch "Suggest" x - -data SuggestType = SuggestTypePhraseSuggester PhraseSuggester - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON SuggestType where - toJSON (SuggestTypePhraseSuggester x) = object ["phrase" .= x] - -instance FromJSON SuggestType where - parseJSON = withObject "SuggestType" parse - where parse o = phraseSuggester `taggedWith` "phrase" - where taggedWith parser k = parser =<< o .: k - phraseSuggester = pure . SuggestTypePhraseSuggester - -data PhraseSuggester = - PhraseSuggester { phraseSuggesterField :: FieldName - , phraseSuggesterGramSize :: Maybe Int - , phraseSuggesterRealWordErrorLikelihood :: Maybe Int - , phraseSuggesterConfidence :: Maybe Int - , phraseSuggesterMaxErrors :: Maybe Int - , phraseSuggesterSeparator :: Maybe Text - , phraseSuggesterSize :: Maybe Size - , phraseSuggesterAnalyzer :: Maybe Analyzer - , phraseSuggesterShardSize :: Maybe Int - , phraseSuggesterHighlight :: Maybe PhraseSuggesterHighlighter - , phraseSuggesterCollate :: Maybe PhraseSuggesterCollate - , phraseSuggesterCandidateGenerators :: [DirectGenerators] - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON PhraseSuggester where - toJSON PhraseSuggester{..} = omitNulls [ "field" .= phraseSuggesterField - , "gram_size" .= phraseSuggesterGramSize - , "real_word_error_likelihood" .= phraseSuggesterRealWordErrorLikelihood - , "confidence" .= phraseSuggesterConfidence - , "max_errors" .= phraseSuggesterMaxErrors - , "separator" .= phraseSuggesterSeparator - , "size" .= phraseSuggesterSize - , "analyzer" .= phraseSuggesterAnalyzer - , "shard_size" .= phraseSuggesterShardSize - , "highlight" .= phraseSuggesterHighlight - , "collate" .= phraseSuggesterCollate - , "direct_generator" .= phraseSuggesterCandidateGenerators - ] - -instance FromJSON PhraseSuggester where - parseJSON = withObject "PhraseSuggester" parse - where parse o = PhraseSuggester - <$> o .: "field" - <*> o .:? "gram_size" - <*> o .:? "real_word_error_likelihood" - <*> o .:? "confidence" - <*> o .:? "max_errors" - <*> o .:? "separator" - <*> o .:? "size" - <*> o .:? "analyzer" - <*> o .:? "shard_size" - <*> o .:? "highlight" - <*> o .:? "collate" - <*> o .:? 
"direct_generator" .!= [] - -mkPhraseSuggester :: FieldName -> PhraseSuggester -mkPhraseSuggester fName = - PhraseSuggester fName Nothing Nothing Nothing Nothing Nothing Nothing - Nothing Nothing Nothing Nothing [] - -data PhraseSuggesterHighlighter = - PhraseSuggesterHighlighter { phraseSuggesterHighlighterPreTag :: Text - , phraseSuggesterHighlighterPostTag :: Text - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON PhraseSuggesterHighlighter where - toJSON PhraseSuggesterHighlighter{..} = - object [ "pre_tag" .= phraseSuggesterHighlighterPreTag - , "post_tag" .= phraseSuggesterHighlighterPostTag - ] - -instance FromJSON PhraseSuggesterHighlighter where - parseJSON = withObject "PhraseSuggesterHighlighter" parse - where parse o = PhraseSuggesterHighlighter - <$> o .: "pre_tag" - <*> o .: "post_tag" - -data PhraseSuggesterCollate = - PhraseSuggesterCollate { phraseSuggesterCollateTemplateQuery :: TemplateQueryInline - , phraseSuggesterCollatePrune :: Bool - } - deriving (Show, Generic, Eq, Read, Typeable) - -instance ToJSON PhraseSuggesterCollate where - toJSON PhraseSuggesterCollate{..} = object [ "query" .= object - [ "inline" .= (inline phraseSuggesterCollateTemplateQuery) - ] - , "params" .= (params phraseSuggesterCollateTemplateQuery) - , "prune" .= phraseSuggesterCollatePrune - ] - -instance FromJSON PhraseSuggesterCollate where - parseJSON (Object o) = do - query' <- o .: "query" - inline' <- query' .: "inline" - params' <- o .: "params" - prune' <- o .:? "prune" .!= False - return $ PhraseSuggesterCollate (TemplateQueryInline inline' params') prune' - parseJSON x = typeMismatch "PhraseSuggesterCollate" x - -data SuggestOptions = - SuggestOptions { suggestOptionsText :: Text - , suggestOptionsScore :: Double - , suggestOptionsFreq :: Maybe Int - , suggestOptionsHighlighted :: Maybe Text - } - deriving (Eq, Read, Show) - -instance FromJSON SuggestOptions where - parseJSON = withObject "SuggestOptions" parse - where parse o = SuggestOptions - <$> o .: "text" - <*> o .: "score" - <*> o .:? "freq" - <*> o .:? 
"highlighted" - -data SuggestResponse = - SuggestResponse { suggestResponseText :: Text - , suggestResponseOffset :: Int - , suggestResponseLength :: Int - , suggestResponseOptions :: [SuggestOptions] - } - deriving (Eq, Read, Show) - -instance FromJSON SuggestResponse where - parseJSON = withObject "SuggestResponse" parse - where parse o = SuggestResponse - <$> o .: "text" - <*> o .: "offset" - <*> o .: "length" - <*> o .: "options" - -data NamedSuggestionResponse = - NamedSuggestionResponse { nsrName :: Text - , nsrResponses :: [SuggestResponse] - } - deriving (Eq, Read, Show) - -instance FromJSON NamedSuggestionResponse where - parseJSON (Object o) = do - suggestionName' <- case HM.toList o of - [(x, _)] -> return x - _ -> fail "error parsing NamedSuggestionResponse name" - suggestionResponses' <- o .: suggestionName' - return $ NamedSuggestionResponse suggestionName' suggestionResponses' - - parseJSON x = typeMismatch "NamedSuggestionResponse" x - -data DirectGeneratorSuggestModeTypes = DirectGeneratorSuggestModeMissing - | DirectGeneratorSuggestModePopular - | DirectGeneratorSuggestModeAlways - deriving (Show, Eq, Read, Generic, Typeable) - -instance ToJSON DirectGeneratorSuggestModeTypes where - toJSON DirectGeneratorSuggestModeMissing = "missing" - toJSON DirectGeneratorSuggestModePopular = "popular" - toJSON DirectGeneratorSuggestModeAlways = "always" - -instance FromJSON DirectGeneratorSuggestModeTypes where - parseJSON = withText "DirectGeneratorSuggestModeTypes" parse - where parse "missing" = pure DirectGeneratorSuggestModeMissing - parse "popular" = pure DirectGeneratorSuggestModePopular - parse "always" = pure DirectGeneratorSuggestModeAlways - parse f = fail ("Unexpected DirectGeneratorSuggestModeTypes: " <> show f) - -data DirectGenerators = DirectGenerators - { directGeneratorsField :: FieldName - , directGeneratorsSize :: Maybe Int - , directGeneratorSuggestMode :: DirectGeneratorSuggestModeTypes - , directGeneratorMaxEdits :: Maybe Double - , directGeneratorPrefixLength :: Maybe Int - , directGeneratorMinWordLength :: Maybe Int - , directGeneratorMaxInspections :: Maybe Int - , directGeneratorMinDocFreq :: Maybe Double - , directGeneratorMaxTermFreq :: Maybe Double - , directGeneratorPreFilter :: Maybe Text - , directGeneratorPostFilter :: Maybe Text - } - deriving (Show, Eq, Read, Generic, Typeable) - - -instance ToJSON DirectGenerators where - toJSON DirectGenerators{..} = omitNulls [ "field" .= directGeneratorsField - , "size" .= directGeneratorsSize - , "suggest_mode" .= directGeneratorSuggestMode - , "max_edits" .= directGeneratorMaxEdits - , "prefix_length" .= directGeneratorPrefixLength - , "min_word_length" .= directGeneratorMinWordLength - , "max_inspections" .= directGeneratorMaxInspections - , "min_doc_freq" .= directGeneratorMinDocFreq - , "max_term_freq" .= directGeneratorMaxTermFreq - , "pre_filter" .= directGeneratorPreFilter - , "post_filter" .= directGeneratorPostFilter - ] - -instance FromJSON DirectGenerators where - parseJSON = withObject "DirectGenerators" parse - where parse o = DirectGenerators - <$> o .: "field" - <*> o .:? "size" - <*> o .: "suggest_mode" - <*> o .:? "max_edits" - <*> o .:? "prefix_length" - <*> o .:? "min_word_length" - <*> o .:? "max_inspections" - <*> o .:? "min_doc_freq" - <*> o .:? "max_term_freq" - <*> o .:? "pre_filter" - <*> o .:? 
"post_filter" - -mkDirectGenerators :: FieldName -> DirectGenerators -mkDirectGenerators fn = DirectGenerators fn Nothing DirectGeneratorSuggestModeMissing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing diff --git a/src/Database/V5/Bloodhound/Internal/Client.hs b/src/Database/V5/Bloodhound/Internal/Client.hs index 11f6653..27c807b 100644 --- a/src/Database/V5/Bloodhound/Internal/Client.hs +++ b/src/Database/V5/Bloodhound/Internal/Client.hs @@ -236,10 +236,6 @@ data UpdatableIndexSetting = NumberOfReplicas ReplicaCount -- ^ Analysis is not a dynamic setting and can only be performed on a closed index. deriving (Eq, Show) -oPath :: ToJSON a => NonEmpty Text -> a -> Value -oPath (k :| []) v = object [k .= v] -oPath (k:| (h:t)) v = object [k .= oPath (h :| t) v] - attrFilterJSON :: NonEmpty NodeAttrFilter -> Value attrFilterJSON fs = object [ n .= T.intercalate "," (toList vs) | NodeAttrFilter (NodeAttrName n) vs <- toList fs] diff --git a/tests/V1/Test/Aggregation.hs b/tests/V1/Test/Aggregation.hs new file mode 100644 index 0000000..0876832 --- /dev/null +++ b/tests/V1/Test/Aggregation.hs @@ -0,0 +1,132 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Aggregation (spec) where + +import Test.Common +import Test.Import + +import Control.Error (fmapL, note) +import qualified Data.Map as M +import qualified Database.V1.Bloodhound + +spec :: Spec +spec = + describe "Aggregation API" $ do + it "returns term aggregation results" $ withTestEnv $ do + _ <- insertData + let terms = TermsAgg $ mkTermsAggregation "user" + let search = mkAggregateSearch Nothing $ mkAggregations "users" terms + searchExpectAggs search + searchValidBucketAgg search "users" toTerms + + it "return sub-aggregation results" $ withTestEnv $ do + _ <- insertData + let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age" + agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs} + search = mkAggregateSearch Nothing $ mkAggregations "users" agg + reply <- searchByIndex testIndex search + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + usersAggResults = result >>= aggregations >>= toTerms "users" + subAggResults = usersAggResults >>= (listToMaybe . 
buckets) >>= termsAggs >>= toTerms "age_agg" + subAggResultsExists = isJust subAggResults + liftIO $ subAggResultsExists `shouldBe` True + + it "returns cardinality aggregation results" $ withTestEnv $ do + _ <- insertData + let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user" + let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality + let search' = search { Database.V1.Bloodhound.from = From 0, size = Size 0 } + searchExpectAggs search' + let docCountPair k n = (k, object ["value" .= Number n]) + res <- searchTweets search' + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1])) + + it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do + _ <- insertData + let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst } + let search = mkAggregateSearch Nothing $ mkAggregations "users" terms + searchExpectAggs search + searchValidBucketAgg search "users" toTerms + + -- One of these fails with 1.7.3 + it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do + _ <- insertData + searchTermsAggHint [Map, Ordinals] + + it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do + _ <- insertData + searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals] + + it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do + _ <- insertData + searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map] + -- One of the above. + + it "can execute value_count aggregations" $ withTestEnv $ do + _ <- insertData + _ <- insertOther + let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <> + mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus"))) + let search = mkAggregateSearch Nothing ags + let docCountPair k n = (k, object ["value" .= Number n]) + res <- searchTweets search + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2 + , docCountPair "bogus_count" 0 + ])) + + it "can execute date_range aggregations" $ withTestEnv $ do + let now = fromGregorian 2015 3 14 + let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0 + let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0 + let oldDoc = exampleTweet { postDate = ltAMonthAgo } + let newDoc = exampleTweet { postDate = ltAWeekAgo } + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1") + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2") + _ <- refreshIndex testIndex + let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth]) + let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMWeek]) + let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek]) + let ags = mkAggregations "date_ranges" (DateRangeAgg agg) + let search = mkAggregateSearch Nothing ags + res <- searchTweets search + liftIO $ hitsTotal . 
searchHits <$> res `shouldBe` Right 2 + let bucks = do magrs <- fmapL show (aggregations <$> res) + agrs <- note "no aggregations returned" magrs + rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs + parseEither parseJSON rawBucks + let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0 + let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0 + liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*" + (Just fromMonthT) + (Just "2015-02-14T00:00:00.000Z") + Nothing + Nothing + 2 + Nothing + , DateRangeResult "2015-03-07T00:00:00.000Z-*" + (Just fromWeekT) + (Just "2015-03-07T00:00:00.000Z") + Nothing + Nothing + 1 + Nothing + ] + + it "returns date histogram aggregation results" $ withTestEnv $ do + _ <- insertData + let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute + let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram) + searchExpectAggs search + searchValidBucketAgg search "byDate" toDateHistogram + + it "can execute missing aggregations" $ withTestEnv $ do + _ <- insertData + _ <- insertExtra + let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra")) + let search = mkAggregateSearch Nothing ags + let docCountPair k n = (k, object ["doc_count" .= Number n]) + res <- searchTweets search + liftIO $ + fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1])) diff --git a/tests/V1/Test/ApproxEq.hs b/tests/V1/Test/ApproxEq.hs new file mode 100644 index 0000000..551a06a --- /dev/null +++ b/tests/V1/Test/ApproxEq.hs @@ -0,0 +1,63 @@ +{-# LANGUAGE DefaultSignatures #-} +{-# LANGUAGE OverloadedStrings #-} + +module Test.ApproxEq where + +import Database.V1.Bloodhound + +import Test.Import + +import qualified Data.List.NonEmpty as NE + +-- | Typeclass for "equal where it matters". 
Use this to specify +-- less-strict equivalence for things such as lists that can wind up +-- in an unpredictable order +class ApproxEq a where + (=~) :: a -> a -> Bool + + showApproxEq :: a -> String + default showApproxEq :: (Show a) => a -> String + showApproxEq = show + +(==~) :: (ApproxEq a) => a -> a -> Property +a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b) + +instance ApproxEq NominalDiffTime where (=~) = (==) +instance ApproxEq Bool where (=~) = (==) +instance ApproxEq Int where (=~) = (==) +instance (Eq a, Show a) => ApproxEq (Maybe a) where (=~) = (==) +instance ApproxEq Char where + (=~) = (==) + +instance ApproxEq NodeAttrFilter where (=~) = (==) +instance ApproxEq NodeAttrName where (=~) = (==) +instance (Eq a, Show a) => ApproxEq (NonEmpty a) where (=~) = (==) +instance (ApproxEq l, Show l, ApproxEq r, Show r) => ApproxEq (Either l r) where + Left a =~ Left b = a =~ b + Right a =~ Right b = a =~ b + _ =~ _ = False + showApproxEq (Left x) = "Left " <> showApproxEq x + showApproxEq (Right x) = "Right " <> showApproxEq x +instance (ApproxEq a, Show a) => ApproxEq [a] where + -- compare elementwise, and require equal lengths so zipWith does not + -- silently ignore the tail of the longer list + as =~ bs = length as == length bs && and (zipWith (=~) as bs) +instance ApproxEq ReplicaCount where (=~) = (==) +instance ApproxEq ReplicaBounds where (=~) = (==) +instance ApproxEq Bytes where (=~) = (==) +instance ApproxEq AllocationPolicy where (=~) = (==) +instance ApproxEq InitialShardCount where (=~) = (==) +instance ApproxEq FSType where (=~) = (==) + +-- | Due to the way nodeattrfilters get serialized here, they may come +-- out in a different order, but they are morally equivalent +instance ApproxEq UpdatableIndexSetting where + RoutingAllocationInclude a =~ RoutingAllocationInclude b = + NE.sort a =~ NE.sort b + RoutingAllocationExclude a =~ RoutingAllocationExclude b = + NE.sort a =~ NE.sort b + RoutingAllocationRequire a =~ RoutingAllocationRequire b = + NE.sort a =~ NE.sort b + a =~ b = a == b + showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs)) + showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs)) + showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs)) + showApproxEq x = show x diff --git a/tests/V1/Test/BulkAPI.hs b/tests/V1/Test/BulkAPI.hs new file mode 100644 index 0000000..496b0a6 --- /dev/null +++ b/tests/V1/Test/BulkAPI.hs @@ -0,0 +1,77 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.BulkAPI (spec) where + +import Test.Common +import Test.Import + +import qualified Data.Vector as V +import qualified Lens.Micro.Aeson as LMA + +newtype BulkTest = + BulkTest Text + deriving (Eq, Show) + +instance ToJSON BulkTest where + toJSON (BulkTest name') = + object ["name" .= name'] + +instance FromJSON BulkTest where + parseJSON = withObject "BulkTest" parse + where + parse o = do + t <- o .: "name" + BulkTest <$> parseJSON t + +spec :: Spec +spec = + describe "Bulk API" $ + it "inserts all documents we request" $ withTestEnv $ do + _ <- insertData + let firstTest = BulkTest "blah" + let secondTest = BulkTest "bloo" + let firstDoc = BulkIndex testIndex + testMapping (DocId "2") (toJSON firstTest) + let secondDoc = BulkCreate testIndex + testMapping (DocId "3") (toJSON secondTest) + let stream = V.fromList [firstDoc, secondDoc] + _ <- bulk stream + _ <- refreshIndex testIndex + fDoc <- getDocument testIndex testMapping (DocId "2") + sDoc <- getDocument testIndex testMapping (DocId "3") + -- note that both bulk operations above specified explicit doc ids; + -- documents indexed without an id would have received 
autogenerated ids, which we could not look up here. + let maybeFirst = + eitherDecode + $ responseBody fDoc + :: Either String (EsResult BulkTest) + let maybeSecond = + eitherDecode + $ responseBody sDoc + :: Either String (EsResult BulkTest) + liftIO $ do + fmap getSource maybeFirst `shouldBe` Right (Just firstTest) + fmap getSource maybeSecond `shouldBe` Right (Just secondTest) + -- We also check that both docs show up in a match_all query. + let query = MatchAllQuery Nothing + let search = mkSearch (Just query) Nothing + resp <- searchByIndex testIndex search + parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Value)) + case parsed of + Left e -> + liftIO $ expectationFailure ("Expected a SearchResult but got: " <> show e) + (Right sr) -> do + liftIO $ + hitsTotal (searchHits sr) `shouldBe` 3 + let nameList :: [Text] + nameList = + hits (searchHits sr) + ^.. traverse + . to hitSource + . _Just + . LMA.key "name" + . _String + liftIO $ + nameList + `shouldBe` ["blah","bloo"] diff --git a/tests/V1/Test/Common.hs b/tests/V1/Test/Common.hs new file mode 100644 index 0000000..69c3d25 --- /dev/null +++ b/tests/V1/Test/Common.hs @@ -0,0 +1,289 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} + +module Test.Common where + +import Test.Import + +import qualified Data.Map as M +import qualified Data.Version as Vers +import qualified Network.HTTP.Types.Status as NHTS + +testServer :: Server +testServer = Server "http://localhost:9200" +testIndex :: IndexName +testIndex = IndexName "bloodhound-tests-twitter-1" +testMapping :: MappingName +testMapping = MappingName "tweet" + +withTestEnv :: BH IO a -> IO a +withTestEnv = withBH defaultManagerSettings testServer + +data Location = Location { lat :: Double + , lon :: Double } deriving (Eq, Show) + +data Tweet = Tweet { user :: Text + , postDate :: UTCTime + , message :: Text + , age :: Int + , location :: Location + , extra :: Maybe Text } + deriving (Eq, Show) + +$(deriveJSON defaultOptions ''Location) +$(deriveJSON defaultOptions ''Tweet) + +data ParentMapping = ParentMapping deriving (Eq, Show) + +instance ToJSON ParentMapping where + toJSON ParentMapping = + object ["properties" .= + object [ "user" .= object ["type" .= ("string" :: Text) + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)] + ]] + +es13 :: Vers.Version +es13 = Vers.Version [1, 3, 0] [] + +es12 :: Vers.Version +es12 = Vers.Version [1, 2, 0] [] + +es11 :: Vers.Version +es11 = Vers.Version [1, 1, 0] [] + +es14 :: Vers.Version +es14 = Vers.Version [1, 4, 0] [] + +es15 :: Vers.Version +es15 = Vers.Version [1, 5, 0] [] + +es16 :: Vers.Version +es16 = Vers.Version [1, 6, 0] [] + +es20 :: Vers.Version +es20 = Vers.Version [2, 0, 0] [] + +es50 :: Vers.Version +es50 = Vers.Version [5, 0, 0] [] + +getServerVersion :: IO (Maybe Vers.Version) +getServerVersion = fmap extractVersion <$> withTestEnv getStatus + where + extractVersion = versionNumber . number . 
version + +createExampleIndex :: (MonadBH m) => m Reply +createExampleIndex = + createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex + +deleteExampleIndex :: (MonadBH m) => m Reply +deleteExampleIndex = + deleteIndex testIndex + +validateStatus :: Show body => Response body -> Int -> Expectation +validateStatus resp expected = + if actual == expected + then return () + else expectationFailure ("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body) + where + actual = NHTS.statusCode (responseStatus resp) + body = responseBody resp + +data ChildMapping = ChildMapping deriving (Eq, Show) + +instance ToJSON ChildMapping where + toJSON ChildMapping = + object ["_parent" .= object ["type" .= ("parent" :: Text)] + , "properties" .= + object [ "user" .= object ["type" .= ("string" :: Text) + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)] + ]] + +data TweetMapping = TweetMapping deriving (Eq, Show) + +instance ToJSON TweetMapping where + toJSON TweetMapping = + object ["tweet" .= + object ["properties" .= + object [ "user" .= object [ "type" .= ("string" :: Text) + ] + -- Serializing the date as a date is breaking other tests, mysteriously. + -- , "postDate" .= object [ "type" .= ("date" :: Text) + -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] + , "message" .= object ["type" .= ("string" :: Text)] + , "age" .= object ["type" .= ("integer" :: Text)] + , "location" .= object ["type" .= ("geo_point" :: Text)] + , "extra" .= object ["type" .= ("string" :: Text), "index" .= ("not_analyzed" :: Text)] + ]]] + +exampleTweet :: Tweet +exampleTweet = Tweet { user = "bitemyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 10) + , message = "Use haskell!" + , age = 10000 + , location = Location 40.12 (-71.34) + , extra = Nothing } + +tweetWithExtra :: Tweet +tweetWithExtra = Tweet { user = "bitemyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 10) + , message = "Use haskell!" + , age = 10000 + , location = Location 40.12 (-71.34) + , extra = Just "blah blah" } + +newAge :: Int +newAge = 31337 + +newUser :: Text +newUser = "someotherapp" + +tweetPatch :: Value +tweetPatch = + object [ "age" .= newAge + , "user" .= newUser + ] + +patchedTweet :: Tweet +patchedTweet = exampleTweet{age = newAge, user = newUser} + +otherTweet :: Tweet +otherTweet = Tweet { user = "notmyapp" + , postDate = UTCTime + (ModifiedJulianDay 55000) + (secondsToDiffTime 11) + , message = "Use haskell!" 
+ , age = 1000 + , location = Location 40.12 (-71.34) + , extra = Nothing } + +resetIndex :: BH IO () +resetIndex = do + _ <- deleteExampleIndex + _ <- createExampleIndex + _ <- putMapping testIndex testMapping TweetMapping + return () + +insertData :: BH IO Reply +insertData = do + resetIndex + insertData' defaultIndexDocumentSettings + +insertData' :: IndexDocumentSettings -> BH IO Reply +insertData' ids = do + r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1") + _ <- refreshIndex testIndex + return r + +updateData :: BH IO Reply +updateData = do + r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1") + _ <- refreshIndex testIndex + return r + +insertOther :: BH IO () +insertOther = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2") + _ <- refreshIndex testIndex + return () + +insertExtra :: BH IO () +insertExtra = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4") + _ <- refreshIndex testIndex + return () + +insertWithSpaceInId :: BH IO () +insertWithSpaceInId = do + _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World") + _ <- refreshIndex testIndex + return () + +searchTweet :: Search -> BH IO (Either EsError Tweet) +searchTweet search = do + result <- searchTweets search + let myTweet :: Either EsError Tweet + myTweet = grabFirst result + return myTweet + +searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet)) +searchTweets search = parseEsResponse =<< searchByIndex testIndex search + +searchExpectNoResults :: Search -> BH IO () +searchExpectNoResults search = do + result <- searchTweets search + let emptyHits = fmap (hits . searchHits) result + liftIO $ + emptyHits `shouldBe` Right [] + +searchExpectAggs :: Search -> BH IO () +searchExpectAggs search = do + reply <- searchByIndex testIndex search + let isEmpty x = return (M.null x) + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + liftIO $ + (result >>= aggregations >>= isEmpty) `shouldBe` Just False + +searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) => + Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO () +searchValidBucketAgg search aggKey extractor = do + reply <- searchByIndex testIndex search + let bucketDocs = docCount . head . buckets + let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) + let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x) + liftIO $ + count `shouldBe` Just 1 + +searchTermsAggHint :: [ExecutionHint] -> BH IO () +searchTermsAggHint hints = do + let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint } + let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint + forM_ hints $ searchExpectAggs . search + forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms) + +searchTweetHighlight :: Search + -> BH IO (Either EsError (Maybe HitHighlight)) +searchTweetHighlight search = do + result <- searchTweets search + let tweetHit :: Either EsError (Maybe (Hit Tweet)) + tweetHit = fmap (headMay . hits . searchHits) result + myHighlight :: Either EsError (Maybe HitHighlight) + myHighlight = (join . 
fmap hitHighlight) <$> tweetHit + return myHighlight + +searchExpectSource :: Source -> Either EsError Value -> BH IO () +searchExpectSource src expected = do + _ <- insertData + let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") + let search = (mkSearch (Just query) Nothing) { source = Just src } + reply <- searchByIndex testIndex search + result <- parseEsResponse reply + let value = grabFirst result + liftIO $ + value `shouldBe` expected + +atleast :: Vers.Version -> IO Bool +atleast v = getServerVersion >>= \x -> return $ x >= Just v + +atmost :: Vers.Version -> IO Bool +atmost v = getServerVersion >>= \x -> return $ x <= Just v + +is :: Vers.Version -> IO Bool +is v = getServerVersion >>= \x -> return $ x == Just v diff --git a/tests/V1/Test/Documents.hs b/tests/V1/Test/Documents.hs new file mode 100644 index 0000000..7eaa1e6 --- /dev/null +++ b/tests/V1/Test/Documents.hs @@ -0,0 +1,52 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Documents where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "document API" $ do + it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do + _ <- insertData + _ <- updateData + docInserted <- getDocument testIndex testMapping (DocId "1") + let newTweet = eitherDecode + (responseBody docInserted) :: Either String (EsResult Tweet) + liftIO $ fmap getSource newTweet `shouldBe` Right (Just patchedTweet) + + it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do + _ <- insertWithSpaceInId + docInserted <- getDocument testIndex testMapping (DocId "Hello World") + let newTweet = eitherDecode + (responseBody docInserted) :: Either String (EsResult Tweet) + liftIO $ fmap getSource newTweet `shouldBe` Right (Just exampleTweet) + + it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do + doc <- getDocument testIndex testMapping (DocId "bogus") + let noTweet = eitherDecode + (responseBody doc) :: Either String (EsResult Tweet) + liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing + + it "can use optimistic concurrency control" $ withTestEnv $ do + let ev = ExternalDocVersion minBound + let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev } + resetIndex + res <- insertData' cfg + liftIO $ isCreated res `shouldBe` True + res' <- insertData' cfg + liftIO $ isVersionConflict res' `shouldBe` True + + it "indexes two documents in a parent/child relationship and checks that the child exists" $ withTestEnv $ do + resetIndex + let validateStatus' stat = liftIO . flip validateStatus stat + _ <- validateStatus' 200 =<< putMapping testIndex (MappingName "child") ChildMapping + _ <- validateStatus' 200 =<< putMapping testIndex (MappingName "parent") ParentMapping + _ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1") + let parent = (Just . DocumentParent . 
DocId) "1" + ids = IndexDocumentSettings NoVersionControl parent + _ <- validateStatus' 201 =<< indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2") + _ <- refreshIndex testIndex + exists <- documentExists testIndex (MappingName "child") parent (DocId "2") + liftIO $ exists `shouldBe` True diff --git a/tests/V1/Test/Generators.hs b/tests/V1/Test/Generators.hs new file mode 100644 index 0000000..eac7052 --- /dev/null +++ b/tests/V1/Test/Generators.hs @@ -0,0 +1,432 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TemplateHaskell #-} + +module Test.Generators where + +import Database.V1.Bloodhound + +import Test.Import + +import qualified Data.HashMap.Strict as HM +import qualified Data.List as L +import qualified Data.List.NonEmpty as NE +import qualified Data.Map as M +import qualified Data.Text as T +import qualified Data.Version as Vers +import Test.QuickCheck.TH.Generators + +import Test.ApproxEq + +instance Arbitrary NominalDiffTime where + arbitrary = fromInteger <$> arbitrary + +#if !MIN_VERSION_QuickCheck(2,8,0) +instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where + arbitrary = M.fromList <$> arbitrary +#endif + +instance Arbitrary Text where + arbitrary = T.pack <$> arbitrary + +instance Arbitrary UTCTime where + arbitrary = UTCTime + <$> arbitrary + <*> (fromRational . toRational <$> choose (0::Double, 86400)) + +instance Arbitrary Day where + arbitrary = + ModifiedJulianDay . (2000 +) <$> arbitrary + shrink = + (ModifiedJulianDay <$>) . shrink . toModifiedJulianDay + +#if !MIN_VERSION_QuickCheck(2,9,0) +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = liftA2 (:|) arbitrary arbitrary +#endif + +arbitraryScore :: Gen Score +arbitraryScore = fmap getPositive <$> arbitrary + +instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where + arbitrary = Hit <$> arbitrary + <*> arbitrary + <*> arbitrary + <*> arbitraryScore + <*> arbitrary + <*> arbitrary + +instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where + arbitrary = reduceSize $ do + tot <- getPositive <$> arbitrary + score <- arbitraryScore + hs <- arbitrary + return $ SearchHits tot score hs + + +reduceSize :: Gen a -> Gen a +reduceSize f = sized $ \n -> resize (n `div` 2) f + +arbitraryAlphaNum :: Gen Char +arbitraryAlphaNum = oneof [choose ('a', 'z') + ,choose ('A','Z') + , choose ('0', '9')] + +instance Arbitrary RoutingValue where + arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum + +instance Arbitrary AliasRouting where + arbitrary = oneof [allAlias + ,one + ,theOther + ,both'] + where one = GranularAliasRouting + <$> (Just <$> arbitrary) + <*> pure Nothing + theOther = GranularAliasRouting Nothing + <$> (Just <$> arbitrary) + both' = GranularAliasRouting + <$> (Just <$> arbitrary) + <*> (Just <$> arbitrary) + allAlias = AllAliasRouting <$> arbitrary + + + +instance Arbitrary FieldName where + arbitrary = + FieldName + . T.pack + <$> listOf1 arbitraryAlphaNum + + +#if MIN_VERSION_base(4,10,0) +-- Test.QuickCheck.Modifiers + +qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a +qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs) +qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!" 
+ +instance Arbitrary a => Arbitrary (NonEmpty a) where + arbitrary = + qcNonEmptyToNonEmpty + <$> arbitrary +#endif + +instance Arbitrary RegexpFlags where + arbitrary = oneof [ pure AllRegexpFlags + , pure NoRegexpFlags + , SomeRegexpFlags <$> genUniqueFlags + ] + where genUniqueFlags = + NE.fromList . L.nub + <$> listOf1 arbitrary + +instance Arbitrary IndexAliasCreate where + arbitrary = + IndexAliasCreate + <$> arbitrary + <*> reduceSize arbitrary + +instance Arbitrary ReplicaBounds where + arbitrary = oneof [ replicasBounded + , replicasLowerBounded + , pure ReplicasUnbounded + ] + where replicasBounded = do + Positive a <- arbitrary + Positive b <- arbitrary + return (ReplicasBounded a b) + replicasLowerBounded = do + Positive a <- arbitrary + return (ReplicasLowerBounded a) + +instance Arbitrary NodeAttrName where + arbitrary = + NodeAttrName + . T.pack + <$> listOf1 arbitraryAlphaNum + + +instance Arbitrary NodeAttrFilter where + arbitrary = do + n <- arbitrary + s:ss <- listOf1 (listOf1 arbitraryAlphaNum) + let ts = T.pack <$> s :| ss + return (NodeAttrFilter n ts) + +instance Arbitrary VersionNumber where + arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary + where + mk versions = VersionNumber (Vers.Version versions []) + +instance Arbitrary TemplateQueryKeyValuePairs where + arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary + shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . shrink $ HM.toList x + +makeArbitrary ''FilteredQuery +instance Arbitrary FilteredQuery where arbitrary = reduceSize arbitraryFilteredQuery +makeArbitrary ''Query +instance Arbitrary Query where arbitrary = reduceSize arbitraryQuery +makeArbitrary ''Filter +instance Arbitrary Filter where arbitrary = reduceSize arbitraryFilter +makeArbitrary ''IndexName +instance Arbitrary IndexName where arbitrary = arbitraryIndexName +makeArbitrary ''MappingName +instance Arbitrary MappingName where arbitrary = arbitraryMappingName +makeArbitrary ''DocId +instance Arbitrary DocId where arbitrary = arbitraryDocId +makeArbitrary ''Version +instance Arbitrary Version where arbitrary = arbitraryVersion +makeArbitrary ''BuildHash +instance Arbitrary BuildHash where arbitrary = arbitraryBuildHash +makeArbitrary ''IndexAliasRouting +instance Arbitrary IndexAliasRouting where arbitrary = arbitraryIndexAliasRouting +makeArbitrary ''ShardCount +instance Arbitrary ShardCount where arbitrary = arbitraryShardCount +makeArbitrary ''ReplicaCount +instance Arbitrary ReplicaCount where arbitrary = arbitraryReplicaCount +makeArbitrary ''TemplateName +instance Arbitrary TemplateName where arbitrary = arbitraryTemplateName +makeArbitrary ''TemplatePattern +instance Arbitrary TemplatePattern where arbitrary = arbitraryTemplatePattern +makeArbitrary ''QueryString +instance Arbitrary QueryString where arbitrary = arbitraryQueryString +makeArbitrary ''CacheName +instance Arbitrary CacheName where arbitrary = arbitraryCacheName +makeArbitrary ''CacheKey +instance Arbitrary CacheKey where arbitrary = arbitraryCacheKey +makeArbitrary ''Existence +instance Arbitrary Existence where arbitrary = arbitraryExistence +makeArbitrary ''CutoffFrequency +instance Arbitrary CutoffFrequency where arbitrary = arbitraryCutoffFrequency +makeArbitrary ''Analyzer +instance Arbitrary Analyzer where arbitrary = arbitraryAnalyzer +makeArbitrary ''MaxExpansions +instance Arbitrary MaxExpansions where arbitrary = arbitraryMaxExpansions +makeArbitrary ''Lenient +instance Arbitrary Lenient where arbitrary 
= arbitraryLenient +makeArbitrary ''Tiebreaker +instance Arbitrary Tiebreaker where arbitrary = arbitraryTiebreaker +makeArbitrary ''Boost +instance Arbitrary Boost where arbitrary = arbitraryBoost +makeArbitrary ''BoostTerms +instance Arbitrary BoostTerms where arbitrary = arbitraryBoostTerms +makeArbitrary ''MinimumMatch +instance Arbitrary MinimumMatch where arbitrary = arbitraryMinimumMatch +makeArbitrary ''DisableCoord +instance Arbitrary DisableCoord where arbitrary = arbitraryDisableCoord +makeArbitrary ''IgnoreTermFrequency +instance Arbitrary IgnoreTermFrequency where arbitrary = arbitraryIgnoreTermFrequency +makeArbitrary ''MinimumTermFrequency +instance Arbitrary MinimumTermFrequency where arbitrary = arbitraryMinimumTermFrequency +makeArbitrary ''MaxQueryTerms +instance Arbitrary MaxQueryTerms where arbitrary = arbitraryMaxQueryTerms +makeArbitrary ''Fuzziness +instance Arbitrary Fuzziness where arbitrary = arbitraryFuzziness +makeArbitrary ''PrefixLength +instance Arbitrary PrefixLength where arbitrary = arbitraryPrefixLength +makeArbitrary ''TypeName +instance Arbitrary TypeName where arbitrary = arbitraryTypeName +makeArbitrary ''PercentMatch +instance Arbitrary PercentMatch where arbitrary = arbitraryPercentMatch +makeArbitrary ''StopWord +instance Arbitrary StopWord where arbitrary = arbitraryStopWord +makeArbitrary ''QueryPath +instance Arbitrary QueryPath where arbitrary = arbitraryQueryPath +makeArbitrary ''AllowLeadingWildcard +instance Arbitrary AllowLeadingWildcard where arbitrary = arbitraryAllowLeadingWildcard +makeArbitrary ''LowercaseExpanded +instance Arbitrary LowercaseExpanded where arbitrary = arbitraryLowercaseExpanded +makeArbitrary ''EnablePositionIncrements +instance Arbitrary EnablePositionIncrements where arbitrary = arbitraryEnablePositionIncrements +makeArbitrary ''AnalyzeWildcard +instance Arbitrary AnalyzeWildcard where arbitrary = arbitraryAnalyzeWildcard +makeArbitrary ''GeneratePhraseQueries +instance Arbitrary GeneratePhraseQueries where arbitrary = arbitraryGeneratePhraseQueries +makeArbitrary ''Locale +instance Arbitrary Locale where arbitrary = arbitraryLocale +makeArbitrary ''MaxWordLength +instance Arbitrary MaxWordLength where arbitrary = arbitraryMaxWordLength +makeArbitrary ''MinWordLength +instance Arbitrary MinWordLength where arbitrary = arbitraryMinWordLength +makeArbitrary ''PhraseSlop +instance Arbitrary PhraseSlop where arbitrary = arbitraryPhraseSlop +makeArbitrary ''MinDocFrequency +instance Arbitrary MinDocFrequency where arbitrary = arbitraryMinDocFrequency +makeArbitrary ''MaxDocFrequency +instance Arbitrary MaxDocFrequency where arbitrary = arbitraryMaxDocFrequency +makeArbitrary ''Regexp +instance Arbitrary Regexp where arbitrary = arbitraryRegexp +makeArbitrary ''SimpleQueryStringQuery +instance Arbitrary SimpleQueryStringQuery where arbitrary = arbitrarySimpleQueryStringQuery +makeArbitrary ''FieldOrFields +instance Arbitrary FieldOrFields where arbitrary = arbitraryFieldOrFields +makeArbitrary ''SimpleQueryFlag +instance Arbitrary SimpleQueryFlag where arbitrary = arbitrarySimpleQueryFlag +makeArbitrary ''RegexpQuery +instance Arbitrary RegexpQuery where arbitrary = arbitraryRegexpQuery +makeArbitrary ''QueryStringQuery +instance Arbitrary QueryStringQuery where arbitrary = arbitraryQueryStringQuery +makeArbitrary ''RangeQuery +instance Arbitrary RangeQuery where arbitrary = arbitraryRangeQuery +makeArbitrary ''RangeValue +instance Arbitrary RangeValue where arbitrary = arbitraryRangeValue +makeArbitrary ''PrefixQuery 
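+
+-- Each 'makeArbitrary' splice in this block generates a standalone
+-- @arbitrary<TypeName>@ generator, which the paired instance then reuses.
+-- As an illustrative sketch, for a simple two-field type such as 'Term'
+-- the generated code is roughly equivalent to:
+--
+-- > arbitraryTerm :: Gen Term
+-- > arbitraryTerm = Term <$> arbitrary <*> arbitrary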
+instance Arbitrary PrefixQuery where arbitrary = arbitraryPrefixQuery +makeArbitrary ''NestedQuery +instance Arbitrary NestedQuery where arbitrary = arbitraryNestedQuery +makeArbitrary ''MoreLikeThisFieldQuery +instance Arbitrary MoreLikeThisFieldQuery where arbitrary = arbitraryMoreLikeThisFieldQuery +makeArbitrary ''MoreLikeThisQuery +instance Arbitrary MoreLikeThisQuery where arbitrary = arbitraryMoreLikeThisQuery +makeArbitrary ''IndicesQuery +instance Arbitrary IndicesQuery where arbitrary = arbitraryIndicesQuery +makeArbitrary ''HasParentQuery +instance Arbitrary HasParentQuery where arbitrary = arbitraryHasParentQuery +makeArbitrary ''HasChildQuery +instance Arbitrary HasChildQuery where arbitrary = arbitraryHasChildQuery +makeArbitrary ''FuzzyQuery +instance Arbitrary FuzzyQuery where arbitrary = arbitraryFuzzyQuery +makeArbitrary ''FuzzyLikeFieldQuery +instance Arbitrary FuzzyLikeFieldQuery where arbitrary = arbitraryFuzzyLikeFieldQuery +makeArbitrary ''FuzzyLikeThisQuery +instance Arbitrary FuzzyLikeThisQuery where arbitrary = arbitraryFuzzyLikeThisQuery +makeArbitrary ''DisMaxQuery +instance Arbitrary DisMaxQuery where arbitrary = arbitraryDisMaxQuery +makeArbitrary ''CommonTermsQuery +instance Arbitrary CommonTermsQuery where arbitrary = arbitraryCommonTermsQuery +makeArbitrary ''DistanceRange +instance Arbitrary DistanceRange where arbitrary = arbitraryDistanceRange +makeArbitrary ''MultiMatchQuery +instance Arbitrary MultiMatchQuery where arbitrary = arbitraryMultiMatchQuery +makeArbitrary ''LessThanD +instance Arbitrary LessThanD where arbitrary = arbitraryLessThanD +makeArbitrary ''LessThanEqD +instance Arbitrary LessThanEqD where arbitrary = arbitraryLessThanEqD +makeArbitrary ''GreaterThanD +instance Arbitrary GreaterThanD where arbitrary = arbitraryGreaterThanD +makeArbitrary ''GreaterThanEqD +instance Arbitrary GreaterThanEqD where arbitrary = arbitraryGreaterThanEqD +makeArbitrary ''LessThan +instance Arbitrary LessThan where arbitrary = arbitraryLessThan +makeArbitrary ''LessThanEq +instance Arbitrary LessThanEq where arbitrary = arbitraryLessThanEq +makeArbitrary ''GreaterThan +instance Arbitrary GreaterThan where arbitrary = arbitraryGreaterThan +makeArbitrary ''GreaterThanEq +instance Arbitrary GreaterThanEq where arbitrary = arbitraryGreaterThanEq +makeArbitrary ''GeoPoint +instance Arbitrary GeoPoint where arbitrary = arbitraryGeoPoint +makeArbitrary ''NullValue +instance Arbitrary NullValue where arbitrary = arbitraryNullValue +makeArbitrary ''MinimumMatchHighLow +instance Arbitrary MinimumMatchHighLow where arbitrary = arbitraryMinimumMatchHighLow +makeArbitrary ''CommonMinimumMatch +instance Arbitrary CommonMinimumMatch where arbitrary = arbitraryCommonMinimumMatch +makeArbitrary ''BoostingQuery +instance Arbitrary BoostingQuery where arbitrary = arbitraryBoostingQuery +makeArbitrary ''BoolQuery +instance Arbitrary BoolQuery where arbitrary = arbitraryBoolQuery +makeArbitrary ''MatchQuery +instance Arbitrary MatchQuery where arbitrary = arbitraryMatchQuery +makeArbitrary ''MultiMatchQueryType +instance Arbitrary MultiMatchQueryType where arbitrary = arbitraryMultiMatchQueryType +makeArbitrary ''BooleanOperator +instance Arbitrary BooleanOperator where arbitrary = arbitraryBooleanOperator +makeArbitrary ''ZeroTermsQuery +instance Arbitrary ZeroTermsQuery where arbitrary = arbitraryZeroTermsQuery +makeArbitrary ''MatchQueryType +instance Arbitrary MatchQueryType where arbitrary = arbitraryMatchQueryType +makeArbitrary ''SearchAliasRouting +instance Arbitrary 
SearchAliasRouting where arbitrary = arbitrarySearchAliasRouting +makeArbitrary ''ScoreType +instance Arbitrary ScoreType where arbitrary = arbitraryScoreType +makeArbitrary ''Distance +instance Arbitrary Distance where arbitrary = arbitraryDistance +makeArbitrary ''DistanceUnit +instance Arbitrary DistanceUnit where arbitrary = arbitraryDistanceUnit +makeArbitrary ''DistanceType +instance Arbitrary DistanceType where arbitrary = arbitraryDistanceType +makeArbitrary ''OptimizeBbox +instance Arbitrary OptimizeBbox where arbitrary = arbitraryOptimizeBbox +makeArbitrary ''GeoBoundingBoxConstraint +instance Arbitrary GeoBoundingBoxConstraint where arbitrary = arbitraryGeoBoundingBoxConstraint +makeArbitrary ''GeoFilterType +instance Arbitrary GeoFilterType where arbitrary = arbitraryGeoFilterType +makeArbitrary ''GeoBoundingBox +instance Arbitrary GeoBoundingBox where arbitrary = arbitraryGeoBoundingBox +makeArbitrary ''LatLon +instance Arbitrary LatLon where arbitrary = arbitraryLatLon +makeArbitrary ''RangeExecution +instance Arbitrary RangeExecution where arbitrary = arbitraryRangeExecution +makeArbitrary ''RegexpFlag +instance Arbitrary RegexpFlag where arbitrary = arbitraryRegexpFlag +makeArbitrary ''BoolMatch +instance Arbitrary BoolMatch where arbitrary = arbitraryBoolMatch +makeArbitrary ''Term +instance Arbitrary Term where arbitrary = arbitraryTerm +makeArbitrary ''IndexSettings +instance Arbitrary IndexSettings where arbitrary = arbitraryIndexSettings +makeArbitrary ''UpdatableIndexSetting +instance Arbitrary UpdatableIndexSetting where + arbitrary = arbitraryUpdatableIndexSetting +makeArbitrary ''Bytes +instance Arbitrary Bytes where arbitrary = arbitraryBytes +makeArbitrary ''AllocationPolicy +instance Arbitrary AllocationPolicy where arbitrary = arbitraryAllocationPolicy +makeArbitrary ''InitialShardCount +instance Arbitrary InitialShardCount where arbitrary = arbitraryInitialShardCount +makeArbitrary ''FSType +instance Arbitrary FSType where arbitrary = arbitraryFSType +makeArbitrary ''CompoundFormat +instance Arbitrary CompoundFormat where arbitrary = arbitraryCompoundFormat +makeArbitrary ''FsSnapshotRepo +instance Arbitrary FsSnapshotRepo where arbitrary = arbitraryFsSnapshotRepo +makeArbitrary ''SnapshotRepoName +instance Arbitrary SnapshotRepoName where arbitrary = arbitrarySnapshotRepoName +makeArbitrary ''TemplateQueryInline +instance Arbitrary TemplateQueryInline where arbitrary = arbitraryTemplateQueryInline +makeArbitrary ''DirectGeneratorSuggestModeTypes +instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = arbitraryDirectGeneratorSuggestModeTypes +makeArbitrary ''DirectGenerators +instance Arbitrary DirectGenerators where arbitrary = arbitraryDirectGenerators +makeArbitrary ''PhraseSuggesterCollate +instance Arbitrary PhraseSuggesterCollate where arbitrary = arbitraryPhraseSuggesterCollate +makeArbitrary ''PhraseSuggesterHighlighter +instance Arbitrary PhraseSuggesterHighlighter where arbitrary = arbitraryPhraseSuggesterHighlighter +makeArbitrary ''Size +instance Arbitrary Size where arbitrary = arbitrarySize +makeArbitrary ''PhraseSuggester +instance Arbitrary PhraseSuggester where arbitrary = arbitraryPhraseSuggester +makeArbitrary ''SuggestType +instance Arbitrary SuggestType where arbitrary = arbitrarySuggestType +makeArbitrary ''Suggest +instance Arbitrary Suggest where arbitrary = arbitrarySuggest + + +makeArbitrary ''Script +instance Arbitrary Script where arbitrary = arbitraryScript + +newtype UpdatableIndexSetting' = + UpdatableIndexSetting' 
UpdatableIndexSetting + deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable) + +instance Arbitrary UpdatableIndexSetting' where + arbitrary = do + settings <- arbitrary + return $ UpdatableIndexSetting' $ case settings of + RoutingAllocationInclude xs -> + RoutingAllocationInclude (dropDuplicateAttrNames xs) + RoutingAllocationExclude xs -> + RoutingAllocationExclude (dropDuplicateAttrNames xs) + RoutingAllocationRequire xs -> + RoutingAllocationRequire (dropDuplicateAttrNames xs) + x -> x + where + dropDuplicateAttrNames = + NE.fromList . L.nubBy sameAttrName . NE.toList + sameAttrName a b = + nodeAttrFilterName a == nodeAttrFilterName b + -- shrink (UpdatableIndexSetting' x) = map UpdatableIndexSetting' (shrink x) diff --git a/tests/V1/Test/Highlights.hs b/tests/V1/Test/Highlights.hs new file mode 100644 index 0000000..baa2234 --- /dev/null +++ b/tests/V1/Test/Highlights.hs @@ -0,0 +1,32 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Highlights where + +import Test.Common +import Test.Import + +import qualified Data.Map as M + +initHighlights :: Text -> BH IO (Either EsError (Maybe HitHighlight)) +initHighlights fieldName = do + _ <- insertData + _ <- insertOther + let query = QueryMatchQuery $ mkMatchQuery (FieldName fieldName) (QueryString "haskell") + let testHighlight = Highlights Nothing [FieldHighlight (FieldName fieldName) Nothing] + let search = mkHighlightSearch (Just query) testHighlight + searchTweetHighlight search + +spec :: Spec +spec = + describe "Highlights API" $ do + it "returns highlight from query when there should be one" $ withTestEnv $ do + myHighlight <- initHighlights "message" + liftIO $ + myHighlight `shouldBe` + Right (Just (M.fromList [("message", ["Use haskell!"])])) + + it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do + myHighlight <- initHighlights "user" + liftIO $ + myHighlight `shouldBe` + Right Nothing diff --git a/tests/V1/Test/Import.hs b/tests/V1/Test/Import.hs new file mode 100644 index 0000000..5af7ffb --- /dev/null +++ b/tests/V1/Test/Import.hs @@ -0,0 +1,58 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Import + ( module X + , module Test.Import + ) where + + +import Control.Applicative as X +import Control.Exception as X (evaluate) +import Control.Monad as X +import Control.Monad.Catch as X +import Control.Monad.Reader as X +import Data.Aeson as X +import Data.Aeson.TH as X +import Data.Aeson.Types as X (parseEither) +import Data.Maybe as X +import Data.List.NonEmpty as X (NonEmpty(..)) +import Data.Monoid as X +import Data.Ord as X (comparing) +import Data.Proxy as X +import Data.Text as X (Text) +import Data.Time.Calendar as X (Day(..), fromGregorian) +import Data.Time.Clock as X +import Data.Typeable as X +import Database.V1.Bloodhound as X hiding (key) +import Lens.Micro as X +import Lens.Micro.Aeson as X +import Network.HTTP.Client as X hiding (Proxy, fileSize) +import System.IO.Temp as X +import System.PosixCompat.Files as X +import Test.Hspec as X +import Test.Hspec.QuickCheck as X (prop) +import Test.QuickCheck as X hiding (Result, Success) +import Test.QuickCheck.Property.Monoid as X (T (..), eq, prop_Monoid) +import Text.Pretty.Simple as X (pPrint) + +import qualified Data.List as L + +noDuplicates :: Eq a => [a] -> Bool +noDuplicates xs = L.nub xs == xs + +getSource :: EsResult a -> Maybe a +getSource = fmap _source . foundResult + +grabFirst :: Either EsError (SearchResult a) -> Either EsError a +grabFirst r = + case fmap (hitSource . head . hits . 
searchHits) r of
+    (Left e) -> Left e
+    (Right Nothing) -> Left (EsError 500 "Source was missing")
+    (Right (Just x)) -> Right x
+
+when' :: Monad m => m Bool -> m () -> m ()
+when' b f = b >>= \x -> when x f
+
+headMay :: [a] -> Maybe a
+headMay (x : _) = Just x
+headMay _ = Nothing
diff --git a/tests/V1/Test/Indices.hs b/tests/V1/Test/Indices.hs
new file mode 100644
index 0000000..75cf585
--- /dev/null
+++ b/tests/V1/Test/Indices.hs
@@ -0,0 +1,78 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Test.Indices where
+
+import Test.Common
+import Test.Import
+
+import qualified Data.List as L
+import qualified Data.List.NonEmpty as NE
+
+spec :: Spec
+spec = do
+  describe "Index create/delete API" $ do
+    it "creates and then deletes the requested index" $ withTestEnv $ do
+      -- priming state.
+      _ <- deleteExampleIndex
+      resp <- createExampleIndex
+      deleteResp <- deleteExampleIndex
+      liftIO $ do
+        validateStatus resp 200
+        validateStatus deleteResp 200
+
+  describe "Index aliases" $ do
+    let aname = IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias")
+    let alias = IndexAlias (testIndex) aname
+    let create = IndexAliasCreate Nothing Nothing
+    let action = AddAlias alias create
+    it "handles the simple case of aliasing an existing index" $ do
+      withTestEnv $ do
+        resetIndex
+        resp <- updateIndexAliases (action :| [])
+        liftIO $ validateStatus resp 200
+      let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
+      (do aliases <- withTestEnv getIndexAliases
+          let expected = IndexAliasSummary alias create
+          case aliases of
+            Right (IndexAliasesSummary summs) ->
+              L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
+            Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
+
+  describe "Index Listing" $ do
+    it "returns a list of index names" $ withTestEnv $ do
+      _ <- createExampleIndex
+      ixns <- listIndices
+      liftIO (ixns `shouldContain` [testIndex])
+
+  describe "Index Settings" $ do
+    it "persists settings" $ withTestEnv $ do
+      _ <- deleteExampleIndex
+      _ <- createExampleIndex
+      let updates = BlocksWrite False :| []
+      updateResp <- updateIndexSettings updates testIndex
+      liftIO $ validateStatus updateResp 200
+      getResp <- getIndexSettings testIndex
+      liftIO $
+        getResp `shouldBe` Right (IndexSettingsSummary
+                                    testIndex
+                                    (IndexSettings (ShardCount 1) (ReplicaCount 0))
+                                    (NE.toList updates))
+
+    it "allows merge failure behavior to be set" $ when' (atleast es50) $ withTestEnv $ do
+      _ <- deleteExampleIndex
+      _ <- createExampleIndex
+      let updates = FailOnMergeFailure True :| []
+      updateResp <- updateIndexSettings updates testIndex
+      liftIO $ validateStatus updateResp 200
+      getResp <- getIndexSettings testIndex
+      liftIO $
+        getResp `shouldBe` Right (IndexSettingsSummary
+                                    testIndex
+                                    (IndexSettings (ShardCount 1) (ReplicaCount 0))
+                                    (NE.toList updates))
+
+  describe "Index Optimization" $ do
+    it "returns a successful response upon completion" $ withTestEnv $ do
+      _ <- createExampleIndex
+      resp <- optimizeIndex (IndexList (testIndex :| [])) defaultIndexOptimizationSettings
+      liftIO $ validateStatus resp 200
diff --git a/tests/V1/Test/JSON.hs b/tests/V1/Test/JSON.hs
new file mode 100644
index 0000000..cc8dc9b
--- /dev/null
+++ b/tests/V1/Test/JSON.hs
@@ -0,0 +1,202 @@
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module Test.JSON (spec) where
+
+import Test.Import
+
+import qualified Data.ByteString.Lazy.Char8 as BL8
+import qualified Data.List as L
+import Data.List.NonEmpty 
(NonEmpty(..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.HashMap.Strict as HM +import qualified Data.Text as T +import qualified Data.Vector as V + +import Test.ApproxEq +import Test.Generators + +propJSON :: forall a + . ( Arbitrary a + , ToJSON a + , FromJSON a + , Show a + , Eq a + , Typeable a + ) + => Proxy a -> Spec +propJSON _ = prop testName $ \(a :: a) -> + let jsonStr = "via " <> BL8.unpack (encode a) + in counterexample jsonStr (parseEither parseJSON (toJSON a) + === Right a) + where testName = show ty <> " FromJSON/ToJSON roundtrips" + ty = typeOf (undefined :: a) + +propApproxJSON :: forall a + . ( Arbitrary a + , ToJSON a + , FromJSON a + , Show a + , ApproxEq a + , Typeable a + ) + => Proxy a -> Spec +propApproxJSON _ = prop testName $ \(a :: a) -> + let jsonStr = "via " <> BL8.unpack (encode a) + in counterexample jsonStr (parseEither parseJSON (toJSON a) + ==~ Right a) + where testName = show ty <> " FromJSON/ToJSON roundtrips" + ty = typeOf (undefined :: a) + +spec :: Spec +spec = do + describe "ToJSON RegexpFlags" $ do + it "generates the correct JSON for AllRegexpFlags" $ + toJSON AllRegexpFlags `shouldBe` String "ALL" + + it "generates the correct JSON for NoRegexpFlags" $ + toJSON NoRegexpFlags `shouldBe` String "NONE" + + it "generates the correct JSON for SomeRegexpFlags" $ + let flags = AnyString :| [ Automaton + , Complement + , Empty + , Intersection + , Interval ] + in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL" + + prop "removes duplicates from flags" $ \(flags :: RegexpFlags) -> + let String str = toJSON flags + flagStrs = T.splitOn "|" str + in noDuplicates flagStrs + + describe "omitNulls" $ do + it "checks that omitNulls drops list elements when it should" $ + let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int])) + , "test2" .= (toJSON ("some value" :: Text))] + in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) + + it "checks that omitNulls doesn't drop list elements when it shouldn't" $ + let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int])) + , "test2" .= (toJSON ("some value" :: Text))] + in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0])) + , ("test2", String "some value")]) + it "checks that omitNulls drops non list elements when it should" $ + let dropped = omitNulls $ [ "test1" .= (toJSON Null) + , "test2" .= (toJSON ("some value" :: Text))] + in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) + it "checks that omitNulls doesn't drop non list elements when it shouldn't" $ + let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int)) + , "test2" .= (toJSON ("some value" :: Text))] + in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0) + , ("test2", String "some value")]) + + describe "Exact isomorphism JSON instances" $ do + propJSON (Proxy :: Proxy Version) + propJSON (Proxy :: Proxy IndexName) + propJSON (Proxy :: Proxy MappingName) + propJSON (Proxy :: Proxy DocId) + propJSON (Proxy :: Proxy IndexAliasRouting) + propJSON (Proxy :: Proxy RoutingValue) + propJSON (Proxy :: Proxy ShardCount) + propJSON (Proxy :: Proxy ReplicaCount) + propJSON (Proxy :: Proxy TemplateName) + propJSON (Proxy :: Proxy TemplatePattern) + propJSON (Proxy :: Proxy QueryString) + propJSON (Proxy :: Proxy FieldName) + propJSON (Proxy :: Proxy CacheName) + propJSON (Proxy :: Proxy CacheKey) + propJSON (Proxy :: Proxy Existence) + propJSON (Proxy :: Proxy 
CutoffFrequency) + propJSON (Proxy :: Proxy Analyzer) + propJSON (Proxy :: Proxy MaxExpansions) + propJSON (Proxy :: Proxy Lenient) + propJSON (Proxy :: Proxy Tiebreaker) + propJSON (Proxy :: Proxy Boost) + propJSON (Proxy :: Proxy BoostTerms) + propJSON (Proxy :: Proxy MinimumMatch) + propJSON (Proxy :: Proxy DisableCoord) + propJSON (Proxy :: Proxy IgnoreTermFrequency) + propJSON (Proxy :: Proxy MinimumTermFrequency) + propJSON (Proxy :: Proxy MaxQueryTerms) + propJSON (Proxy :: Proxy Fuzziness) + propJSON (Proxy :: Proxy PrefixLength) + propJSON (Proxy :: Proxy TypeName) + propJSON (Proxy :: Proxy PercentMatch) + propJSON (Proxy :: Proxy StopWord) + propJSON (Proxy :: Proxy QueryPath) + propJSON (Proxy :: Proxy AllowLeadingWildcard) + propJSON (Proxy :: Proxy LowercaseExpanded) + propJSON (Proxy :: Proxy EnablePositionIncrements) + propJSON (Proxy :: Proxy AnalyzeWildcard) + propJSON (Proxy :: Proxy GeneratePhraseQueries) + propJSON (Proxy :: Proxy Locale) + propJSON (Proxy :: Proxy MaxWordLength) + propJSON (Proxy :: Proxy MinWordLength) + propJSON (Proxy :: Proxy PhraseSlop) + propJSON (Proxy :: Proxy MinDocFrequency) + propJSON (Proxy :: Proxy MaxDocFrequency) + propJSON (Proxy :: Proxy Filter) + propJSON (Proxy :: Proxy Query) + propJSON (Proxy :: Proxy SimpleQueryStringQuery) + propJSON (Proxy :: Proxy FieldOrFields) + propJSON (Proxy :: Proxy SimpleQueryFlag) + propJSON (Proxy :: Proxy RegexpQuery) + propJSON (Proxy :: Proxy QueryStringQuery) + propJSON (Proxy :: Proxy RangeQuery) + propJSON (Proxy :: Proxy PrefixQuery) + propJSON (Proxy :: Proxy NestedQuery) + propJSON (Proxy :: Proxy MoreLikeThisFieldQuery) + propJSON (Proxy :: Proxy MoreLikeThisQuery) + propJSON (Proxy :: Proxy IndicesQuery) + propJSON (Proxy :: Proxy HasParentQuery) + propJSON (Proxy :: Proxy HasChildQuery) + propJSON (Proxy :: Proxy FuzzyQuery) + propJSON (Proxy :: Proxy FuzzyLikeFieldQuery) + propJSON (Proxy :: Proxy FuzzyLikeThisQuery) + propJSON (Proxy :: Proxy DisMaxQuery) + propJSON (Proxy :: Proxy CommonTermsQuery) + propJSON (Proxy :: Proxy CommonMinimumMatch) + propJSON (Proxy :: Proxy BoostingQuery) + propJSON (Proxy :: Proxy BoolQuery) + propJSON (Proxy :: Proxy MatchQuery) + propJSON (Proxy :: Proxy MultiMatchQueryType) + propJSON (Proxy :: Proxy BooleanOperator) + propJSON (Proxy :: Proxy ZeroTermsQuery) + propJSON (Proxy :: Proxy MatchQueryType) + propJSON (Proxy :: Proxy AliasRouting) + propJSON (Proxy :: Proxy IndexAliasCreate) + propJSON (Proxy :: Proxy SearchAliasRouting) + propJSON (Proxy :: Proxy ScoreType) + propJSON (Proxy :: Proxy Distance) + propJSON (Proxy :: Proxy DistanceUnit) + propJSON (Proxy :: Proxy DistanceType) + propJSON (Proxy :: Proxy OptimizeBbox) + propJSON (Proxy :: Proxy GeoBoundingBoxConstraint) + propJSON (Proxy :: Proxy GeoFilterType) + propJSON (Proxy :: Proxy GeoBoundingBox) + propJSON (Proxy :: Proxy LatLon) + propJSON (Proxy :: Proxy RangeExecution) + prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs -> + let expected = case rfs of + SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (L.nub (NE.toList fs))) + x -> x + in parseEither parseJSON (toJSON rfs) === Right expected + propJSON (Proxy :: Proxy BoolMatch) + propJSON (Proxy :: Proxy Term) + propJSON (Proxy :: Proxy MultiMatchQuery) + propJSON (Proxy :: Proxy IndexSettings) + propJSON (Proxy :: Proxy CompoundFormat) + propJSON (Proxy :: Proxy TemplateQueryInline) + propJSON (Proxy :: Proxy Suggest) + propJSON (Proxy :: Proxy DirectGenerators) + propJSON (Proxy :: Proxy 
DirectGeneratorSuggestModeTypes) + + describe "Approximate isomorphism JSON instances" $ do + propApproxJSON (Proxy :: Proxy UpdatableIndexSetting') + propApproxJSON (Proxy :: Proxy ReplicaCount) + propApproxJSON (Proxy :: Proxy ReplicaBounds) + propApproxJSON (Proxy :: Proxy Bytes) + propApproxJSON (Proxy :: Proxy AllocationPolicy) + propApproxJSON (Proxy :: Proxy InitialShardCount) + propApproxJSON (Proxy :: Proxy FSType) diff --git a/tests/V1/Test/Query.hs b/tests/V1/Test/Query.hs new file mode 100644 index 0000000..811649e --- /dev/null +++ b/tests/V1/Test/Query.hs @@ -0,0 +1,115 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Query where + +import Test.Common +import Test.Import + +import qualified Data.HashMap.Strict as HM + +spec :: Spec +spec = + describe "query API" $ do + it "returns document for term query and identity filter" $ withTestEnv $ do + _ <- insertData + let query = TermQuery (Term "user" "bitemyapp") Nothing + let filter' = IdentityFilter + let search = mkSearch (Just query) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "handles constant score queries" $ withTestEnv $ do + _ <- insertData + let query = TermsQuery "user" ("bitemyapp" :| []) + let cfQuery = ConstantScoreQuery query (Boost 1.0) + let filter' = IdentityFilter + let search = mkSearch (Just cfQuery) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for terms query and identity filter" $ withTestEnv $ do + _ <- insertData + let query = TermsQuery "user" ("bitemyapp" :| []) + let filter' = IdentityFilter + let search = mkSearch (Just query) (Just filter') + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for match query" $ withTestEnv $ do + _ <- insertData + let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for multi-match query" $ withTestEnv $ do + _ <- insertData + let flds = [FieldName "user", FieldName "message"] + let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp") + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do + _ <- insertData + let tiebreaker = Just $ Tiebreaker 0.3 + flds = [FieldName "user", FieldName "message"] + multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp") + query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker } + search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for bool query" $ withTestEnv $ do + _ <- insertData + let innerQuery = QueryMatchQuery $ + mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let query = QueryBoolQuery $ + mkBoolQuery [innerQuery] [] [] + let search = mkSearch (Just query) Nothing + myTweet <- searchTweet search + liftIO $ + myTweet `shouldBe` Right exampleTweet + + it "returns document for boosting query" $ withTestEnv $ do + _ <- insertData + let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") + let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp") + let 
query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2)
+      let search = mkSearch (Just query) Nothing
+      myTweet <- searchTweet search
+      liftIO $
+        myTweet `shouldBe` Right exampleTweet
+
+    it "returns document for common terms query" $ withTestEnv $ do
+      _ <- insertData
+      let query = QueryCommonTermsQuery $
+                  CommonTermsQuery (FieldName "user")
+                  (QueryString "bitemyapp")
+                  (CutoffFrequency 0.0001)
+                  Or Or Nothing Nothing Nothing Nothing
+      let search = mkSearch (Just query) Nothing
+      myTweet <- searchTweet search
+      liftIO $
+        myTweet `shouldBe` Right exampleTweet
+
+    it "returns document for inline template query" $ withTestEnv $ do
+      _ <- insertData
+      let innerQuery = QueryMatchQuery $
+                         mkMatchQuery (FieldName "{{userKey}}")
+                                      (QueryString "{{bitemyappKey}}")
+          templateParams = TemplateQueryKeyValuePairs $ HM.fromList
+                           [ ("userKey", "user")
+                           , ("bitemyappKey", "bitemyapp")
+                           ]
+          templateQuery = QueryTemplateQueryInline $
+                            TemplateQueryInline innerQuery templateParams
+          search = mkSearch (Just templateQuery) Nothing
+      myTweet <- searchTweet search
+      liftIO $ myTweet `shouldBe` Right exampleTweet
diff --git a/tests/V1/Test/Snapshots.hs b/tests/V1/Test/Snapshots.hs
new file mode 100644
index 0000000..cead610
--- /dev/null
+++ b/tests/V1/Test/Snapshots.hs
@@ -0,0 +1,201 @@
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Test.Snapshots (spec) where
+
+import Test.Common
+import Test.Import
+
+import Data.Maybe (fromMaybe)
+import qualified Data.List as L
+import qualified Data.HashMap.Strict as HM
+import qualified Data.Text as T
+import qualified Data.Vector as V
+import qualified Network.HTTP.Types.Method as NHTM
+import qualified Network.URI as URI
+
+import Test.Generators ()
+
+spec :: Spec
+spec = do
+  describe "FsSnapshotRepo" $
+    prop "SnapshotRepo laws" $ \fsr ->
+      fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
+
+  describe "Snapshot repos" $ do
+    it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do
+      res <- getSnapshotRepos AllSnapshotRepos
+      liftIO $ case res of
+        Left e -> expectationFailure ("Expected a Right but got Left " <> show e)
+        Right _ -> return ()
+
+    it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      let r2n = SnapshotRepoName "bloodhound-repo2"
+      withSnapshotRepo r1n $ \r1 ->
+        withSnapshotRepo r2n $ \r2 -> do
+          repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n]))
+          liftIO $ case repos of
+            Right xs -> do
+              let srt = L.sortBy (comparing gSnapshotRepoName)
+              srt xs `shouldBe` srt [r1, r2]
+            Left e -> expectationFailure (show e)
+
+    it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      withSnapshotRepo r1n $ \r1 -> do
+        let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1))
+        let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing
+        resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression
+        liftIO (validateStatus resp 200)
+        Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| []))
+        liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression)
+
+    -- verify came around in 1.4 it seems
+    it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      withSnapshotRepo r1n $ \_ -> do
+        res <- verifySnapshotRepo r1n
+        liftIO $ case res of
+          Right (SnapshotVerification vs)
+            | null vs -> expectationFailure "Expected nonempty set of verifying nodes"
+            | otherwise -> return ()
+          Left e -> expectationFailure (show e)
+
+  describe "Snapshots" $ do
+    it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      withSnapshotRepo r1n $ \_ -> do
+        res <- getSnapshots r1n AllSnapshots
+        liftIO $ case res of
+          Left e -> expectationFailure ("Expected a Right but got Left " <> show e)
+          Right _ -> return ()
+
+    it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      withSnapshotRepo r1n $ \_ -> do
+        let s1n = SnapshotName "example-snapshot"
+        withSnapshot r1n s1n $ do
+          res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| []))
+          liftIO $ case res of
+            Right [snap]
+              | snapInfoState snap == SnapshotSuccess &&
+                snapInfoName snap == s1n -> return ()
+              | otherwise -> expectationFailure (show snap)
+            Right [] -> expectationFailure "There were no snapshots"
+            Right snaps -> expectationFailure ("Expected 1 snapshot but got " <> show (length snaps))
+            Left e -> expectationFailure (show e)
+
+  describe "Snapshot restore" $ do
+    it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      withSnapshotRepo r1n $ \_ -> do
+        let s1n = SnapshotName "example-snapshot"
+        withSnapshot r1n s1n $ do
+          let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True }
+          -- have to close an index to restore it
+          resp1 <- closeIndex testIndex
+          liftIO (validateStatus resp1 200)
+          resp2 <- restoreSnapshot r1n s1n settings
+          liftIO (validateStatus resp2 200)
+
+    it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do
+      let r1n = SnapshotRepoName "bloodhound-repo1"
+      withSnapshotRepo r1n $ \_ -> do
+        let s1n = SnapshotName "example-snapshot"
+        withSnapshot r1n s1n $ do
+          let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)"
+          let replace = RRTLit "restored-" :| [RRSubWholeMatch]
+          let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1"
+          oldEnoughForOverrides <- liftIO (atleast es15)
+          let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
+          let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True
+                                                        , snapRestoreRenamePattern = Just pat
+                                                        , snapRestoreRenameReplacement = Just replace
+                                                        , snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides
+                                                                                              then Just overrides
+                                                                                              else Nothing
+                                                        }
+          -- have to close an index to restore it
+          let go = do
+                resp <- restoreSnapshot r1n s1n settings
+                liftIO (validateStatus resp 200)
+                exists <- indexExists expectedIndex
+                liftIO (exists `shouldBe` True)
+          go `finally` deleteIndex expectedIndex
+
+-- | Get configured repo paths for snapshotting. Note that this is not
+-- enabled by default, and on ES 1.6 or later we won't be able to test
+-- snapshotting without it. Note that this can and should be part of the
+-- client functionality in a much less ad-hoc incarnation.
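+--
+-- For reference, the traversal below assumes a @_nodes@ payload shaped
+-- roughly like:
+--
+-- > {"nodes": {"<node-id>": {"settings": {"path": {"repo": ["/mnt/backups"]}}}}}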
+getRepoPaths :: IO [FilePath] +getRepoPaths = withTestEnv $ do + bhe <- getBHEnv + let Server s = bhServer bhe + let tUrl = s <> "/" <> "_nodes" + initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl)) + let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet } + Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe)) + return $ fromMaybe mempty $ do + Object nodes <- HM.lookup "nodes" o + Object firstNode <- snd <$> headMay (HM.toList nodes) + Object settings <- HM.lookup "settings" firstNode + Object path <- HM.lookup "path" settings + Array repo <- HM.lookup "repo" path + return [ T.unpack t | String t <- V.toList repo] + +-- | 1.5 and earlier don't care about repo paths +canSnapshot :: IO Bool +canSnapshot = do + caresAboutRepos <- atleast es16 + repoPaths <- getRepoPaths + return (not caresAboutRepos || not (null repoPaths)) + +withSnapshotRepo + :: ( MonadMask m + , MonadBH m + ) + => SnapshotRepoName + -> (GenericSnapshotRepo -> m a) + -> m a +withSnapshotRepo srn@(SnapshotRepoName n) f = do + repoPaths <- liftIO getRepoPaths + -- we'll use the first repo path if available, otherwise system temp + -- dir. Note that this will fail on ES > 1.6, so be sure you use + -- @when' canSnapshot@. + case repoPaths of + (firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f + [] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f + where + alloc dir = do + liftIO (setFileMode dir mode) + let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing + resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo + liftIO (validateStatus resp 200) + return (toGSnapshotRepo repo) + mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes + free GenericSnapshotRepo {..} = do + resp <- deleteSnapshotRepo gSnapshotRepoName + liftIO (validateStatus resp 200) + + +withSnapshot + :: ( MonadMask m + , MonadBH m + ) + => SnapshotRepoName + -> SnapshotName + -> m a + -> m a +withSnapshot srn sn = bracket_ alloc free + where + alloc = do + resp <- createSnapshot srn sn createSettings + liftIO (validateStatus resp 200) + -- We'll make this synchronous for testing purposes + createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True + , snapIndices = Just (IndexList (testIndex :| [])) + -- We don't actually need to back up any data + } + free = + deleteSnapshot srn sn diff --git a/tests/V1/Test/Sorting.hs b/tests/V1/Test/Sorting.hs new file mode 100644 index 0000000..9df08ab --- /dev/null +++ b/tests/V1/Test/Sorting.hs @@ -0,0 +1,22 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Sorting where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "sorting" $ + it "returns documents in the right order" $ withTestEnv $ do + _ <- insertData + _ <- insertOther + let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending + let search = Search Nothing + Nothing (Just [sortSpec]) Nothing Nothing + False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing + Nothing + result <- searchTweets search + let myTweet = grabFirst result + liftIO $ + myTweet `shouldBe` Right otherTweet diff --git a/tests/V1/Test/SourceFiltering.hs b/tests/V1/Test/SourceFiltering.hs new file mode 100644 index 0000000..447980c --- /dev/null +++ b/tests/V1/Test/SourceFiltering.hs @@ -0,0 +1,38 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.SourceFiltering where + +import Test.Common +import 
Test.Import
+
+import qualified Data.HashMap.Strict as HM
+
+spec :: Spec
+spec =
+  describe "Source filtering" $ do
+
+    it "doesn't include source when sources are disabled" $ withTestEnv $
+      searchExpectSource
+        NoSource
+        (Left (EsError 500 "Source was missing"))
+
+    it "includes a source" $ withTestEnv $
+      searchExpectSource
+        (SourcePatterns (PopPattern (Pattern "message")))
+        (Right (Object (HM.fromList [("message", String "Use haskell!")])))
+
+    it "includes sources" $ withTestEnv $
+      searchExpectSource
+        (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"]))
+        (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")])))
+
+    it "includes source patterns" $ withTestEnv $
+      searchExpectSource
+        (SourcePatterns (PopPattern (Pattern "*ge")))
+        (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")])))
+
+    it "excludes source patterns" $ withTestEnv $
+      searchExpectSource
+        (SourceIncludeExclude (Include [])
+          (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"]))
+        (Right (Object (HM.fromList [("user",String "bitemyapp")])))
diff --git a/tests/V1/Test/Suggest.hs b/tests/V1/Test/Suggest.hs
new file mode 100644
index 0000000..34e4262
--- /dev/null
+++ b/tests/V1/Test/Suggest.hs
@@ -0,0 +1,22 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Test.Suggest where
+
+import Test.Common
+import Test.Import
+
+spec :: Spec
+spec =
+  describe "Suggest" $
+    it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
+      _ <- insertData
+      let phraseSuggester = mkPhraseSuggester (FieldName "message")
+          namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
+          search' = mkSearch Nothing Nothing
+          search = search' { suggestBody = Just namedSuggester }
+          expectedText = Just "use haskell"
+      resp <- searchByIndex testIndex search
+      parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
+      case parsed of
+        Left e -> liftIO $ expectationFailure ("Expected a search suggestion but got " <> show e)
+        Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . 
nsrResponses <$> suggest sr) `shouldBe` expectedText diff --git a/tests/V1/Test/Templates.hs b/tests/V1/Test/Templates.hs new file mode 100644 index 0000000..bda85e0 --- /dev/null +++ b/tests/V1/Test/Templates.hs @@ -0,0 +1,26 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Test.Templates where + +import Test.Common +import Test.Import + +spec :: Spec +spec = + describe "template API" $ do + it "can create a template" $ withTestEnv $ do + let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping] + resp <- putTemplate idxTpl (TemplateName "tweet-tpl") + liftIO $ validateStatus resp 200 + + it "can detect if a template exists" $ withTestEnv $ do + exists <- templateExists (TemplateName "tweet-tpl") + liftIO $ exists `shouldBe` True + + it "can delete a template" $ withTestEnv $ do + resp <- deleteTemplate (TemplateName "tweet-tpl") + liftIO $ validateStatus resp 200 + + it "can detect if a template doesn't exist" $ withTestEnv $ do + exists <- templateExists (TemplateName "tweet-tpl") + liftIO $ exists `shouldBe` False diff --git a/tests/V1/tests.hs b/tests/V1/tests.hs index 52f7155..d173605 100644 --- a/tests/V1/tests.hs +++ b/tests/V1/tests.hs @@ -1,9 +1,5 @@ {-# LANGUAGE CPP #-} -{-# LANGUAGE DefaultSignatures #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} @@ -15,972 +11,40 @@ #if __GLASGOW_HASKELL__ >= 802 {-# LANGUAGE MonoLocalBinds #-} #endif - module Main where -import Control.Applicative -import Control.Error -import Control.Exception (evaluate) -import Control.Monad -import Control.Monad.Catch -import Control.Monad.Reader -import Data.Aeson -import Data.Aeson.Types (parseEither) -import qualified Data.ByteString.Lazy.Char8 as BL8 -import qualified Data.HashMap.Strict as HM -import Data.List (nub) -import qualified Data.List as L -import Data.List.NonEmpty (NonEmpty (..)) -import qualified Data.List.NonEmpty as NE -import qualified Data.Map.Strict as M -import Data.Monoid -import Data.Ord (comparing) -import Data.Proxy -import Data.Text (Text) -import qualified Data.Text as T -import Data.Time.Calendar (Day (..), fromGregorian) -import Data.Time.Clock (NominalDiffTime, UTCTime (..), - secondsToDiffTime) -import Data.Typeable -import qualified Data.Vector as V -import qualified Data.Version as Vers -import Database.V1.Bloodhound -import GHC.Generics as G -import Network.HTTP.Client hiding (Proxy) -import qualified Network.HTTP.Types.Method as NHTM -import qualified Network.HTTP.Types.Status as NHTS -import qualified Network.URI as URI -import Prelude hiding (filter) -import System.IO.Temp -import System.PosixCompat.Files -import Test.Hspec -import Test.QuickCheck.Property.Monoid (T (..), eq, prop_Monoid) - -import Test.Hspec.QuickCheck (prop) -import Test.QuickCheck - -import qualified Generics.SOP as SOP -import qualified Generics.SOP.GGP as SOP - -testServer :: Server -testServer = Server "http://localhost:9200" -testIndex :: IndexName -testIndex = IndexName "bloodhound-tests-twitter-1" -testMapping :: MappingName -testMapping = MappingName "tweet" - -withTestEnv :: BH IO a -> IO a -withTestEnv = withBH defaultManagerSettings testServer - -validateStatus :: Show body => Response body -> Int -> Expectation -validateStatus resp expected = - if actual == expected - then return () - else expectationFailure 
("Expected " <> show expected <> " but got " <> show actual <> ": " <> show body) - where - actual = NHTS.statusCode (responseStatus resp) - body = responseBody resp - -createExampleIndex :: (MonadBH m) => m Reply -createExampleIndex = createIndex (IndexSettings (ShardCount 1) (ReplicaCount 0)) testIndex -deleteExampleIndex :: (MonadBH m) => m Reply -deleteExampleIndex = deleteIndex testIndex - -es13 :: Vers.Version -es13 = Vers.Version [1, 3, 0] [] - -es12 :: Vers.Version -es12 = Vers.Version [1, 2, 0] [] - -es11 :: Vers.Version -es11 = Vers.Version [1, 1, 0] [] - -es14 :: Vers.Version -es14 = Vers.Version [1, 4, 0] [] - -es15 :: Vers.Version -es15 = Vers.Version [1, 5, 0] [] - -es16 :: Vers.Version -es16 = Vers.Version [1, 6, 0] [] - -es20 :: Vers.Version -es20 = Vers.Version [2, 0, 0] [] - -getServerVersion :: IO (Maybe Vers.Version) -getServerVersion = fmap extractVersion <$> withTestEnv getStatus - where - extractVersion = versionNumber . number . version - --- | Get configured repo paths for snapshotting. Note that by default --- this is not enabled and if we are over es 1.5, we won't be able to --- test snapshotting. Note that this can and should be part of the --- client functionality in a much less ad-hoc incarnation. -getRepoPaths :: IO [FilePath] -getRepoPaths = withTestEnv $ do - bhe <- getBHEnv - let Server s = bhServer bhe - let tUrl = s <> "/" <> "_nodes" - initReq <- parseRequest (URI.escapeURIString URI.isAllowedInURI (T.unpack tUrl)) - let req = setRequestIgnoreStatus $ initReq { method = NHTM.methodGet } - Right (Object o) <- parseEsResponse =<< liftIO (httpLbs req (bhManager bhe)) - return $ fromMaybe mempty $ do - Object nodes <- HM.lookup "nodes" o - Object firstNode <- snd <$> headMay (HM.toList nodes) - Object settings <- HM.lookup "settings" firstNode - Object path <- HM.lookup "path" settings - Array repo <- HM.lookup "repo" path - return [ T.unpack t | String t <- V.toList repo] - --- | 1.5 and earlier don't care about repo paths -canSnapshot :: IO Bool -canSnapshot = do - caresAboutRepos <- atleast es16 - repoPaths <- getRepoPaths - return (not caresAboutRepos || not (null (repoPaths))) - -atleast :: Vers.Version -> IO Bool -atleast v = getServerVersion >>= \x -> return $ x >= Just v - -atmost :: Vers.Version -> IO Bool -atmost v = getServerVersion >>= \x -> return $ x <= Just v - -is :: Vers.Version -> IO Bool -is v = getServerVersion >>= \x -> return $ x == Just v - -when' :: Monad m => m Bool -> m () -> m () -when' b f = b >>= \x -> when x f - -(==~) :: (ApproxEq a) => a -> a -> Property -a ==~ b = counterexample (showApproxEq a ++ " !=~ " ++ showApproxEq b) (a =~ b) - -propJSON :: forall a. 
(Arbitrary a, ToJSON a, FromJSON a, Show a, ApproxEq a, Typeable a) => Proxy a -> Spec -propJSON _ = prop testName $ \(a :: a) -> - let jsonStr = "via " <> BL8.unpack (encode a) - in counterexample jsonStr (parseEither parseJSON (toJSON a) ==~ Right a) - where testName = show ty <> " FromJSON/ToJSON roundtrips" - ty = typeOf (undefined :: a) - -data Location = Location { lat :: Double - , lon :: Double } deriving (Eq, Generic, Show) - -data Tweet = Tweet { user :: Text - , postDate :: UTCTime - , message :: Text - , age :: Int - , location :: Location - , extra :: Maybe Text } - deriving (Eq, Generic, Show) - -instance ToJSON Tweet where - toJSON = genericToJSON defaultOptions -instance FromJSON Tweet where - parseJSON = genericParseJSON defaultOptions -instance ToJSON Location where - toJSON = genericToJSON defaultOptions -instance FromJSON Location where - parseJSON = genericParseJSON defaultOptions - -data ParentMapping = ParentMapping deriving (Eq, Show) - -instance ToJSON ParentMapping where - toJSON ParentMapping = - object ["properties" .= - object [ "user" .= object ["type" .= ("string" :: Text)] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - ]] - -data ChildMapping = ChildMapping deriving (Eq, Show) - -instance ToJSON ChildMapping where - toJSON ChildMapping = - object ["_parent" .= object ["type" .= ("parent" :: Text)] - , "properties" .= - object [ "user" .= object ["type" .= ("string" :: Text)] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - ]] - -data TweetMapping = TweetMapping deriving (Eq, Show) - -instance ToJSON TweetMapping where - toJSON TweetMapping = - object ["properties" .= - object [ "user" .= object ["type" .= ("string" :: Text)] - -- Serializing the date as a date is breaking other tests, mysteriously. - -- , "postDate" .= object [ "type" .= ("date" :: Text) - -- , "format" .= ("YYYY-MM-dd`T`HH:mm:ss.SSSZZ" :: Text)] - , "message" .= object ["type" .= ("string" :: Text)] - , "age" .= object ["type" .= ("integer" :: Text)] - , "location" .= object ["type" .= ("geo_point" :: Text)] - ]] - -exampleTweet :: Tweet -exampleTweet = Tweet { user = "bitemyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 10) - , message = "Use haskell!" - , age = 10000 - , location = Location 40.12 (-71.34) - , extra = Nothing } - -tweetWithExtra :: Tweet -tweetWithExtra = Tweet { user = "bitemyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 10) - , message = "Use haskell!" 
- , age = 10000 - , location = Location 40.12 (-71.34) - , extra = Just "blah blah" } - -newAge :: Int -newAge = 31337 - -newUser :: Text -newUser = "someotherapp" - -tweetPatch :: Value -tweetPatch = - object [ "age" .= newAge - , "user" .= newUser - ] - -patchedTweet :: Tweet -patchedTweet = exampleTweet{age = newAge, user = newUser} - -otherTweet :: Tweet -otherTweet = Tweet { user = "notmyapp" - , postDate = UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 11) - , message = "Use haskell!" - , age = 1000 - , location = Location 40.12 (-71.34) - , extra = Nothing } - -resetIndex :: BH IO () -resetIndex = do - _ <- deleteExampleIndex - _ <- createExampleIndex - _ <- putMapping testIndex testMapping TweetMapping - return () - -insertData :: BH IO Reply -insertData = do - resetIndex - insertData' defaultIndexDocumentSettings - -insertData' :: IndexDocumentSettings -> BH IO Reply -insertData' ids = do - r <- indexDocument testIndex testMapping ids exampleTweet (DocId "1") - _ <- refreshIndex testIndex - return r - -updateData :: BH IO Reply -updateData = do - r <- updateDocument testIndex testMapping defaultIndexDocumentSettings tweetPatch (DocId "1") - _ <- refreshIndex testIndex - return r - -insertOther :: BH IO () -insertOther = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings otherTweet (DocId "2") - _ <- refreshIndex testIndex - return () - -insertExtra :: BH IO () -insertExtra = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings tweetWithExtra (DocId "4") - _ <- refreshIndex testIndex - return () - -insertWithSpaceInId :: BH IO () -insertWithSpaceInId = do - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings exampleTweet (DocId "Hello World") - _ <- refreshIndex testIndex - return () - -searchTweet :: Search -> BH IO (Either EsError Tweet) -searchTweet search = do - result <- searchTweets search - let myTweet :: Either EsError Tweet - myTweet = grabFirst result - return myTweet - -searchTweets :: Search -> BH IO (Either EsError (SearchResult Tweet)) -searchTweets search = parseEsResponse =<< searchByIndex testIndex search - -searchExpectNoResults :: Search -> BH IO () -searchExpectNoResults search = do - result <- searchTweets search - let emptyHits = fmap (hits . searchHits) result - liftIO $ - emptyHits `shouldBe` Right [] - -searchExpectAggs :: Search -> BH IO () -searchExpectAggs search = do - reply <- searchByIndex testIndex search - let isEmpty x = return (M.null x) - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - liftIO $ - (result >>= aggregations >>= isEmpty) `shouldBe` Just False - -searchValidBucketAgg :: (BucketAggregation a, FromJSON a, Show a) => - Search -> Text -> (Text -> AggregationResults -> Maybe (Bucket a)) -> BH IO () -searchValidBucketAgg search aggKey extractor = do - reply <- searchByIndex testIndex search - let bucketDocs = docCount . head . buckets - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - let count = result >>= aggregations >>= extractor aggKey >>= \x -> return (bucketDocs x) - liftIO $ - count `shouldBe` Just 1 - -searchTermsAggHint :: [ExecutionHint] -> BH IO () -searchTermsAggHint hints = do - let terms hint = TermsAgg $ (mkTermsAggregation "user") { termExecutionHint = Just hint } - let search hint = mkAggregateSearch Nothing $ mkAggregations "users" $ terms hint - forM_ hints $ searchExpectAggs . 
search - forM_ hints (\x -> searchValidBucketAgg (search x) "users" toTerms) - -searchTweetHighlight :: Search -> BH IO (Either EsError (Maybe HitHighlight)) -searchTweetHighlight search = do - result <- searchTweets search - let myHighlight = fmap (hitHighlight . head . hits . searchHits) result - return myHighlight - -searchExpectSource :: Source -> Either EsError Value -> BH IO () -searchExpectSource src expected = do - _ <- insertData - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let search = (mkSearch (Just query) Nothing) { source = Just src } - reply <- searchByIndex testIndex search - result <- parseEsResponse reply - let value = grabFirst result - liftIO $ - value `shouldBe` expected - -withSnapshotRepo - :: ( MonadMask m - , MonadBH m - ) - => SnapshotRepoName - -> (GenericSnapshotRepo -> m a) - -> m a -withSnapshotRepo srn@(SnapshotRepoName n) f = do - repoPaths <- liftIO getRepoPaths - -- we'll use the first repo path if available, otherwise system temp - -- dir. Note that this will fail on ES > 1.6, so be sure you use - -- @when' canSnapshot@. - case repoPaths of - (firstRepoPath:_) -> withTempDirectory firstRepoPath (T.unpack n) $ \dir -> bracket (alloc dir) free f - [] -> withSystemTempDirectory (T.unpack n) $ \dir -> bracket (alloc dir) free f - where - alloc dir = do - liftIO (setFileMode dir mode) - let repo = FsSnapshotRepo srn "bloodhound-tests-backups" True Nothing Nothing Nothing - resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings repo - liftIO (validateStatus resp 200) - return (toGSnapshotRepo repo) - mode = ownerModes `unionFileModes` groupModes `unionFileModes` otherModes - free GenericSnapshotRepo {..} = do - resp <- deleteSnapshotRepo gSnapshotRepoName - liftIO (validateStatus resp 200) - - -withSnapshot - :: ( MonadMask m - , MonadBH m - ) - => SnapshotRepoName - -> SnapshotName - -> m a - -> m a -withSnapshot srn sn = bracket_ alloc free - where - alloc = do - resp <- createSnapshot srn sn createSettings - liftIO (validateStatus resp 200) - -- We'll make this synchronous for testing purposes - createSettings = defaultSnapshotCreateSettings { snapWaitForCompletion = True - , snapIndices = Just (IndexList (testIndex :| [])) - -- We don't actually need to back up any data - } - free = do - deleteSnapshot srn sn - - - -data BulkTest = BulkTest { name :: Text } deriving (Eq, Generic, Show) -instance FromJSON BulkTest where - parseJSON = genericParseJSON defaultOptions -instance ToJSON BulkTest where - toJSON = genericToJSON defaultOptions - -class GApproxEq f where - gApproxEq :: f a -> f a -> Bool - --- | Unit type -instance GApproxEq U1 where - gApproxEq U1 U1 = True - --- | Sum type, ensure same constructors, recurse -instance (GApproxEq a, GApproxEq b) => GApproxEq (a :+: b) where - gApproxEq (L1 a) (L1 b) = gApproxEq a b - gApproxEq (R1 a) (R1 b) = gApproxEq a b - gApproxEq _ _ = False - --- | Product type, ensure each field is approx eq -instance (GApproxEq a, GApproxEq b) => GApproxEq (a :*: b) where - gApproxEq (a1 :*: b1) (a2 :*: b2) = gApproxEq a1 a2 && gApproxEq b1 b2 - --- | Value type, actually check the values for approx equality -instance (ApproxEq a) => GApproxEq (K1 i a) where - gApproxEq (K1 a) (K1 b) = a =~ b - -instance (GApproxEq f) => GApproxEq (M1 i t f) where - gApproxEq (M1 a) (M1 b) = gApproxEq a b - --- | Typeclass for "equal where it matters". 
Use this to specify --- less-strict equivalence for things such as lists that can wind up --- in an unpredictable order -class ApproxEq a where - (=~) :: a -> a -> Bool - default (=~) :: (Generic a, GApproxEq (Rep a)) => a -> a -> Bool - a =~ b = gApproxEq (G.from a) (G.from b) - - showApproxEq :: a -> String - default showApproxEq :: (Show a) => a -> String - showApproxEq = show - -instance ApproxEq NominalDiffTime where (=~) = (==) -instance ApproxEq UTCTime where (=~) = (==) -instance ApproxEq Text where (=~) = (==) -instance ApproxEq Bool where (=~) = (==) -instance ApproxEq Int where (=~) = (==) -instance ApproxEq Double where (=~) = (==) -instance (ApproxEq a, Show a) => ApproxEq (NonEmpty a) -instance (ApproxEq a, Show a) => ApproxEq (Maybe a) -instance ApproxEq GeoPoint -instance ApproxEq Regexp -instance ApproxEq RangeValue -instance ApproxEq LessThan -instance ApproxEq LessThanEq -instance ApproxEq LessThanD -instance ApproxEq LessThanEqD -instance ApproxEq GreaterThan -instance ApproxEq GreaterThanEq -instance ApproxEq GreaterThanD -instance ApproxEq GreaterThanEqD -instance ApproxEq MinimumMatchHighLow -instance ApproxEq RegexpFlag -instance ApproxEq RegexpFlags -instance ApproxEq NullValue -instance ApproxEq Version -instance ApproxEq VersionNumber -instance ApproxEq DistanceRange -instance ApproxEq IndexName -instance ApproxEq MappingName -instance ApproxEq DocId -instance ApproxEq IndexAliasRouting -instance ApproxEq RoutingValue -instance ApproxEq ShardCount -instance ApproxEq ReplicaCount -instance ApproxEq TemplateName -instance ApproxEq TemplatePattern -instance ApproxEq QueryString -instance ApproxEq FieldName -instance ApproxEq CacheName -instance ApproxEq CacheKey -instance ApproxEq Existence -instance ApproxEq CutoffFrequency -instance ApproxEq Analyzer -instance ApproxEq Lenient -instance ApproxEq Tiebreaker -instance ApproxEq Boost -instance ApproxEq BoostTerms -instance ApproxEq MaxExpansions -instance ApproxEq MinimumMatch -instance ApproxEq DisableCoord -instance ApproxEq IgnoreTermFrequency -instance ApproxEq MinimumTermFrequency -instance ApproxEq MaxQueryTerms -instance ApproxEq Fuzziness -instance ApproxEq PrefixLength -instance ApproxEq TypeName -instance ApproxEq PercentMatch -instance ApproxEq StopWord -instance ApproxEq QueryPath -instance ApproxEq AllowLeadingWildcard -instance ApproxEq LowercaseExpanded -instance ApproxEq EnablePositionIncrements -instance ApproxEq AnalyzeWildcard -instance ApproxEq GeneratePhraseQueries -instance ApproxEq Locale -instance ApproxEq MaxWordLength -instance ApproxEq MinWordLength -instance ApproxEq PhraseSlop -instance ApproxEq MinDocFrequency -instance ApproxEq MaxDocFrequency -instance ApproxEq Filter -instance ApproxEq Query -instance ApproxEq SimpleQueryStringQuery -instance ApproxEq FieldOrFields -instance ApproxEq SimpleQueryFlag -instance ApproxEq RegexpQuery -instance ApproxEq QueryStringQuery -instance ApproxEq RangeQuery -instance ApproxEq PrefixQuery -instance ApproxEq NestedQuery -instance ApproxEq MoreLikeThisFieldQuery -instance ApproxEq MoreLikeThisQuery -instance ApproxEq IndicesQuery -instance ApproxEq HasParentQuery -instance ApproxEq HasChildQuery -instance ApproxEq FuzzyQuery -instance ApproxEq FuzzyLikeFieldQuery -instance ApproxEq FuzzyLikeThisQuery -instance ApproxEq FilteredQuery -instance ApproxEq DisMaxQuery -instance ApproxEq CommonTermsQuery -instance ApproxEq CommonMinimumMatch -instance ApproxEq BoostingQuery -instance ApproxEq BoolQuery -instance ApproxEq MatchQuery -instance ApproxEq 
MultiMatchQueryType -instance ApproxEq BooleanOperator -instance ApproxEq ZeroTermsQuery -instance ApproxEq MatchQueryType -instance ApproxEq AliasRouting -instance ApproxEq IndexAliasCreate -instance ApproxEq SearchAliasRouting -instance ApproxEq ScoreType -instance ApproxEq Distance -instance ApproxEq DistanceUnit -instance ApproxEq DistanceType -instance ApproxEq OptimizeBbox -instance ApproxEq GeoBoundingBoxConstraint -instance ApproxEq GeoFilterType -instance ApproxEq GeoBoundingBox -instance ApproxEq LatLon -instance ApproxEq RangeExecution -instance ApproxEq FSType -instance ApproxEq CompoundFormat -instance ApproxEq InitialShardCount -instance ApproxEq Bytes -instance ApproxEq ReplicaBounds -instance ApproxEq Term -instance ApproxEq BoolMatch -instance ApproxEq MultiMatchQuery -instance ApproxEq IndexSettings -instance ApproxEq AllocationPolicy -instance ApproxEq Char where - (=~) = (==) -instance ApproxEq Vers.Version where - (=~) = (==) -instance (ApproxEq a, Show a) => ApproxEq [a] where - as =~ bs = and (zipWith (=~) as bs) -instance (ApproxEq l, ApproxEq r) => ApproxEq (Either l r) where - Left a =~ Left b = a =~ b - Right a =~ Right b = a =~ b - _ =~ _ = False - showApproxEq (Left x) = "Left " <> showApproxEq x - showApproxEq (Right x) = "Right " <> showApproxEq x -instance ApproxEq NodeAttrFilter -instance ApproxEq NodeAttrName -instance ApproxEq BuildHash -instance ApproxEq TemplateQueryKeyValuePairs where - (=~) = (==) -instance ApproxEq TemplateQueryInline -instance ApproxEq Size -instance ApproxEq PhraseSuggesterHighlighter -instance ApproxEq PhraseSuggesterCollate -instance ApproxEq PhraseSuggester -instance ApproxEq SuggestType -instance ApproxEq Suggest -instance ApproxEq DirectGenerators -instance ApproxEq DirectGeneratorSuggestModeTypes - --- | Due to the way nodeattrfilters get serialized here, they may come --- out in a different order, but they are morally equivalent -instance ApproxEq UpdatableIndexSetting where - RoutingAllocationInclude a =~ RoutingAllocationInclude b = - NE.sort a =~ NE.sort b - RoutingAllocationExclude a =~ RoutingAllocationExclude b = - NE.sort a =~ NE.sort b - RoutingAllocationRequire a =~ RoutingAllocationRequire b = - NE.sort a =~ NE.sort b - a =~ b = a == b - showApproxEq (RoutingAllocationInclude xs) = show (RoutingAllocationInclude (NE.sort xs)) - showApproxEq (RoutingAllocationExclude xs) = show (RoutingAllocationExclude (NE.sort xs)) - showApproxEq (RoutingAllocationRequire xs) = show (RoutingAllocationRequire (NE.sort xs)) - showApproxEq x = show x - - -noDuplicates :: Eq a => [a] -> Bool -noDuplicates xs = nub xs == xs - -instance Arbitrary NominalDiffTime where - arbitrary = fromInteger <$> arbitrary - -#if !MIN_VERSION_QuickCheck(2,8,0) -instance (Arbitrary k, Ord k, Arbitrary v) => Arbitrary (M.Map k v) where - arbitrary = M.fromList <$> arbitrary -#endif - -instance Arbitrary Text where - arbitrary = T.pack <$> arbitrary - -instance Arbitrary UTCTime where - arbitrary = UTCTime - <$> arbitrary - <*> (fromRational . toRational <$> choose (0::Double, 86400)) - -instance Arbitrary Day where - arbitrary = ModifiedJulianDay <$> (2000 +) <$> arbitrary - shrink = (ModifiedJulianDay <$>) . shrink . 
toModifiedJulianDay - -#if !MIN_VERSION_QuickCheck(2,9,0) -instance Arbitrary a => Arbitrary (NonEmpty a) where - arbitrary = liftA2 (:|) arbitrary arbitrary -#endif - -arbitraryScore :: Gen Score -arbitraryScore = fmap getPositive <$> arbitrary - -instance (Arbitrary a, Typeable a) => Arbitrary (Hit a) where - arbitrary = Hit <$> arbitrary - <*> arbitrary - <*> arbitrary - <*> arbitraryScore - <*> arbitrary - <*> arbitrary - shrink = genericShrink - - -instance (Arbitrary a, Typeable a) => Arbitrary (SearchHits a) where - arbitrary = reduceSize $ do - tot <- getPositive <$> arbitrary - score <- arbitraryScore - hs <- arbitrary - return $ SearchHits tot score hs - shrink = genericShrink - -reduceSize :: Gen a -> Gen a -reduceSize f = sized $ \n -> resize (n `div` 2) f - -getSource :: EsResult a -> Maybe a -getSource = fmap _source . foundResult - -grabFirst :: Either EsError (SearchResult a) -> Either EsError a -grabFirst r = - case fmap (hitSource . head . hits . searchHits) r of - (Left e) -> Left e - (Right Nothing) -> Left (EsError 500 "Source was missing") - (Right (Just x)) -> Right x - -------------------------------------------------------------------------------- -arbitraryAlphaNum :: Gen Char -arbitraryAlphaNum = oneof [choose ('a', 'z') - ,choose ('A','Z') - , choose ('0', '9')] - -instance Arbitrary RoutingValue where - arbitrary = RoutingValue . T.pack <$> listOf1 arbitraryAlphaNum - -instance Arbitrary AliasRouting where - arbitrary = oneof [allAlias - ,one - ,theOther - ,both] - where one = GranularAliasRouting - <$> (Just <$> arbitrary) - <*> pure Nothing - theOther = GranularAliasRouting Nothing - <$> (Just <$> arbitrary) - both = GranularAliasRouting - <$> (Just <$> arbitrary) - <*> (Just <$> arbitrary) - allAlias = AllAliasRouting <$> arbitrary - shrink = genericShrink - - -instance Arbitrary FieldName where - arbitrary = FieldName . T.pack <$> listOf1 arbitraryAlphaNum - shrink = genericShrink - - -#if MIN_VERSION_base(4,10,0) --- Test.QuickCheck.Modifiers - -qcNonEmptyToNonEmpty :: NonEmptyList a -> NonEmpty a -qcNonEmptyToNonEmpty (NonEmpty (a : xs)) = (a :| xs) -qcNonEmptyToNonEmpty (NonEmpty []) = error "NonEmpty was empty!" - -instance Arbitrary a => Arbitrary (NonEmpty a) where - arbitrary = qcNonEmptyToNonEmpty <$> arbitrary -#endif - -instance Arbitrary RegexpFlags where - arbitrary = oneof [ pure AllRegexpFlags - , pure NoRegexpFlags - , SomeRegexpFlags <$> genUniqueFlags - ] - where genUniqueFlags = NE.fromList . 
nub <$> listOf1 arbitrary - shrink = genericShrink - - -instance Arbitrary IndexAliasCreate where - arbitrary = IndexAliasCreate <$> arbitrary <*> reduceSize arbitrary - shrink = genericShrink - -instance Arbitrary Query where - arbitrary = reduceSize $ oneof [ TermQuery <$> arbitrary <*> arbitrary - , TermsQuery <$> arbitrary <*> arbitrary - , QueryMatchQuery <$> arbitrary - , QueryMultiMatchQuery <$> arbitrary - , QueryBoolQuery <$> arbitrary - , QueryBoostingQuery <$> arbitrary - , QueryCommonTermsQuery <$> arbitrary - , ConstantScoreFilter <$> arbitrary <*> arbitrary - , ConstantScoreQuery <$> arbitrary <*> arbitrary - , QueryDisMaxQuery <$> arbitrary - , QueryFilteredQuery <$> arbitrary - , QueryFuzzyLikeThisQuery <$> arbitrary - , QueryFuzzyLikeFieldQuery <$> arbitrary - , QueryFuzzyQuery <$> arbitrary - , QueryHasChildQuery <$> arbitrary - , QueryHasParentQuery <$> arbitrary - , IdsQuery <$> arbitrary <*> arbitrary - , QueryIndicesQuery <$> arbitrary - , MatchAllQuery <$> arbitrary - , QueryMoreLikeThisQuery <$> arbitrary - , QueryMoreLikeThisFieldQuery <$> arbitrary - , QueryNestedQuery <$> arbitrary - , QueryPrefixQuery <$> arbitrary - , QueryQueryStringQuery <$> arbitrary - , QuerySimpleQueryStringQuery <$> arbitrary - , QueryRangeQuery <$> arbitrary - , QueryRegexpQuery <$> arbitrary - , QueryTemplateQueryInline <$> arbitrary - ] - shrink = genericShrink - -instance Arbitrary Filter where - arbitrary = reduceSize $ oneof [ AndFilter <$> arbitrary <*> arbitrary - , OrFilter <$> arbitrary <*> arbitrary - , NotFilter <$> arbitrary <*> arbitrary - , pure IdentityFilter - , BoolFilter <$> arbitrary - , ExistsFilter <$> arbitrary - , GeoBoundingBoxFilter <$> arbitrary - , GeoDistanceFilter <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary - , GeoDistanceRangeFilter <$> arbitrary <*> arbitrary - , GeoPolygonFilter <$> arbitrary <*> arbitrary - , IdsFilter <$> arbitrary <*> arbitrary - , LimitFilter <$> arbitrary - , MissingFilter <$> arbitrary <*> arbitrary <*> arbitrary - , PrefixFilter <$> arbitrary <*> arbitrary <*> arbitrary - , QueryFilter <$> arbitrary <*> arbitrary - , RangeFilter <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary - , RegexpFilter <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary - , TermFilter <$> arbitrary <*> arbitrary] - shrink = genericShrink - -instance Arbitrary ReplicaBounds where - arbitrary = oneof [ replicasBounded - , replicasLowerBounded - , pure ReplicasUnbounded - ] - where replicasBounded = do Positive a <- arbitrary - Positive b <- arbitrary - return (ReplicasBounded a b) - replicasLowerBounded = do Positive a <- arbitrary - return (ReplicasLowerBounded a) - -instance Arbitrary NodeAttrName where - arbitrary = NodeAttrName . T.pack <$> listOf1 arbitraryAlphaNum - - -instance Arbitrary NodeAttrFilter where - arbitrary = do - n <- arbitrary - s:ss <- listOf1 (listOf1 arbitraryAlphaNum) - let ts = T.pack <$> s :| ss - return (NodeAttrFilter n ts) - shrink = genericShrink - -instance Arbitrary VersionNumber where - arbitrary = mk . fmap getPositive . getNonEmpty <$> arbitrary - where - mk versions = VersionNumber (Vers.Version versions []) - -instance Arbitrary TemplateQueryKeyValuePairs where - arbitrary = TemplateQueryKeyValuePairs . HM.fromList <$> arbitrary - shrink (TemplateQueryKeyValuePairs x) = map (TemplateQueryKeyValuePairs . HM.fromList) . 
shrink $ HM.toList x - -instance Arbitrary IndexName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MappingName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DocId where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Version where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BuildHash where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndexAliasRouting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ShardCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ReplicaCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplateName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplatePattern where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryString where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CacheName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CacheKey where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Existence where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CutoffFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Analyzer where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxExpansions where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Lenient where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Tiebreaker where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Boost where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoostTerms where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DisableCoord where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IgnoreTermFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumTermFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxQueryTerms where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Fuzziness where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PrefixLength where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TypeName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PercentMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary StopWord where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryPath where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AllowLeadingWildcard where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LowercaseExpanded where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary EnablePositionIncrements where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AnalyzeWildcard where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeneratePhraseQueries where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Locale where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxWordLength where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinWordLength where arbitrary = sopArbitrary; shrink = genericShrink -instance 
Arbitrary PhraseSlop where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinDocFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MaxDocFrequency where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Regexp where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SimpleQueryStringQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FieldOrFields where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SimpleQueryFlag where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RegexpQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary QueryStringQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RangeQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RangeValue where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PrefixQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary NestedQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MoreLikeThisFieldQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MoreLikeThisQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndicesQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary HasParentQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary HasChildQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FuzzyQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FuzzyLikeFieldQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FuzzyLikeThisQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FilteredQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DisMaxQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CommonTermsQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DistanceRange where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MultiMatchQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThanD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThanEqD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThanD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThanEqD where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThan where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LessThanEq where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThan where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GreaterThanEq where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoPoint where - arbitrary = GeoPoint <$> (arbitrary `suchThat` reasonableFieldName) <*> arbitrary - where - -- These are problematic for geopoint for obvious reasons - reasonableFieldName (FieldName "from") = False - reasonableFieldName (FieldName "to") = False - reasonableFieldName _ = True - shrink = genericShrink -instance Arbitrary NullValue where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MinimumMatchHighLow where arbitrary = sopArbitrary; shrink = genericShrink -instance 
Arbitrary CommonMinimumMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoostingQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoolQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MatchQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MultiMatchQueryType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BooleanOperator where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ZeroTermsQuery where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary MatchQueryType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SearchAliasRouting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary ScoreType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Distance where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DistanceUnit where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DistanceType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary OptimizeBbox where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoBoundingBoxConstraint where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoFilterType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary GeoBoundingBox where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary LatLon where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RangeExecution where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary RegexpFlag where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary BoolMatch where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Term where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary IndexSettings where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary UpdatableIndexSetting where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Bytes where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary AllocationPolicy where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary InitialShardCount where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FSType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary CompoundFormat where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary FsSnapshotRepo where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SnapshotRepoName where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary TemplateQueryInline where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PhraseSuggesterCollate where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PhraseSuggesterHighlighter where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Size where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary PhraseSuggester where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary SuggestType where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary Suggest where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DirectGenerators where arbitrary = sopArbitrary; shrink = genericShrink -instance Arbitrary DirectGeneratorSuggestModeTypes where arbitrary = 
sopArbitrary; shrink = genericShrink - -newtype UpdatableIndexSetting' = UpdatableIndexSetting' UpdatableIndexSetting - deriving (Show, Eq, ToJSON, FromJSON, ApproxEq, Typeable) - -instance Arbitrary UpdatableIndexSetting' where - arbitrary = do - settings <- arbitrary - return $ UpdatableIndexSetting' $ case settings of - RoutingAllocationInclude xs -> RoutingAllocationInclude (dropDuplicateAttrNames xs) - RoutingAllocationExclude xs -> RoutingAllocationExclude (dropDuplicateAttrNames xs) - RoutingAllocationRequire xs -> RoutingAllocationRequire (dropDuplicateAttrNames xs) - x -> x - where - dropDuplicateAttrNames = NE.fromList . L.nubBy sameAttrName . NE.toList - sameAttrName a b = nodeAttrFilterName a == nodeAttrFilterName b +import Test.Common +import Test.Import + +import Prelude + +import qualified Test.Aggregation as Aggregation +import qualified Test.BulkAPI as Bulk +import qualified Test.Documents as Documents +import qualified Test.Highlights as Highlights +import qualified Test.Indices as Indices +import qualified Test.JSON as JSON +import qualified Test.Query as Query +import qualified Test.Snapshots as Snapshots +import qualified Test.Sorting as Sorting +import qualified Test.SourceFiltering as SourceFiltering +import qualified Test.Suggest as Suggest +import qualified Test.Templates as Templates main :: IO () main = hspec $ do - - describe "index create/delete API" $ do - it "creates and then deletes the requested index" $ withTestEnv $ do - -- priming state. - _ <- deleteExampleIndex - resp <- createExampleIndex - deleteResp <- deleteExampleIndex - liftIO $ do - validateStatus resp 200 - validateStatus deleteResp 200 + Aggregation.spec + Bulk.spec + Documents.spec + Highlights.spec + Indices.spec + JSON.spec + Query.spec + Snapshots.spec + Sorting.spec + SourceFiltering.spec + Suggest.spec + Templates.spec describe "error parsing" $ do it "can parse EsErrors for < 2.0" $ when' (atmost es16) $ withTestEnv $ do @@ -993,619 +57,11 @@ main = hspec $ do let errorResp = eitherDecode (responseBody res) liftIO (errorResp `shouldBe` Right (EsError 404 "no such index")) - describe "document API" $ do - it "indexes, updates, gets, and then deletes the generated document" $ withTestEnv $ do - _ <- insertData - _ <- updateData - docInserted <- getDocument testIndex testMapping (DocId "1") - let newTweet = eitherDecode - (responseBody docInserted) :: Either String (EsResult Tweet) - liftIO $ (fmap getSource newTweet `shouldBe` Right (Just patchedTweet)) - - it "indexes, gets, and then deletes the generated document with a DocId containing a space" $ withTestEnv $ do - _ <- insertWithSpaceInId - docInserted <- getDocument testIndex testMapping (DocId "Hello World") - let newTweet = eitherDecode - (responseBody docInserted) :: Either String (EsResult Tweet) - liftIO $ (fmap getSource newTweet `shouldBe` Right (Just exampleTweet)) - - it "produces a parseable result when looking up a bogus document" $ withTestEnv $ do - doc <- getDocument testIndex testMapping (DocId "bogus") - let noTweet = eitherDecode - (responseBody doc) :: Either String (EsResult Tweet) - liftIO $ fmap foundResult noTweet `shouldBe` Right Nothing - - it "can use optimistic concurrency control" $ withTestEnv $ do - let ev = ExternalDocVersion minBound - let cfg = defaultIndexDocumentSettings { idsVersionControl = ExternalGT ev } - resetIndex - res <- insertData' cfg - liftIO $ isCreated res `shouldBe` True - res' <- insertData' cfg - liftIO $ isVersionConflict res' `shouldBe` True - - it "indexes two documents in a 
parent/child relationship and checks that the child exists" $ withTestEnv $ do - resetIndex - _ <- putMapping testIndex (MappingName "child") ChildMapping - _ <- putMapping testIndex (MappingName "parent") ParentMapping - _ <- indexDocument testIndex (MappingName "parent") defaultIndexDocumentSettings exampleTweet (DocId "1") - let parent = (Just . DocumentParent . DocId) "1" - ids = IndexDocumentSettings NoVersionControl parent - _ <- indexDocument testIndex (MappingName "child") ids otherTweet (DocId "2") - _ <- refreshIndex testIndex - exists <- documentExists testIndex (MappingName "child") parent (DocId "2") - liftIO $ exists `shouldBe` True - - describe "template API" $ do - it "can create a template" $ withTestEnv $ do - let idxTpl = IndexTemplate (TemplatePattern "tweet-*") (Just (IndexSettings (ShardCount 1) (ReplicaCount 1))) [toJSON TweetMapping] - resp <- putTemplate idxTpl (TemplateName "tweet-tpl") - liftIO $ validateStatus resp 200 - - it "can detect if a template exists" $ withTestEnv $ do - exists <- templateExists (TemplateName "tweet-tpl") - liftIO $ exists `shouldBe` True - - it "can delete a template" $ withTestEnv $ do - resp <- deleteTemplate (TemplateName "tweet-tpl") - liftIO $ validateStatus resp 200 - - it "can detect if a template doesn't exist" $ withTestEnv $ do - exists <- templateExists (TemplateName "tweet-tpl") - liftIO $ exists `shouldBe` False - - describe "bulk API" $ do - it "inserts all documents we request" $ withTestEnv $ do - _ <- insertData - let firstTest = BulkTest "blah" - let secondTest = BulkTest "bloo" - let firstDoc = BulkIndex testIndex - testMapping (DocId "2") (toJSON firstTest) - let secondDoc = BulkCreate testIndex - testMapping (DocId "3") (toJSON secondTest) - let stream = V.fromList [firstDoc, secondDoc] - _ <- bulk stream - _ <- refreshIndex testIndex - fDoc <- getDocument testIndex testMapping (DocId "2") - sDoc <- getDocument testIndex testMapping (DocId "3") - let maybeFirst = eitherDecode $ responseBody fDoc :: Either String (EsResult BulkTest) - let maybeSecond = eitherDecode $ responseBody sDoc :: Either String (EsResult BulkTest) - liftIO $ do - fmap getSource maybeFirst `shouldBe` Right (Just firstTest) - fmap getSource maybeSecond `shouldBe` Right (Just secondTest) - - - describe "query API" $ do - it "returns document for term query and identity filter" $ withTestEnv $ do - _ <- insertData - let query = TermQuery (Term "user" "bitemyapp") Nothing - let filter = IdentityFilter <&&> IdentityFilter - let search = mkSearch (Just query) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "handles constant score queries" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let cfQuery = ConstantScoreQuery query (Boost 1.0) - let filter = IdentityFilter - let search = mkSearch (Just cfQuery) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - it "handles constant score filters" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let cfFilter = ConstantScoreFilter IdentityFilter (Boost 1.0) - let boolQuery = mkBoolQuery [query, cfFilter] [] [] - let search = mkSearch (Just (QueryBoolQuery boolQuery)) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - - it "returns document for terms query and identity filter" $ withTestEnv $ do - _ <- insertData - let query = TermsQuery "user" ("bitemyapp" :| []) - let filter = 
IdentityFilter <&&> IdentityFilter - let search = mkSearch (Just query) (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for match query" $ withTestEnv $ do - _ <- insertData - let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for multi-match query" $ withTestEnv $ do - _ <- insertData - let flds = [FieldName "user", FieldName "message"] - let query = QueryMultiMatchQuery $ mkMultiMatchQuery flds (QueryString "bitemyapp") - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for multi-match query with a custom tiebreaker" $ withTestEnv $ do - _ <- insertData - let tiebreaker = Just $ Tiebreaker 0.3 - flds = [FieldName "user", FieldName "message"] - multiQuery' = mkMultiMatchQuery flds (QueryString "bitemyapp") - query = QueryMultiMatchQuery $ multiQuery' { multiMatchQueryTiebreaker = tiebreaker } - search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for bool query" $ withTestEnv $ do - _ <- insertData - let innerQuery = QueryMatchQuery $ - mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let query = QueryBoolQuery $ - mkBoolQuery [innerQuery] [] [] - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for boosting query" $ withTestEnv $ do - _ <- insertData - let posQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp") - let negQuery = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "notmyapp") - let query = QueryBoostingQuery $ BoostingQuery posQuery negQuery (Boost 0.2) - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for common terms query" $ withTestEnv $ do - _ <- insertData - let query = QueryCommonTermsQuery $ - CommonTermsQuery (FieldName "user") - (QueryString "bitemyapp") - (CutoffFrequency 0.0001) - Or Or Nothing Nothing Nothing Nothing - let search = mkSearch (Just query) Nothing - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for inline template query" $ withTestEnv $ do - _ <- insertData - let innerQuery = QueryMatchQuery $ - mkMatchQuery (FieldName "{{userKey}}") - (QueryString "{{bitemyappKey}}") - templateParams = TemplateQueryKeyValuePairs $ HM.fromList - [ ("userKey", "user") - , ("bitemyappKey", "bitemyapp") - ] - templateQuery = QueryTemplateQueryInline $ - TemplateQueryInline innerQuery templateParams - search = mkSearch (Just templateQuery) Nothing - myTweet <- searchTweet search - liftIO $ myTweet `shouldBe` Right exampleTweet - - - describe "sorting" $ do - it "returns documents in the right order" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending - let search = Search Nothing - (Just IdentityFilter) (Just [sortSpec]) Nothing Nothing - False (From 0) (Size 10) SearchTypeQueryThenFetch Nothing Nothing - Nothing - result <- searchTweets search - let myTweet = grabFirst result - liftIO $ - myTweet `shouldBe` Right otherTweet - 
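-- The describe-blocks deleted in this hunk now live in the standalone
-- spec modules wired into the new tests/V1/tests.hs above, one module per
-- feature area. A minimal sketch of that extraction pattern, modeled on
-- the Test.Templates module added earlier in this diff and reusing the
-- sorting test above as the example. The actual contents of
-- tests/V1/Test/Sorting.hs are not shown in this diff, and Test.Common /
-- Test.Import are assumed to re-export the shared helpers (withTestEnv,
-- insertData, insertOther, searchTweets, grabFirst):
{-# LANGUAGE OverloadedStrings #-}

module Test.Sorting where

import Test.Common
import Test.Import

spec :: Spec
spec =
  describe "sorting" $
    it "returns documents in the right order" $ withTestEnv $ do
      -- seed two tweets with different ages, then sort ascending on age
      _ <- insertData
      _ <- insertOther
      let sortSpec = DefaultSortSpec $ mkSort (FieldName "age") Ascending
          search = Search Nothing (Just IdentityFilter) (Just [sortSpec])
                     Nothing Nothing False (From 0) (Size 10)
                     SearchTypeQueryThenFetch Nothing Nothing Nothing
      result <- searchTweets search
      -- otherTweet has the lower age, so it should come back first
      liftIO $ grabFirst result `shouldBe` Right otherTweet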
- - describe "filtering API" $ do - it "returns document for composed boolmatch and identity" $ withTestEnv $ do - _ <- insertData - let queryFilter = BoolFilter (MustMatch (Term "user" "bitemyapp") False) - <&&> IdentityFilter - let search = mkSearch Nothing (Just queryFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for term filter" $ withTestEnv $ do - _ <- insertData - let termFilter = TermFilter (Term "user" "bitemyapp") False - let search = mkSearch Nothing (Just termFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for existential filter" $ withTestEnv $ do - _ <- insertData - let search = mkSearch Nothing (Just (ExistsFilter (FieldName "user"))) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for geo boundingbox filter" $ withTestEnv $ do - _ <- insertData - let box = GeoBoundingBox (LatLon 40.73 (-74.1)) (LatLon 40.10 (-71.12)) - let bbConstraint = GeoBoundingBoxConstraint (FieldName "tweet.location") box False GeoFilterMemory - let geoFilter = GeoBoundingBoxFilter bbConstraint - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for nonsensical boundingbox filter" $ withTestEnv $ do - _ <- insertData - let box = GeoBoundingBox (LatLon 0.73 (-4.1)) (LatLon 0.10 (-1.12)) - let bbConstraint = GeoBoundingBoxConstraint (FieldName "tweet.location") box False GeoFilterMemory - let geoFilter = GeoBoundingBoxFilter bbConstraint - let search = mkSearch Nothing (Just geoFilter) - searchExpectNoResults search - - it "returns document for geo distance filter" $ withTestEnv $ do - _ <- insertData - let geoPoint = GeoPoint (FieldName "tweet.location") (LatLon 40.12 (-71.34)) - let distance = Distance 10.0 Miles - let optimizeBbox = OptimizeGeoFilterType GeoFilterMemory - let geoFilter = GeoDistanceFilter geoPoint distance SloppyArc optimizeBbox False - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for geo distance range filter" $ withTestEnv $ do - _ <- insertData - let geoPoint = GeoPoint (FieldName "tweet.location") (LatLon 40.12 (-71.34)) - let distanceRange = DistanceRange (Distance 0.0 Miles) (Distance 10.0 Miles) - let geoFilter = GeoDistanceRangeFilter geoPoint distanceRange - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for wild geo distance range filter" $ withTestEnv $ do - _ <- insertData - let geoPoint = GeoPoint (FieldName "tweet.location") (LatLon 40.12 (-71.34)) - let distanceRange = DistanceRange (Distance 100.0 Miles) (Distance 1000.0 Miles) - let geoFilter = GeoDistanceRangeFilter geoPoint distanceRange - let search = mkSearch Nothing (Just geoFilter) - searchExpectNoResults search - - it "returns document for geo polygon filter" $ withTestEnv $ do - _ <- insertData - let points = [LatLon 40.0 (-70.00), - LatLon 40.0 (-72.00), - LatLon 41.0 (-70.00), - LatLon 41.0 (-72.00)] - let geoFilter = GeoPolygonFilter (FieldName "tweet.location") points - let search = mkSearch Nothing (Just geoFilter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for bad geo polygon filter" $ withTestEnv $ do - _ <- 
insertData - let points = [LatLon 40.0 (-70.00), - LatLon 40.0 (-71.00), - LatLon 41.0 (-70.00), - LatLon 41.0 (-71.00)] - let geoFilter = GeoPolygonFilter (FieldName "tweet.location") points - let search = mkSearch Nothing (Just geoFilter) - searchExpectNoResults search - - it "returns document for ids filter" $ withTestEnv $ do - _ <- insertData - let filter = IdsFilter (MappingName "tweet") [DocId "1"] - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for Double range filter" $ withTestEnv $ do - _ <- insertData - let filter = RangeFilter (FieldName "age") - (RangeDoubleGtLt (GreaterThan 1000.0) (LessThan 100000.0)) - RangeExecutionIndex False - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for UTCTime date filter" $ withTestEnv $ do - _ <- insertData - let filter = RangeFilter (FieldName "postDate") - (RangeDateGtLt - (GreaterThanD (UTCTime - (ModifiedJulianDay 54000) - (secondsToDiffTime 0))) - (LessThanD (UTCTime - (ModifiedJulianDay 55000) - (secondsToDiffTime 11)))) - RangeExecutionIndex False - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "returns document for regexp filter" $ withTestEnv $ do - _ <- insertData - let filter = RegexpFilter (FieldName "user") (Regexp "bite.*app") - AllRegexpFlags (CacheName "test") False (CacheKey "key") - let search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ - myTweet `shouldBe` Right exampleTweet - - it "doesn't return document for non-matching regexp filter" $ withTestEnv $ do - _ <- insertData - let filter = RegexpFilter (FieldName "user") - (Regexp "boy") AllRegexpFlags - (CacheName "test") False (CacheKey "key") - let search = mkSearch Nothing (Just filter) - searchExpectNoResults search - - it "returns document for query filter, uncached" $ withTestEnv $ do - _ <- insertData - let filter = QueryFilter (TermQuery (Term "user" "bitemyapp") Nothing) True - search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ myTweet `shouldBe` Right exampleTweet - - it "returns document for query filter, cached" $ withTestEnv $ do - _ <- insertData - let filter = QueryFilter (TermQuery (Term "user" "bitemyapp") Nothing) False - search = mkSearch Nothing (Just filter) - myTweet <- searchTweet search - liftIO $ myTweet `shouldBe` Right exampleTweet - - describe "Aggregation API" $ do - it "returns term aggregation results" $ withTestEnv $ do - _ <- insertData - let terms = TermsAgg $ mkTermsAggregation "user" - let search = mkAggregateSearch Nothing $ mkAggregations "users" terms - searchExpectAggs search - searchValidBucketAgg search "users" toTerms - - it "returns sub-aggregation results" $ withTestEnv $ do - _ <- insertData - let subaggs = mkAggregations "age_agg" . TermsAgg $ mkTermsAggregation "age" - agg = TermsAgg $ (mkTermsAggregation "user") { termAggs = Just subaggs} - search = mkAggregateSearch Nothing $ mkAggregations "users" agg - reply <- searchByIndex testIndex search - let result = decode (responseBody reply) :: Maybe (SearchResult Tweet) - usersAggResults = result >>= aggregations >>= toTerms "users" - subAggResults = usersAggResults >>= (listToMaybe . 
buckets) >>= termsAggs >>= toTerms "age_agg" - subAggResultsExists = isJust subAggResults - liftIO $ (subAggResultsExists) `shouldBe` True - - it "returns cardinality aggregation results" $ withTestEnv $ do - _ <- insertData - let cardinality = CardinalityAgg $ mkCardinalityAggregation $ FieldName "user" - let search = mkAggregateSearch Nothing $ mkAggregations "users" cardinality - let search' = search { Database.V1.Bloodhound.from = From 0, size = Size 0 } - searchExpectAggs search' - let docCountPair k n = (k, object ["value" .= Number n]) - res <- searchTweets search' - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "users" 1])) - - it "can give collection hint parameters to term aggregations" $ when' (atleast es13) $ withTestEnv $ do - _ <- insertData - let terms = TermsAgg $ (mkTermsAggregation "user") { termCollectMode = Just BreadthFirst } - let search = mkAggregateSearch Nothing $ mkAggregations "users" terms - searchExpectAggs search - searchValidBucketAgg search "users" toTerms - - -- One of these fails with 1.7.3 - it "can give execution hint parameters to term aggregations" $ when' (atmost es11) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [Map, Ordinals] - - it "can give execution hint parameters to term aggregations" $ when' (is es12) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map, Ordinals] - - it "can give execution hint parameters to term aggregations" $ when' (atleast es12) $ withTestEnv $ do - _ <- insertData - searchTermsAggHint [GlobalOrdinals, GlobalOrdinalsHash, GlobalOrdinalsLowCardinality, Map] - -- One of the above. - - it "can execute value_count aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let ags = mkAggregations "user_count" (ValueCountAgg (FieldValueCount (FieldName "user"))) <> - mkAggregations "bogus_count" (ValueCountAgg (FieldValueCount (FieldName "bogus"))) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["value" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "user_count" 2 - , docCountPair "bogus_count" 0 - ])) - - it "can execute filter aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let ags = mkAggregations "bitemyapps" (FilterAgg (FilterAggregation (TermFilter (Term "user" "bitemyapp") defaultCache) Nothing)) <> - mkAggregations "notmyapps" (FilterAgg (FilterAggregation (TermFilter (Term "user" "notmyapp") defaultCache) Nothing)) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["doc_count" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [ docCountPair "bitemyapps" 1 - , docCountPair "notmyapps" 1 - ])) - it "can execute date_range aggregations" $ withTestEnv $ do - let now = fromGregorian 2015 3 14 - let ltAMonthAgo = UTCTime (fromGregorian 2015 3 1) 0 - let ltAWeekAgo = UTCTime (fromGregorian 2015 3 10) 0 - let oldDoc = exampleTweet { postDate = ltAMonthAgo } - let newDoc = exampleTweet { postDate = ltAWeekAgo } - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings oldDoc (DocId "1") - _ <- indexDocument testIndex testMapping defaultIndexDocumentSettings newDoc (DocId "2") - _ <- refreshIndex testIndex - let thisMonth = DateRangeFrom (DateMathExpr (DMDate now) [SubtractTime 1 DMMonth]) - let thisWeek = DateRangeFrom (DateMathExpr (DMDate now) 
[SubtractTime 1 DMWeek]) - let agg = DateRangeAggregation (FieldName "postDate") Nothing (thisMonth :| [thisWeek]) - let ags = mkAggregations "date_ranges" (DateRangeAgg agg) - let search = mkAggregateSearch Nothing ags - res <- searchTweets search - liftIO $ hitsTotal . searchHits <$> res `shouldBe` Right 2 - let bucks = do magrs <- fmapL show (aggregations <$> res) - agrs <- note "no aggregations returned" magrs - rawBucks <- note "no date_ranges aggregation" $ M.lookup "date_ranges" agrs - parseEither parseJSON rawBucks - let fromMonthT = UTCTime (fromGregorian 2015 2 14) 0 - let fromWeekT = UTCTime (fromGregorian 2015 3 7) 0 - liftIO $ buckets <$> bucks `shouldBe` Right [ DateRangeResult "2015-02-14T00:00:00.000Z-*" - (Just fromMonthT) - (Just "2015-02-14T00:00:00.000Z") - Nothing - Nothing - 2 - Nothing - , DateRangeResult "2015-03-07T00:00:00.000Z-*" - (Just fromWeekT) - (Just "2015-03-07T00:00:00.000Z") - Nothing - Nothing - 1 - Nothing - ] - - it "returns date histogram aggregation results" $ withTestEnv $ do - _ <- insertData - let histogram = DateHistogramAgg $ mkDateHistogram (FieldName "postDate") Minute - let search = mkAggregateSearch Nothing (mkAggregations "byDate" histogram) - searchExpectAggs search - searchValidBucketAgg search "byDate" toDateHistogram - - it "returns date histogram using fractional date" $ withTestEnv $ do - _ <- insertData - let periods = [Year, Quarter, Month, Week, Day, Hour, Minute, Second] - let fractionals = map (FractionalInterval 1.5) [Weeks, Days, Hours, Minutes, Seconds] - let intervals = periods ++ fractionals - let histogram = mkDateHistogram (FieldName "postDate") - let search interval = mkAggregateSearch Nothing $ mkAggregations "byDate" $ DateHistogramAgg (histogram interval) - let expect interval = searchExpectAggs (search interval) - let valid interval = searchValidBucketAgg (search interval) "byDate" toDateHistogram - forM_ intervals expect - forM_ intervals valid - - it "can execute missing aggregations" $ withTestEnv $ do - _ <- insertData - _ <- insertExtra - let ags = mkAggregations "missing_agg" (MissingAgg (MissingAggregation "extra")) - let search = mkAggregateSearch Nothing ags - let docCountPair k n = (k, object ["doc_count" .= Number n]) - res <- searchTweets search - liftIO $ - fmap aggregations res `shouldBe` Right (Just (M.fromList [docCountPair "missing_agg" 1])) - - describe "Highlights API" $ do - - it "returns highlight from query when there should be one" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "message") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search - liftIO $ - myHighlight `shouldBe` Right (Just (M.fromList [("message",["Use haskell!"])])) - - it "doesn't return highlight from a query when it shouldn't" $ withTestEnv $ do - _ <- insertData - _ <- insertOther - let query = QueryMatchQuery $ mkMatchQuery (FieldName "_all") (QueryString "haskell") - let testHighlight = Highlights Nothing [FieldHighlight (FieldName "user") Nothing] - - let search = mkHighlightSearch (Just query) testHighlight - myHighlight <- searchTweetHighlight search - liftIO $ - myHighlight `shouldBe` Right Nothing - - describe "Source filtering" $ do - - it "doesn't include source when sources are disabled" $ withTestEnv $ do - searchExpectSource - NoSource - (Left (EsError 500 "Source was missing")) - - it 
"includes a source" $ withTestEnv $ do - searchExpectSource - (SourcePatterns (PopPattern (Pattern "message"))) - (Right (Object (HM.fromList [("message", String "Use haskell!")]))) - - it "includes sources" $ withTestEnv $ do - searchExpectSource - (SourcePatterns (PopPatterns [Pattern "user", Pattern "message"])) - (Right (Object (HM.fromList [("user",String "bitemyapp"),("message", String "Use haskell!")]))) - - it "includes source patterns" $ withTestEnv $ do - searchExpectSource - (SourcePatterns (PopPattern (Pattern "*ge"))) - (Right (Object (HM.fromList [("age", Number 10000),("message", String "Use haskell!")]))) - - it "excludes source patterns" $ withTestEnv $ do - searchExpectSource - (SourceIncludeExclude (Include []) - (Exclude [Pattern "l*", Pattern "*ge", Pattern "postDate", Pattern "extra"])) - (Right (Object (HM.fromList [("user",String "bitemyapp")]))) - - describe "ToJSON RegexpFlags" $ do - it "generates the correct JSON for AllRegexpFlags" $ - toJSON AllRegexpFlags `shouldBe` String "ALL" - - it "generates the correct JSON for NoRegexpFlags" $ - toJSON NoRegexpFlags `shouldBe` String "NONE" - - it "generates the correct JSON for SomeRegexpFlags" $ - let flags = AnyString :| [ Automaton - , Complement - , Empty - , Intersection - , Interval ] - in toJSON (SomeRegexpFlags flags) `shouldBe` String "ANYSTRING|AUTOMATON|COMPLEMENT|EMPTY|INTERSECTION|INTERVAL" - - prop "removes duplicates from flags" $ \(flags :: RegexpFlags) -> - let String str = toJSON flags - flagStrs = T.splitOn "|" str - in noDuplicates flagStrs - - describe "omitNulls" $ do - it "checks that omitNulls drops list elements when it should" $ - let dropped = omitNulls $ [ "test1" .= (toJSON ([] :: [Int])) - , "test2" .= (toJSON ("some value" :: Text))] - in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) - - it "checks that omitNulls doesn't drop list elements when it shouldn't" $ - let notDropped = omitNulls $ [ "test1" .= (toJSON ([1] :: [Int])) - , "test2" .= (toJSON ("some value" :: Text))] - in notDropped `shouldBe` Object (HM.fromList [ ("test1", Array (V.fromList [Number 1.0])) - , ("test2", String "some value")]) - it "checks that omitNulls drops non list elements when it should" $ - let dropped = omitNulls $ [ "test1" .= (toJSON Null) - , "test2" .= (toJSON ("some value" :: Text))] - in dropped `shouldBe` Object (HM.fromList [("test2", String "some value")]) - it "checks that omitNulls doesn't drop non list elements when it shouldn't" $ - let notDropped = omitNulls $ [ "test1" .= (toJSON (1 :: Int)) - , "test2" .= (toJSON ("some value" :: Text))] - in notDropped `shouldBe` Object (HM.fromList [ ("test1", Number 1.0) - , ("test2", String "some value")]) - describe "Monoid (SearchHits a)" $ do + describe "Monoid (SearchHits a)" $ prop "abides the monoid laws" $ eq $ prop_Monoid (T :: T (SearchHits ())) - describe "mkDocVersion" $ do + describe "mkDocVersion" $ prop "can never construct an out of range docVersion" $ \i -> let res = mkDocVersion i in case res of @@ -1614,113 +70,7 @@ main = hspec $ do (dv <= maxBound) .&&. 
           docVersionNumber dv === i
 
-  describe "FsSnapshotRepo" $ do
-    prop "SnapshotRepo laws" $ \fsr ->
-      fromGSnapshotRepo (toGSnapshotRepo fsr) === Right (fsr :: FsSnapshotRepo)
-
-  describe "snapshot repos" $ do
-    it "always parses all snapshot repos API" $ when' canSnapshot $ withTestEnv $ do
-      res <- getSnapshotRepos AllSnapshotRepos
-      liftIO $ case res of
-        Left e -> expectationFailure ("Expected a right but got Left " <> show e)
-        Right _ -> return ()
-
-    it "finds an existing list of repos" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      let r2n = SnapshotRepoName "bloodhound-repo2"
-      withSnapshotRepo r1n $ \r1 ->
-        withSnapshotRepo r2n $ \r2 -> do
-          repos <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| [ExactRepo r2n]))
-          liftIO $ case repos of
-            Right xs -> do
-              let srt = L.sortBy (comparing gSnapshotRepoName)
-              srt xs `shouldBe` srt [r1, r2]
-            Left e -> expectationFailure (show e)
-
-    it "creates and updates with updateSnapshotRepo" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \r1 -> do
-        let Just (String dir) = HM.lookup "location" (gSnapshotRepoSettingsObject (gSnapshotRepoSettings r1))
-        let noCompression = FsSnapshotRepo r1n (T.unpack dir) False Nothing Nothing Nothing
-        resp <- updateSnapshotRepo defaultSnapshotRepoUpdateSettings noCompression
-        liftIO (validateStatus resp 200)
-        Right [roundtrippedNoCompression] <- getSnapshotRepos (SnapshotRepoList (ExactRepo r1n :| []))
-        liftIO (roundtrippedNoCompression `shouldBe` toGSnapshotRepo noCompression)
-
-    -- verify came around in 1.4 it seems
-    it "can verify existing repos" $ when' canSnapshot $ when' (atleast es14) $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        res <- verifySnapshotRepo r1n
-        liftIO $ case res of
-          Right (SnapshotVerification vs)
-            | null vs -> expectationFailure "Expected nonempty set of verifying nodes"
-            | otherwise -> return ()
-          Left e -> expectationFailure (show e)
-
-  describe "snapshots" $ do
-    it "always parses all snapshots API" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        res <- getSnapshots r1n AllSnapshots
-        liftIO $ case res of
-          Left e -> expectationFailure ("Expected a right but got Left " <> show e)
-          Right _ -> return ()
-
-    it "can parse a snapshot that it created" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        let s1n = SnapshotName "example-snapshot"
-        withSnapshot r1n s1n $ do
-          res <- getSnapshots r1n (SnapshotList (ExactSnap s1n :| []))
-          liftIO $ case res of
-            Right [snap]
-              | snapInfoState snap == SnapshotSuccess &&
-                snapInfoName snap == s1n -> return ()
-              | otherwise -> expectationFailure (show snap)
-            Right [] -> expectationFailure "There were no snapshots"
-            Right snaps -> expectationFailure ("Expected 1 snapshot but got" <> show (length snaps))
-            Left e -> expectationFailure (show e)
-
-  describe "snapshot restore" $ do
-    it "can restore a snapshot that we create" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        let s1n = SnapshotName "example-snapshot"
-        withSnapshot r1n s1n $ do
-          let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True }
-          -- have to close an index to restore it
-          resp1 <- closeIndex testIndex
-          liftIO (validateStatus resp1 200)
-          resp2 <- restoreSnapshot r1n s1n settings
-          liftIO (validateStatus resp2 200)
-
-    it "can restore and rename" $ when' canSnapshot $ withTestEnv $ do
-      let r1n = SnapshotRepoName "bloodhound-repo1"
-      withSnapshotRepo r1n $ \_ -> do
-        let s1n = SnapshotName "example-snapshot"
-        withSnapshot r1n s1n $ do
-          let pat = RestoreRenamePattern "bloodhound-tests-twitter-(\\d+)"
-          let replace = RRTLit "restored-" :| [RRSubWholeMatch]
-          let expectedIndex = IndexName "restored-bloodhound-tests-twitter-1"
-          oldEnoughForOverrides <- liftIO (atleast es15)
-          let overrides = RestoreIndexSettings { restoreOverrideReplicas = Just (ReplicaCount 0) }
-          let settings = defaultSnapshotRestoreSettings { snapRestoreWaitForCompletion = True
-                                                        , snapRestoreRenamePattern = Just pat
-                                                        , snapRestoreRenameReplacement = Just replace
-                                                        , snapRestoreIndexSettingsOverrides = if oldEnoughForOverrides
-                                                                                              then Just overrides
-                                                                                              else Nothing
-                                                        }
-          -- have to close an index to restore it
-          let go = do
-                resp <- restoreSnapshot r1n s1n settings
-                liftIO (validateStatus resp 200)
-                exists <- indexExists expectedIndex
-                liftIO (exists `shouldBe` True)
-          go `finally` deleteIndex expectedIndex
-
-  describe "getNodesInfo" $ do
+  describe "getNodesInfo" $
     it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
       res <- getNodesInfo LocalNode
       liftIO $ case res of
@@ -1730,7 +80,7 @@ main = hspec $ do
         Right NodesInfo {..} -> length nodesInfo `shouldBe` 1
         Left e -> expectationFailure ("Expected NodesInfo but got " <> show e)
 
-  describe "getNodesStats" $ do
+  describe "getNodesStats" $
     it "fetches the responding node when LocalNode is used" $ withTestEnv $ do
      res <- getNodesStats LocalNode
      liftIO $ case res of
@@ -1740,7 +90,7 @@ main = hspec $ do
        Right NodesStats {..} -> length nodesStats `shouldBe` 1
        Left e -> expectationFailure ("Expected NodesStats but got " <> show e)
 
-  describe "Enum DocVersion" $ do
+  describe "Enum DocVersion" $
    it "follows the laws of Enum, Bounded" $ do
      evaluate (succ maxBound :: DocVersion) `shouldThrow` anyErrorCall
      evaluate (pred minBound :: DocVersion) `shouldThrow` anyErrorCall
@@ -1750,11 +100,14 @@ main = hspec $ do
      enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound]
      enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound]
 
-  describe "scan&scroll API" $ do
+  describe "Scan & Scroll API" $
    it "returns documents using the scan&scroll API" $ withTestEnv $ do
      _ <- insertData
      _ <- insertOther
-      let search = (mkSearch (Just $ MatchAllQuery Nothing) Nothing) { size = (Size 1) }
+      let search =
+            (mkSearch
+              (Just $ MatchAllQuery Nothing) Nothing)
+              { size = Size 1 }
      regular_search <- searchTweet search
      scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet]
      let scan_search = map hitSource scan_search'
@@ -1762,206 +115,3 @@ main = hspec $ do
        regular_search `shouldBe` Right exampleTweet
      -- Check that the size restrtiction is being honored
      liftIO $ scan_search `shouldMatchList` [Just exampleTweet, Just otherTweet]
-
-  describe "index aliases" $ do
-    it "handles the simple case of aliasing an existing index" $ do
-      let alias = IndexAlias (testIndex) (IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias"))
-      let create = IndexAliasCreate Nothing Nothing
-      let action = AddAlias alias create
-
-      withTestEnv $ do
-        resetIndex
-        resp <- updateIndexAliases (action :| [])
-        liftIO $ validateStatus resp 200
-      let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
-      (do aliases <- withTestEnv getIndexAliases
-          let expected = IndexAliasSummary alias create
-          case aliases of
-            Right (IndexAliasesSummary summs) ->
-              L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
-            Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
-
-    it "handles an alias with routing and a filter" $ do
-      let alias = IndexAlias (testIndex) (IndexAliasName (IndexName "bloodhound-tests-twitter-1-alias"))
-      let sar = SearchAliasRouting (RoutingValue "search val" :| [])
-      let iar = IndexAliasRouting (RoutingValue "index val")
-      let routing = GranularAliasRouting (Just sar) (Just iar)
-      let filter = LimitFilter 42
-      let create = IndexAliasCreate (Just routing) (Just filter)
-      let action = AddAlias alias create
-
-      withTestEnv $ do
-        resetIndex
-        resp <- updateIndexAliases (action :| [])
-        liftIO $ validateStatus resp 200
-      let cleanup = withTestEnv (updateIndexAliases (RemoveAlias alias :| []))
-      (do aliases <- withTestEnv getIndexAliases
-          let expected = IndexAliasSummary alias create
-          case aliases of
-            Right (IndexAliasesSummary summs) ->
-              L.find ((== alias) . indexAliasSummaryAlias) summs `shouldBe` Just expected
-            Left e -> expectationFailure ("Expected an IndexAliasesSummary but got " <> show e)) `finally` cleanup
-
-  describe "Index Listing" $ do
-    it "returns a list of index names" $ withTestEnv $ do
-      _ <- createExampleIndex
-      ixns <- listIndices
-      liftIO (ixns `shouldContain` [testIndex])
-
-  describe "Index Settings" $ do
-    it "persists settings" $ withTestEnv $ do
-      _ <- deleteExampleIndex
-      _ <- createExampleIndex
-      let updates = BlocksWrite False :| []
-      updateResp <- updateIndexSettings updates testIndex
-      liftIO $ validateStatus updateResp 200
-      getResp <- getIndexSettings testIndex
-      liftIO $
-        getResp `shouldBe` Right (IndexSettingsSummary
-                                    testIndex
-                                    (IndexSettings (ShardCount 1) (ReplicaCount 0))
-                                    (NE.toList updates))
-
-  describe "Index Optimization" $ do
-    it "returns a successful response upon completion" $ withTestEnv $ do
-      _ <- createExampleIndex
-      resp <- optimizeIndex (IndexList (testIndex :| [])) defaultIndexOptimizationSettings
-      liftIO $ validateStatus resp 200
-
-  describe "Suggest" $ do
-    it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
-      _ <- insertData
-      let {- query = QueryMatchNoneQuery
-          query = TermQuery (Term "user" "bitemyapp") Nothing -}
-      let phraseSuggester = mkPhraseSuggester (FieldName "message")
-          namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
-          search' = mkSearch Nothing Nothing
-          search = search' { suggestBody = Just namedSuggester }
-          expectedText = Just "use haskell"
-      resp <- searchByIndex testIndex search
-      parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
-      case parsed of
-        Left e -> liftIO $ expectationFailure ("Expected an search suggestion but got " <> show e)
-        Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText
-
-  describe "JSON instances" $ do
-    propJSON (Proxy :: Proxy Version)
-    propJSON (Proxy :: Proxy IndexName)
-    propJSON (Proxy :: Proxy MappingName)
-    propJSON (Proxy :: Proxy DocId)
-    propJSON (Proxy :: Proxy IndexAliasRouting)
-    propJSON (Proxy :: Proxy RoutingValue)
-    propJSON (Proxy :: Proxy ShardCount)
-    propJSON (Proxy :: Proxy ReplicaCount)
-    propJSON (Proxy :: Proxy TemplateName)
-    propJSON (Proxy :: Proxy TemplatePattern)
-    propJSON (Proxy :: Proxy QueryString)
-    propJSON (Proxy :: Proxy FieldName)
-    propJSON (Proxy :: Proxy CacheName)
-    propJSON (Proxy :: Proxy CacheKey)
-    propJSON (Proxy :: Proxy Existence)
-    propJSON (Proxy :: Proxy CutoffFrequency)
-    propJSON (Proxy :: Proxy Analyzer)
-    propJSON (Proxy :: Proxy MaxExpansions)
-    propJSON (Proxy :: Proxy Lenient)
-    propJSON (Proxy :: Proxy Tiebreaker)
-    propJSON (Proxy :: Proxy Boost)
-    propJSON (Proxy :: Proxy BoostTerms)
-    propJSON (Proxy :: Proxy MinimumMatch)
-    propJSON (Proxy :: Proxy DisableCoord)
-    propJSON (Proxy :: Proxy IgnoreTermFrequency)
-    propJSON (Proxy :: Proxy MinimumTermFrequency)
-    propJSON (Proxy :: Proxy MaxQueryTerms)
-    propJSON (Proxy :: Proxy Fuzziness)
-    propJSON (Proxy :: Proxy PrefixLength)
-    propJSON (Proxy :: Proxy TypeName)
-    propJSON (Proxy :: Proxy PercentMatch)
-    propJSON (Proxy :: Proxy StopWord)
-    propJSON (Proxy :: Proxy QueryPath)
-    propJSON (Proxy :: Proxy AllowLeadingWildcard)
-    propJSON (Proxy :: Proxy LowercaseExpanded)
-    propJSON (Proxy :: Proxy EnablePositionIncrements)
-    propJSON (Proxy :: Proxy AnalyzeWildcard)
-    propJSON (Proxy :: Proxy GeneratePhraseQueries)
-    propJSON (Proxy :: Proxy Locale)
-    propJSON (Proxy :: Proxy MaxWordLength)
-    propJSON (Proxy :: Proxy MinWordLength)
-    propJSON (Proxy :: Proxy PhraseSlop)
-    propJSON (Proxy :: Proxy MinDocFrequency)
-    propJSON (Proxy :: Proxy MaxDocFrequency)
-    propJSON (Proxy :: Proxy Filter)
-    propJSON (Proxy :: Proxy Query)
-    propJSON (Proxy :: Proxy SimpleQueryStringQuery)
-    propJSON (Proxy :: Proxy FieldOrFields)
-    propJSON (Proxy :: Proxy SimpleQueryFlag)
-    propJSON (Proxy :: Proxy RegexpQuery)
-    propJSON (Proxy :: Proxy QueryStringQuery)
-    propJSON (Proxy :: Proxy RangeQuery)
-    propJSON (Proxy :: Proxy PrefixQuery)
-    propJSON (Proxy :: Proxy NestedQuery)
-    propJSON (Proxy :: Proxy MoreLikeThisFieldQuery)
-    propJSON (Proxy :: Proxy MoreLikeThisQuery)
-    propJSON (Proxy :: Proxy IndicesQuery)
-    propJSON (Proxy :: Proxy HasParentQuery)
-    propJSON (Proxy :: Proxy HasChildQuery)
-    propJSON (Proxy :: Proxy FuzzyQuery)
-    propJSON (Proxy :: Proxy FuzzyLikeFieldQuery)
-    propJSON (Proxy :: Proxy FuzzyLikeThisQuery)
-    propJSON (Proxy :: Proxy FilteredQuery)
-    propJSON (Proxy :: Proxy DisMaxQuery)
-    propJSON (Proxy :: Proxy CommonTermsQuery)
-    propJSON (Proxy :: Proxy CommonMinimumMatch)
-    propJSON (Proxy :: Proxy BoostingQuery)
-    propJSON (Proxy :: Proxy BoolQuery)
-    propJSON (Proxy :: Proxy MatchQuery)
-    propJSON (Proxy :: Proxy MultiMatchQueryType)
-    propJSON (Proxy :: Proxy BooleanOperator)
-    propJSON (Proxy :: Proxy ZeroTermsQuery)
-    propJSON (Proxy :: Proxy MatchQueryType)
-    propJSON (Proxy :: Proxy AliasRouting)
-    propJSON (Proxy :: Proxy IndexAliasCreate)
-    propJSON (Proxy :: Proxy SearchAliasRouting)
-    propJSON (Proxy :: Proxy ScoreType)
-    propJSON (Proxy :: Proxy Distance)
-    propJSON (Proxy :: Proxy DistanceUnit)
-    propJSON (Proxy :: Proxy DistanceType)
-    propJSON (Proxy :: Proxy OptimizeBbox)
-    propJSON (Proxy :: Proxy GeoBoundingBoxConstraint)
-    propJSON (Proxy :: Proxy GeoFilterType)
-    propJSON (Proxy :: Proxy GeoBoundingBox)
-    propJSON (Proxy :: Proxy LatLon)
-    propJSON (Proxy :: Proxy RangeExecution)
-    prop "RegexpFlags FromJSON/ToJSON roundtrips, removing dups " $ \rfs ->
-      let expected = case rfs of
-            SomeRegexpFlags fs -> SomeRegexpFlags (NE.fromList (nub (NE.toList fs)))
-            x -> x
-      in parseEither parseJSON (toJSON rfs) === Right expected
-    propJSON (Proxy :: Proxy BoolMatch)
-    propJSON (Proxy :: Proxy Term)
-    propJSON (Proxy :: Proxy MultiMatchQuery)
-    propJSON (Proxy :: Proxy IndexSettings)
-    propJSON (Proxy :: Proxy UpdatableIndexSetting')
-    propJSON (Proxy :: Proxy ReplicaBounds)
-    propJSON (Proxy :: Proxy Bytes)
-    propJSON (Proxy :: Proxy AllocationPolicy)
-    propJSON (Proxy :: Proxy InitialShardCount)
-    propJSON (Proxy :: Proxy FSType)
-    propJSON (Proxy :: Proxy CompoundFormat)
-    propJSON (Proxy :: Proxy TemplateQueryInline)
-    propJSON (Proxy :: Proxy Suggest)
-    propJSON (Proxy :: Proxy DirectGenerators)
-    propJSON (Proxy :: Proxy DirectGeneratorSuggestModeTypes)
-
--- Temporary solution for lacking of generic derivation of Arbitrary
--- We use generics-sop, as it's much more concise than directly using GHC.Generics
---
--- This will be unneeded after https://github.com/nick8325/quickcheck/pull/112
--- is merged and released
-sopArbitrary :: forall a. (Generic a, SOP.GTo a, SOP.All SOP.SListI (SOP.GCode a), SOP.All2 Arbitrary (SOP.GCode a)) => Gen a
-sopArbitrary = fmap SOP.gto sopArbitrary'
-
-sopArbitrary' :: forall xss. (SOP.All SOP.SListI xss, SOP.All2 Arbitrary xss) => Gen (SOP.SOP SOP.I xss)
-sopArbitrary' = SOP.hsequence =<< elements (SOP.apInjs_POP $ SOP.hcpure p arbitrary)
-  where
-    p :: Proxy Arbitrary
-    p = Proxy