Mirror of https://github.com/typeable/bloodhound.git, synced 2024-12-15 10:02:12 +03:00
MultiMatchQueryType implemented
parent 069704ac60
commit aef9e08d0b
@@ -11,6 +11,7 @@ module Database.Bloodhound.Types
       , showText
       , unpackId
       , mkMatchQuery
       , mkMultiMatchQuery
       , Version(..)
       , Status(..)
       , Existence(..)
@@ -95,6 +96,8 @@ module Database.Bloodhound.Types
       , MaxExpansions(..)
       , Lenient(..)
       , MatchQueryType(..)
       , MultiMatchQueryType(..)
       , Tiebreaker(..)
       ) where

import Data.Aeson
@@ -167,7 +170,7 @@ data EsResult a = EsResult { _index :: Text
type Sort = [SortSpec]

data SortSpec = DefaultSortSpec DefaultSort
              | GeoDistanceSortSpec SortOrder GeoPoint deriving (Eq, Show)
              | GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)

data DefaultSort =
  DefaultSort { sortFieldName :: FieldName
@@ -216,7 +219,7 @@ newtype CutoffFrequency = CutoffFrequency Double deriving (Eq, Show, Ge
newtype Analyzer = Analyzer Text deriving (Eq, Show, Generic)
newtype MaxExpansions = MaxExpansions Int deriving (Eq, Show, Generic)
newtype Lenient = Lenient Bool deriving (Eq, Show, Generic)
newtype Tiebreaker = Tiebreaker Double deriving (Eq, Show)
newtype Tiebreaker = Tiebreaker Double deriving (Eq, Show, Generic)
newtype Boost = Boost Double deriving (Eq, Show, Generic)
newtype BoostTerms = BoostTerms Double deriving (Eq, Show)
newtype MinimumMatch = MinimumMatch Int deriving (Eq, Show)
@@ -494,6 +497,10 @@ data MultiMatchQuery =
                  , multiMatchQueryMaxExpansions :: Maybe MaxExpansions
                  , multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Show)

mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery
mkMultiMatchQuery fields query =
  MultiMatchQuery fields query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing

data MultiMatchQueryType = MultiMatchBestFields
                         | MultiMatchMostFields
                         | MultiMatchCrossFields
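The smart constructor fills every optional field with Nothing, so callers opt into the new query type via a record update. A minimal usage sketch (not part of the diff), assuming the type field follows the record naming above and is called multiMatchQueryType:

let base  = mkMultiMatchQuery [FieldName "user", FieldName "message"] (QueryString "bitemyapp")
    typed = base { multiMatchQueryType = Just MultiMatchMostFields }  -- field name assumed; switches scoring to "most_fields"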
@@ -666,9 +673,12 @@ data ShardResult =
              , shardsSuccessful :: Int
              , shardsFailed :: Int } deriving (Eq, Show, Generic)

-- defaultFieldMaybeJSON :: Text -> Maybe a ->
maybeJson :: ToJSON a => Text -> Maybe a -> [Data.Aeson.Types.Internal.Pair]
maybeJson field (Just value) = [field .= toJSON value]
maybeJson _ _ = []

maybeJsonF :: (ToJSON (f Value), ToJSON a, Functor f) =>
              Text -> Maybe (f a) -> [Data.Aeson.Types.Internal.Pair]
maybeJsonF field (Just value) = [field .= fmap toJSON value]
maybeJsonF _ _ = []
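For reference, a rough sketch (not part of the commit) of what maybeJson evaluates to, so the optional-field handling in the instances below is easier to follow:

-- maybeJson "boost" (Just (Boost 2.0))  ==  ["boost" .= Boost 2.0]   -- present field becomes one pair
-- maybeJson "boost" Nothing             ==  []                       -- absent field contributes nothing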
@@ -137,6 +137,8 @@ instance ToJSON Query where
  toJSON (QueryMatchQuery matchQuery) =
    object ["match" .= toJSON matchQuery]

  toJSON (QueryMultiMatchQuery multiMatchQuery) =
    object ["multi_match" .= toJSON multiMatchQuery]

instance ToJSON MatchQuery where
  toJSON (MatchQuery (FieldName fieldName)
@@ -155,6 +157,29 @@ instance ToJSON MatchQuery where
            , f "max_expansions" maxExpansions
            , f "lenient" lenient ]

instance ToJSON MultiMatchQuery where
  toJSON (MultiMatchQuery fields (QueryString query) boolOp
          ztQ tb mmqt cf analyzer maxEx lenient) =
    object ["multi_match" .= object conjoined]
    where baseQuery = [ "fields" .= fmap toJSON fields
                      , "query" .= query
                      , "operator" .= toJSON boolOp
                      , "zero_terms_query" .= toJSON ztQ ]
          f field = fmap ((field .=) . toJSON)
          maybeAdd = catMaybes [ f "tiebreaker" tb
                               , f "type" mmqt
                               , f "cutoff_frequency" cf
                               , f "analyzer" analyzer
                               , f "max_expansions" maxEx
                               , f "lenient" lenient ]
          conjoined = baseQuery ++ maybeAdd

instance ToJSON MultiMatchQueryType where
  toJSON MultiMatchBestFields = "best_fields"
  toJSON MultiMatchMostFields = "most_fields"
  toJSON MultiMatchCrossFields = "cross_fields"
  toJSON MultiMatchPhrase = "phrase"
  toJSON MultiMatchPhrasePrefix = "phrase_prefix"

instance ToJSON BooleanOperator where
  toJSON And = String "and"
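A rough sketch (not in the diff) of the object the new MultiMatchQuery instance emits when a query type is set; the parts marked … depend on the BooleanOperator and zero-terms encodings, and only the optional keys that are Just survive catMaybes:

-- {"multi_match": {"fields": ["user","message"], "query": "bitemyapp",
--                  "operator": …, "zero_terms_query": …,
--                  "type": "most_fields"}}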
@@ -168,6 +193,9 @@ instance ToJSON MatchQueryType where
  toJSON MatchPhrase = "phrase"
  toJSON MatchPhrasePrefix = "phrase_prefix"

instance ToJSON FieldName where
  toJSON (FieldName fieldName) = String fieldName

instance ToJSON ReplicaCount
instance ToJSON ShardCount
instance ToJSON CutoffFrequency
@@ -176,6 +204,7 @@ instance ToJSON MaxExpansions
instance ToJSON Lenient
instance ToJSON Boost
instance ToJSON Version
instance ToJSON Tiebreaker
instance FromJSON Version
instance FromJSON IndexName
instance FromJSON MappingName
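The empty instance ToJSON Tiebreaker relies on the Generic deriving added to the newtype earlier in this commit. As a hedged illustration (not in the diff), aeson's generic default for a single-argument constructor should encode the wrapped value directly:

-- toJSON (Tiebreaker 0.3)  ~ Number 0.3   -- assuming aeson's default generic options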
@@ -233,6 +262,11 @@ instance ToJSON SortSpec where
          lNestedFilter = maybeJson "nested_filter" nestedFilter
          merged = mconcat [base, lSortMode, lMissingSort, lNestedFilter]

  toJSON (GeoDistanceSortSpec sortOrder (GeoPoint (FieldName field) latLon) units) =
    object [ "unit" .= toJSON units
           , field .= toJSON latLon
           , "order" .= toJSON sortOrder ]


instance ToJSON SortOrder where
  toJSON Ascending = String "asc"
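For orientation, a hedged sketch (not in the diff) of what the new GeoDistanceSortSpec clause serializes to; the values marked … come from the DistanceUnit and LatLon instances, which this hunk does not show:

-- {"unit": …, "<sort field name>": …, "order": "asc"}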
@@ -164,6 +164,14 @@ main = hspec $ do
      myTweet <- searchTweet search
      myTweet `shouldBe` Right exampleTweet

    it "returns document for multi-match query" $ do
      _ <- insertData
      let fields = [FieldName "user", FieldName "message"]
      let query = QueryMultiMatchQuery $ mkMultiMatchQuery fields (QueryString "bitemyapp")
      let search = mkSearch (Just query) Nothing
      myTweet <- searchTweet search
      myTweet `shouldBe` Right exampleTweet

  describe "sorting" $ do
    it "returns documents in the right order" $ do
      _ <- insertData
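A natural follow-up test (a sketch only, not part of this commit) that exercises the new query type; it assumes the MultiMatchQuery record field is named multiMatchQueryType:

    it "returns document for multi-match query with an explicit type" $ do
      _ <- insertData
      let fields = [FieldName "user", FieldName "message"]
      let query = QueryMultiMatchQuery $
                    (mkMultiMatchQuery fields (QueryString "bitemyapp"))
                      { multiMatchQueryType = Just MultiMatchMostFields }  -- field name assumed
      let search = mkSearch (Just query) Nothing
      myTweet <- searchTweet search
      myTweet `shouldBe` Right exampleTweet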