cleanup, passing test for match queries

Chris Allen 2014-04-27 00:49:44 -05:00
parent 8bd69f4ed5
commit 069704ac60
8 changed files with 366 additions and 167 deletions

.gitignore

@@ -1,3 +1,5 @@
cabal.sandbox.config
.cabal-sandbox/
dist/
.project
.dist-buildwrapper/


@@ -1,81 +1,6 @@
module Database.Bloodhound
( createIndex
, deleteIndex
, defaultIndexSettings
, createMapping
, deleteMapping
, indexDocument
, getDocument
, documentExists
, deleteDocument
, searchAll
, searchByIndex
, searchByType
, refreshIndex
, mkSearch
, bulk
, defaultCache
, halfRangeToKV
, maybeJson
, maybeJsonF
, rangeToKV
, showText
, unpackId
, mkSort
, Version(..)
, Status(..)
, Existence(..)
, NullValue(..)
, IndexSettings(..)
, Server(..)
, Reply(..)
, EsResult(..)
, Query(..)
, Search(..)
, SearchResult(..)
, SearchHits(..)
, ShardResult(..)
, Hit(..)
, Filter(..)
, Seminearring(..)
, BoolMatch(..)
, Term(..)
, GeoPoint(..)
, GeoBoundingBoxConstraint(..)
, GeoBoundingBox(..)
, GeoFilterType(..)
, Distance(..)
, DistanceUnit(..)
, DistanceType(..)
, DistanceRange(..)
, OptimizeBbox(..)
, LatLon(..)
, Range(..)
, HalfRange(..)
, RangeExecution(..)
, LessThan(..)
, LessThanEq(..)
, GreaterThan(..)
, GreaterThanEq(..)
, Regexp(..)
, RegexpFlags(..)
, FieldName(..)
, IndexName(..)
, MappingName(..)
, DocId(..)
, CacheName(..)
, CacheKey(..)
, BulkOperation(..)
, ReplicaCount(..)
, ShardCount(..)
, Sort(..)
, SortMode(..)
, SortOrder(..)
, SortSpec(..)
, DefaultSort(..)
, Missing(..)
, OpenCloseIndex(..)
, Method(..)
( module Database.Bloodhound.Client
, module Database.Bloodhound.Types
) where
import Database.Bloodhound.Client
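-- The module is now a plain re-export facade. The body shown here is truncated;
-- presumably it also imports Database.Bloodhound.Types, so callers can keep a
-- single `import Database.Bloodhound` and see the whole API.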


@@ -10,6 +10,7 @@ module Database.Bloodhound.Types
, rangeToKV
, showText
, unpackId
, mkMatchQuery
, Version(..)
, Status(..)
, Existence(..)
@@ -64,6 +65,36 @@ module Database.Bloodhound.Types
, Missing(..)
, OpenCloseIndex(..)
, Method(..)
, Boost(..)
, MatchQuery(..)
, MultiMatchQuery(..)
, BoolQuery(..)
, BoostingQuery(..)
, CommonTermsQuery(..)
, DisMaxQuery(..)
, FilteredQuery(..)
, FuzzyLikeThisQuery(..)
, FuzzyLikeFieldQuery(..)
, FuzzyQuery(..)
, HasChildQuery(..)
, HasParentQuery(..)
, AnIndicesQuery(..)
, MoreLikeThisQuery(..)
, MoreLikeThisFieldQuery(..)
, NestedQuery(..)
, PrefixQuery(..)
, QueryStringQuery(..)
, SimpleQueryStringQuery(..)
, RangeQuery(..)
, RegexpQuery(..)
, QueryString(..)
, BooleanOperator(..)
, ZeroTermsQuery(..)
, CutoffFrequency(..)
, Analyzer(..)
, MaxExpansions(..)
, Lenient(..)
, MatchQueryType(..)
) where
import Data.Aeson
@@ -82,15 +113,11 @@ data Version = Version { number :: Text
, build_snapshot :: Bool
, lucene_version :: Text } deriving (Show, Generic)
data (FromJSON a, ToJSON a) => Status a =
Status { ok :: Bool
, status :: Int
, name :: Text
, version :: a
, tagline :: Text } deriving (Eq, Show)
newtype ShardCount = ShardCount Int deriving (Eq, Show, Generic)
newtype ReplicaCount = ReplicaCount Int deriving (Eq, Show, Generic)
data Status a = Status { ok :: Bool
, status :: Int
, name :: Text
, version :: a
, tagline :: Text } deriving (Eq, Show)
data IndexSettings =
IndexSettings { indexShards :: ShardCount
@@ -100,7 +127,6 @@ defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
data Strategy = RoundRobinStrat | RandomStrat | HeadStrat deriving (Eq, Show)
newtype Server = Server String deriving (Eq, Show)
type Reply = Network.HTTP.Conduit.Response L.ByteString
type Method = NHTM.Method
@@ -120,9 +146,9 @@ data FieldDefinition =
data MappingField =
MappingField { mappingFieldName :: FieldName
, fieldDefinition :: FieldDefinition } deriving (Eq, Show)
, fieldDefinition :: FieldDefinition } deriving (Eq, Show)
data Mapping = Mapping { typeName :: Text
data Mapping = Mapping { typeName :: TypeName
, fields :: [MappingField] } deriving (Eq, Show)
data BulkOperation =
@@ -174,28 +200,48 @@ type PrefixValue = Text
data BooleanOperator = And | Or deriving (Eq, Show)
newtype IndexName = IndexName String deriving (Eq, Generic, Show)
newtype MappingName = MappingName String deriving (Eq, Generic, Show)
newtype DocId = DocId String deriving (Eq, Generic, Show)
newtype QueryString = QueryString Text deriving (Eq, Show)
newtype FieldName = FieldName Text deriving (Eq, Show)
newtype CacheName = CacheName Text deriving (Eq, Show)
newtype CacheKey = CacheKey Text deriving (Eq, Show)
newtype Existence = Existence Bool deriving (Eq, Show)
newtype NullValue = NullValue Bool deriving (Eq, Show)
newtype CutoffFrequency = CutoffFrequency Double deriving (Eq, Show)
newtype Analyzer = Analyzer Text deriving (Eq, Show)
newtype MaxExpansions = MaxExpansions Int deriving (Eq, Show)
newtype Lenient = Lenient Bool deriving (Eq, Show)
newtype Tiebreaker = Tiebreaker Double deriving (Eq, Show)
newtype Boost = Boost Double deriving (Eq, Show)
newtype MinimumMatch = MinimumMatch Int deriving (Eq, Show)
newtype MinimumMatchText = MinimumMatchText Text deriving (Eq, Show)
newtype DisableCoord = DisableCoord Bool deriving (Eq, Show)
newtype IgnoreTermFrequency = IgnoreTermFrequency Bool deriving (Eq, Show)
newtype MaxQueryTerms = MaxQueryTerms Int deriving (Eq, Show)
newtype Fuzziness = Fuzziness Double deriving (Eq, Show)
newtype PrefixLength = PrefixLength Int deriving (Eq, Show)
newtype ShardCount = ShardCount Int deriving (Eq, Show, Generic)
newtype ReplicaCount = ReplicaCount Int deriving (Eq, Show, Generic)
newtype Server = Server String deriving (Eq, Show)
newtype IndexName = IndexName String deriving (Eq, Generic, Show)
newtype MappingName = MappingName String deriving (Eq, Generic, Show)
newtype DocId = DocId String deriving (Eq, Generic, Show)
newtype QueryString = QueryString Text deriving (Eq, Show)
newtype FieldName = FieldName Text deriving (Eq, Show)
newtype CacheName = CacheName Text deriving (Eq, Show)
newtype CacheKey = CacheKey Text deriving (Eq, Show)
newtype Existence = Existence Bool deriving (Eq, Show)
newtype NullValue = NullValue Bool deriving (Eq, Show)
newtype CutoffFrequency = CutoffFrequency Double deriving (Eq, Show, Generic)
newtype Analyzer = Analyzer Text deriving (Eq, Show, Generic)
newtype MaxExpansions = MaxExpansions Int deriving (Eq, Show, Generic)
newtype Lenient = Lenient Bool deriving (Eq, Show, Generic)
newtype Tiebreaker = Tiebreaker Double deriving (Eq, Show)
newtype Boost = Boost Double deriving (Eq, Show, Generic)
newtype BoostTerms = BoostTerms Double deriving (Eq, Show)
newtype MinimumMatch = MinimumMatch Int deriving (Eq, Show)
newtype MinimumMatchText = MinimumMatchText Text deriving (Eq, Show)
newtype DisableCoord = DisableCoord Bool deriving (Eq, Show)
newtype IgnoreTermFrequency = IgnoreTermFrequency Bool deriving (Eq, Show)
newtype MinimumTermFrequency = MinimumTermFrequency Int deriving (Eq, Show)
newtype MaxQueryTerms = MaxQueryTerms Int deriving (Eq, Show)
newtype Fuzziness = Fuzziness Double deriving (Eq, Show)
newtype PrefixLength = PrefixLength Int deriving (Eq, Show)
newtype TypeName = TypeName Text deriving (Eq, Show)
newtype PercentMatch = PercentMatch Double deriving (Eq, Show)
newtype StopWord = StopWord Text deriving (Eq, Show)
newtype QueryPath = QueryPath Text deriving (Eq, Show)
newtype AllowLeadingWildcard = AllowLeadingWildcard Bool deriving (Eq, Show)
newtype LowercaseExpanded = LowercaseExpanded Bool deriving (Eq, Show)
newtype EnablePositionIncrements = EnablePositionIncrements Bool deriving (Eq, Show)
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show)
newtype GeneratePhraseQueries = GeneratePhraseQueries Bool deriving (Eq, Show)
newtype Locale = Locale Text deriving (Eq, Show)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show)
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show)
unpackId :: DocId -> String
unpackId (DocId docId) = docId
@@ -212,21 +258,173 @@ data Search = Search { queryBody :: Maybe Query
, from :: From
, size :: Size } deriving (Eq, Show)
data Query = TermQuery Term (Maybe Boost)
| QueryMatchQuery MatchQuery
| QueryMultiMatchQuery MultiMatchQuery
| QueryBoolQuery BoolQuery
| QueryBoostingQuery BoostingQuery
| QueryCommonTermsQuery CommonTermsQuery
| ConstantScoreFilter Filter Boost
| ConstantScoreQuery Query Boost
| QueryDisMaxQuery DisMaxQuery
| QueryFilteredQuery FilteredQuery
| QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
| QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
| QueryFuzzyQuery FuzzyQuery
data Query = TermQuery Term (Maybe Boost)
| TermsQuery [Term] MinimumMatch
| QueryMatchQuery MatchQuery
| QueryMultiMatchQuery MultiMatchQuery
| QueryBoolQuery BoolQuery
| QueryBoostingQuery BoostingQuery
| QueryCommonTermsQuery CommonTermsQuery
| ConstantScoreFilter Filter Boost
| ConstantScoreQuery Query Boost
| QueryDisMaxQuery DisMaxQuery
| QueryFilteredQuery FilteredQuery
| QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
| QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
| QueryFuzzyQuery FuzzyQuery
| QueryHasChildQuery HasChildQuery
| QueryHasParentQuery HasParentQuery
| IdsQuery MappingName [DocId]
| QueryIndicesQuery AnIndicesQuery
| MatchAllQuery (Maybe Boost)
| QueryMoreLikeThisQuery MoreLikeThisQuery
| QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery
| QueryNestedQuery NestedQuery
| QueryPrefixQuery PrefixQuery
| QueryQueryStringQuery QueryStringQuery
| QuerySimpleQueryStringQuery SimpleQueryStringQuery
| QueryRangeQuery RangeQuery
| QueryRegexpQuery RegexpQuery
deriving (Eq, Show)
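-- A minimal sketch (not in this commit) of building Query values directly from
-- the constructors above; the values are made up for illustration only.
exampleTermQuery :: Query
exampleTermQuery = TermQuery (Term "user" "bitemyapp") (Just (Boost 2.0))

exampleMatchAll :: Query
exampleMatchAll = MatchAllQuery Nothing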
-- Possible flags are ALL, ANYSTRING, AUTOMATON, COMPLEMENT,
-- EMPTY, INTERSECTION, INTERVAL, or NONE. huh?
data RegexpQuery =
RegexpQuery { regexpQueryField :: FieldName
, regexpQuery :: Regexp
, regexpQueryFlags :: RegexpFlags
, regexpQueryCacheName :: CacheName
, regexpQueryCache :: Cache
, regexpQueryCacheKey :: CacheKey } deriving (Eq, Show)
data RangeQuery =
RangeQuery { rangeQueryField :: FieldName
, rangeQueryRange :: Either HalfRange Range
, rangeQueryBoost :: Boost } deriving (Eq, Show)
data SimpleQueryStringQuery =
SimpleQueryStringQuery { simpleQueryStringQuery :: QueryString
, simpleQueryStringField :: Maybe FieldOrFields
, simpleQueryStringOperator :: Maybe BooleanOperator
, simpleQueryStringAnalyzer :: Maybe Analyzer
, simpleQueryStringFlags :: Maybe [SimpleQueryFlag]
, simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded
, simpleQueryStringLocale :: Maybe Locale
} deriving (Eq, Show)
data SimpleQueryFlag = SimpleQueryAll
| SimpleQueryNone
| SimpleQueryAnd
| SimpleQueryOr
| SimpleQueryPrefix
| SimpleQueryPhrase
| SimpleQueryPrecedence
| SimpleQueryEscape
| SimpleQueryWhitespace
| SimpleQueryFuzzy
| SimpleQueryNear
| SimpleQuerySlop deriving (Eq, Show)
-- use_dis_max and tie_breaker when fields are plural?
data QueryStringQuery =
QueryStringQuery { queryStringQuery :: QueryString
, queryStringDefaultField :: Maybe FieldOrFields
, queryStringOperator :: Maybe BooleanOperator
, queryStringAnalyzer :: Maybe Analyzer
, queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard
, queryStringLowercaseExpanded :: Maybe LowercaseExpanded
, queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements
, queryStringFuzzyMaxExpansions :: Maybe MaxExpansions
, queryStringFuzziness :: Maybe Fuzziness
, queryStringFuzzyPrefixLength :: Maybe PrefixLength
, queryStringPhraseSlop :: Maybe PhraseSlop
, queryStringBoost :: Maybe Boost
, queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard
, queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries
, queryStringMinimumShouldMatch :: Maybe MinimumMatch
, queryStringLenient :: Maybe Lenient
, queryStringLocale :: Maybe Locale
} deriving (Eq, Show)
data FieldOrFields = FofField FieldName
| FofFields [FieldName] deriving (Eq, Show)
data PrefixQuery =
PrefixQuery { prefixQueryField :: FieldName
, prefixQueryPrefixValue :: Text
, prefixQueryBoost :: Maybe Boost } deriving (Eq, Show)
data NestedQuery =
NestedQuery { nestedQueryPath :: QueryPath
, nestedQueryScoreType :: ScoreType
, nestedQuery :: Query } deriving (Eq, Show)
data MoreLikeThisFieldQuery =
MoreLikeThisFieldQuery { moreLikeThisFieldText :: Text
, moreLikeThisFieldFields :: FieldName
-- default 0.3 (30%)
, moreLikeThisFieldPercentMatch :: Maybe PercentMatch
, moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisFieldStopWords :: Maybe [StopWord]
, moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisFieldMinWordLength :: Maybe MinWordLength
, moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength
, moreLikeThisFieldBoostTerms :: Maybe BoostTerms
, moreLikeThisFieldBoost :: Maybe Boost
, moreLikeThisFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data MoreLikeThisQuery =
MoreLikeThisQuery { moreLikeThisText :: Text
, moreLikeThisFields :: Maybe [FieldName]
-- default 0.3 (30%)
, moreLikeThisPercentMatch :: Maybe PercentMatch
, moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisStopWords :: Maybe [StopWord]
, moreLikeThisMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisMinWordLength :: Maybe MinWordLength
, moreLikeThisMaxWordLength :: Maybe MaxWordLength
, moreLikeThisBoostTerms :: Maybe BoostTerms
, moreLikeThisBoost :: Maybe Boost
, moreLikeThisAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
data AnIndicesQuery = AnIndexQuery IndexQuery
| AnIndicesQuery IndicesQuery deriving (Eq, Show)
data IndicesQuery =
IndicesQuery { indicesQueryIndexName :: IndexName
, indicesQuery :: Query
-- default "all"
, indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show)
data IndexQuery =
IndexQuery { indexQueryIndexName :: IndexName
, indexQuery :: Query
-- default "all"
, indexQueryNoMatch :: Maybe Query } deriving (Eq, Show)
data HasParentQuery =
HasParentQuery { hasParentQueryType :: TypeName
, hasParentQuery :: Query
, hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data HasChildQuery =
HasChildQuery { hasChildQueryType :: TypeName
, hasChildQuery :: Query
, hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data ScoreType = ScoreTypeMax
| ScoreTypeSum
| ScoreTypeAvg
| ScoreTypeNone deriving (Eq, Show)
data FuzzyQuery = FuzzyQuery { fuzzyQueryField :: FieldName
, fuzzyQueryValue :: Text
, fuzzyQueryPrefixLength :: PrefixLength
@@ -373,17 +571,7 @@ data RangeExecution = RangeExecutionIndex
| RangeExecutionFielddata deriving (Eq, Show)
newtype Regexp = Regexp Text deriving (Eq, Show)
data RegexpFlags = RegexpAll
| Complement
| Interval
| Intersection
| AnyString
| CompInterval
| CompIntersection
| CompAnyString
| IntervalIntersection
| IntervalAnyString
| IntersectionAnyString deriving (Eq, Show)
newtype RegexpFlags = RegexpFlags Text deriving (Eq, Show)
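-- Flags are now free-form text; presumably multiple flags are combined with "|"
-- the way Elasticsearch expects, e.g. RegexpFlags "COMPLEMENT|INTERVAL".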
halfRangeToKV :: HalfRange -> (Text, Double)
halfRangeToKV (HalfRangeLt (LessThan n)) = ("lt", n)


@@ -1,3 +1,5 @@
{-# LANGUAGE DeriveGeneric #-}
module Database.Bloodhound.Types.Instances
( Monoid(..)
, Seminearring(..)
@@ -6,6 +8,7 @@ module Database.Bloodhound.Types.Instances
import Control.Applicative
import Data.Aeson
import Data.Maybe (catMaybes)
import Data.Monoid
import qualified Data.Text as T
import Database.Bloodhound.Types
@@ -119,6 +122,7 @@ instance ToJSON GeoPoint where
toJSON (GeoPoint (FieldName geoField) latLon) =
object [geoField .= toJSON latLon]
instance ToJSON Query where
toJSON (TermQuery (Term termField termValue) boost) =
object ["term" .=
@@ -130,10 +134,55 @@ instance ToJSON Query where
Nothing -> []
merged = mappend base boosted
toJSON (QueryMatchQuery matchQuery) =
object ["match" .= toJSON matchQuery]
instance ToJSON MatchQuery where
toJSON (MatchQuery (FieldName fieldName)
(QueryString queryString) booleanOperator
zeroTermsQuery cutoffFrequency matchQueryType
analyzer maxExpansions lenient) =
object [ fieldName .= object conjoined ]
where conjoined = [ "query" .= queryString
, "operator" .= toJSON booleanOperator
, "zero_terms_query" .= toJSON zeroTermsQuery]
++ maybeAdd
f field = fmap ((field .=) . toJSON)
maybeAdd = catMaybes [ f "cutoff_frequency" cutoffFrequency
, f "type" matchQueryType
, f "analyzer" analyzer
, f "max_expansions" maxExpansions
, f "lenient" lenient ]
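-- A rough sketch (not in this commit) of the wire format this instance yields.
-- Assuming mkMatchQuery fills the optional fields with Nothing and defaults to
-- Or / ZeroTermsNone, a query such as
--   QueryMatchQuery (mkMatchQuery (FieldName "user") (QueryString "bitemyapp"))
-- serializes to roughly:
--   {"match":{"user":{"query":"bitemyapp","operator":"or","zero_terms_query":"none"}}}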
instance ToJSON BooleanOperator where
toJSON And = String "and"
toJSON Or = String "or"
instance ToJSON ZeroTermsQuery where
toJSON ZeroTermsNone = String "none"
toJSON ZeroTermsAll = String "all"
instance ToJSON MatchQueryType where
toJSON MatchPhrase = "phrase"
toJSON MatchPhrasePrefix = "phrase_prefix"
instance ToJSON ReplicaCount
instance ToJSON ShardCount
instance ToJSON CutoffFrequency
instance ToJSON Analyzer
instance ToJSON MaxExpansions
instance ToJSON Lenient
instance ToJSON Boost
instance ToJSON Version
instance FromJSON Version
instance FromJSON IndexName
instance FromJSON MappingName
instance FromJSON DocId
instance (FromJSON a, ToJSON a) => FromJSON (Status a) where
instance (FromJSON a) => FromJSON (Status a) where
parseJSON (Object v) = Status <$>
v .: "ok" <*>
v .: "status" <*>
@@ -142,13 +191,12 @@ instance (FromJSON a, ToJSON a) => FromJSON (Status a) where
v .: "tagline"
parseJSON _ = empty
instance ToJSON IndexSettings where
toJSON (IndexSettings s r) = object ["settings" .= object ["shards" .= s, "replicas" .= r]]
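-- For reference (not in this commit): with defaultIndexSettings (3 shards,
-- 2 replicas) this emits roughly {"settings":{"shards":3,"replicas":2}}.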
instance ToJSON ReplicaCount
instance ToJSON ShardCount
instance (FromJSON a, ToJSON a) => FromJSON (EsResult a) where
instance (FromJSON a) => FromJSON (EsResult a) where
parseJSON (Object v) = EsResult <$>
v .: "_index" <*>
v .: "_type" <*>
@@ -158,6 +206,7 @@ instance (FromJSON a, ToJSON a) => FromJSON (EsResult a) where
v .: "_source"
parseJSON _ = empty
instance ToJSON Search where
toJSON (Search query filter sort trackSortScores from size) =
object merged where
@@ -171,6 +220,7 @@ instance ToJSON Search where
, lFilter
, lSort]
instance ToJSON SortSpec where
toJSON (DefaultSortSpec
(DefaultSort (FieldName sortFieldName) sortOrder ignoreUnmapped
@@ -183,24 +233,24 @@ instance ToJSON SortSpec where
lNestedFilter = maybeJson "nested_filter" nestedFilter
merged = mconcat [base, lSortMode, lMissingSort, lNestedFilter]
instance ToJSON SortOrder where
toJSON Ascending = String "asc"
toJSON Descending = String "desc"
instance ToJSON SortMode where
toJSON SortMin = String "min"
toJSON SortMax = String "max"
toJSON SortSum = String "sum"
toJSON SortAvg = String "avg"
instance ToJSON Missing where
toJSON LastMissing = String "_last"
toJSON FirstMissing = String "_first"
toJSON (CustomMissing txt) = String txt
instance FromJSON IndexName
instance FromJSON MappingName
instance FromJSON DocId
instance ToJSON Distance where
toJSON (Distance coefficient unit) =
@@ -209,6 +259,7 @@ instance ToJSON Distance where
(String unitText) = (toJSON unit)
boltedTogether = mappend coefText unitText
instance ToJSON DistanceUnit where
toJSON Miles = String "mi"
toJSON Yards = String "yd"
@@ -220,56 +271,56 @@ instance ToJSON DistanceUnit where
toJSON Millimeters = String "mm"
toJSON NauticalMiles = String "nmi"
instance ToJSON DistanceType where
toJSON Arc = String "arc"
toJSON SloppyArc = String "sloppy_arc"
toJSON Plane = String "plane"
instance ToJSON OptimizeBbox where
toJSON NoOptimizeBbox = String "none"
toJSON (OptimizeGeoFilterType gft) = toJSON gft
instance ToJSON GeoBoundingBoxConstraint where
toJSON (GeoBoundingBoxConstraint (FieldName geoBBField) constraintBox cache) =
object [geoBBField .= toJSON constraintBox
, "_cache" .= cache]
instance ToJSON GeoFilterType where
toJSON GeoFilterMemory = String "memory"
toJSON GeoFilterIndexed = String "indexed"
instance ToJSON GeoBoundingBox where
toJSON (GeoBoundingBox topLeft bottomRight) =
object ["top_left" .= toJSON topLeft
, "bottom_right" .= toJSON bottomRight]
instance ToJSON LatLon where
toJSON (LatLon lat lon) =
object ["lat" .= lat
, "lon" .= lon]
-- index for smaller ranges, fielddata for longer ranges
instance ToJSON RangeExecution where
toJSON RangeExecutionIndex = "index"
toJSON RangeExecutionFielddata = "fielddata"
instance ToJSON RegexpFlags where
toJSON RegexpAll = String "ALL"
toJSON Complement = String "COMPLEMENT"
toJSON Interval = String "INTERVAL"
toJSON Intersection = String "INTERSECTION"
toJSON AnyString = String "ANYSTRING"
toJSON CompInterval = String "COMPLEMENT|INTERVAL"
toJSON CompIntersection = String "COMPLEMENT|INTERSECTION"
toJSON CompAnyString = String "COMPLEMENT|ANYSTRING"
toJSON IntervalIntersection = String "INTERVAL|INTERSECTION"
toJSON IntervalAnyString = String "INTERVAL|ANYSTRING"
toJSON IntersectionAnyString = String "INTERSECTION|ANYSTRING"
toJSON (RegexpFlags txt) = String txt
instance ToJSON Term where
toJSON (Term field value) = object ["term" .= object
[field .= value]]
instance ToJSON BoolMatch where
toJSON (MustMatch term cache) = object ["must" .= toJSON term,
"_cache" .= cache]
@@ -278,7 +329,8 @@ instance ToJSON BoolMatch where
toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms,
"_cache" .= cache]
instance (FromJSON a, ToJSON a) => FromJSON (SearchResult a) where
instance (FromJSON a) => FromJSON (SearchResult a) where
parseJSON (Object v) = SearchResult <$>
v .: "took" <*>
v .: "timed_out" <*>
@@ -286,14 +338,14 @@ instance (FromJSON a, ToJSON a) => FromJSON (SearchResult a) where
v .: "hits"
parseJSON _ = empty
instance (FromJSON a, ToJSON a) => FromJSON (SearchHits a) where
instance (FromJSON a) => FromJSON (SearchHits a) where
parseJSON (Object v) = SearchHits <$>
v .: "total" <*>
v .: "max_score" <*>
v .: "hits"
parseJSON _ = empty
instance (FromJSON a, ToJSON a) => FromJSON (Hit a) where
instance (FromJSON a) => FromJSON (Hit a) where
parseJSON (Object v) = Hit <$>
v .: "_index" <*>
v .: "_type" <*>


@@ -1,6 +1,6 @@
test:
cabal install --enable-tests
cabal test
build:
cabal build


@@ -509,6 +509,10 @@ let filter = RegexpFilter (FieldName "user") (Regexp "bite.*app")
* Possible future functionality
** Span Queries
Beginning here: http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-span-first-query.html
** Function Score Query
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html
@@ -525,6 +529,10 @@ Pretend to be a transport client?
Might require making a lucene index on disk with the appropriate format.
** GeoShapeQuery
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-geo-shape-query.html
** GeoShapeFilter
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-geo-shape-filter.html


@@ -16,6 +16,7 @@ build-type: Simple
cabal-version: >=1.10
library
ghc-options: -Wall
default-extensions: OverloadedStrings
exposed-modules: Database.Bloodhound
Database.Bloodhound.Client
@@ -28,6 +29,7 @@ library
free >= 4.5,
lens >= 4,
aeson >= 0.7,
aeson-pretty >= 0.7,
conduit >= 1.0,
http-conduit >= 2.0,
io-streams >= 1.1,
@@ -42,6 +44,7 @@ library
default-language: Haskell2010
test-suite tests
ghc-options: -Wall
default-extensions: OverloadedStrings
type: exitcode-stdio-1.0
main-is: tests.hs
@@ -51,12 +54,14 @@ test-suite tests
bloodhound,
http-conduit,
http-types,
bytestring >= 0.10,
hspec >= 1.8,
QuickCheck >= 2.5,
derive >= 2.5,
text >= 0.11,
time >= 1.4,
aeson >= 0.7,
aeson-pretty >= 0.7,
random >= 1.0,
quickcheck-instances >= 0.3
default-language: Haskell2010


@@ -4,25 +4,32 @@ module Main where
import Database.Bloodhound
import Data.Aeson
import Data.Either (Either(..))
import Data.Maybe (fromJust)
-- import Data.Aeson.Encode.Pretty
-- import Data.ByteString.Lazy.Char8 (putStrLn)
import Data.Time.Calendar (Day(..))
import Data.Time.Clock (secondsToDiffTime, UTCTime(..))
import Data.Text (Text)
import GHC.Generics (Generic)
import Network.HTTP.Conduit
import qualified Network.HTTP.Types.Status as NHTS
import Prelude hiding (filter, putStrLn)
import Test.Hspec
testServer :: Server
testServer = Server "http://localhost:9200"
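-- The suite assumes an Elasticsearch node is already listening at this address;
-- nothing below starts one.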
testIndex :: IndexName
testIndex = IndexName "twitter"
testMapping :: MappingName
testMapping = MappingName "tweet"
validateStatus :: Response body -> Int -> Expectation
validateStatus resp expected =
(NHTS.statusCode $ responseStatus resp)
`shouldBe` (expected :: Int)
createExampleIndex :: IO Reply
createExampleIndex = createIndex testServer defaultIndexSettings testIndex
deleteExampleIndex :: IO Reply
deleteExampleIndex = deleteIndex testServer testIndex
data Location = Location { lat :: Double
@@ -48,6 +55,7 @@ instance ToJSON TweetMapping where
object ["properties" .=
object ["location" .= object ["type" .= ("geo_point" :: Text)]]]]
exampleTweet :: Tweet
exampleTweet = Tweet { user = "bitemyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
@@ -56,6 +64,7 @@ exampleTweet = Tweet { user = "bitemyapp"
, age = 10000
, location = Location 40.12 (-71.34) }
otherTweet :: Tweet
otherTweet = Tweet { user = "notmyapp"
, postDate = UTCTime
(ModifiedJulianDay 55000)
@@ -66,17 +75,16 @@ otherTweet = Tweet { user = "notmyapp"
insertData :: IO ()
insertData = do
let encoded = encode exampleTweet
_ <- deleteExampleIndex
created <- createExampleIndex
mappingCreated <- createMapping testServer testIndex testMapping TweetMapping
docCreated <- indexDocument testServer testIndex testMapping exampleTweet (DocId "1")
_ <- createExampleIndex
_ <- createMapping testServer testIndex testMapping TweetMapping
_ <- indexDocument testServer testIndex testMapping exampleTweet (DocId "1")
_ <- refreshIndex testServer testIndex
return ()
insertOther :: IO ()
insertOther = do
docCreated <- indexDocument testServer testIndex testMapping otherTweet (DocId "2")
_ <- indexDocument testServer testIndex testMapping otherTweet (DocId "2")
_ <- refreshIndex testServer testIndex
return ()
@@ -110,6 +118,7 @@ main = hspec $ do
validateStatus resp 200
validateStatus deleteResp 200
describe "document API" $ do
it "indexes, gets, and then deletes the generated document" $ do
_ <- insertData
@@ -118,6 +127,7 @@ main = hspec $ do
(responseBody docInserted) :: Either String (EsResult Tweet)
fmap _source newTweet `shouldBe` Right exampleTweet
describe "bulk API" $ do
it "inserts all documents we request" $ do
_ <- insertData
@@ -128,7 +138,7 @@ main = hspec $ do
let secondDoc = BulkCreate (IndexName "twitter")
(MappingName "tweet") (DocId "3") (object ["name" .= String "bloo"])
let stream = [firstDoc, secondDoc]
response <- bulk testServer stream
_ <- bulk testServer stream
_ <- refreshIndex testServer testIndex
fDoc <- getDocument testServer testIndex testMapping (DocId "2")
sDoc <- getDocument testServer testIndex testMapping (DocId "3")
@@ -137,6 +147,7 @@ main = hspec $ do
fmap _source maybeFirst `shouldBe` Right firstTest
fmap _source maybeSecond `shouldBe` Right secondTest
describe "query API" $ do
it "returns document for term query and identity filter" $ do
_ <- insertData
@@ -146,6 +157,13 @@ main = hspec $ do
myTweet <- searchTweet search
myTweet `shouldBe` Right exampleTweet
it "returns document for match query" $ do
_ <- insertData
let query = QueryMatchQuery $ mkMatchQuery (FieldName "user") (QueryString "bitemyapp")
let search = mkSearch (Just query) Nothing
myTweet <- searchTweet search
myTweet `shouldBe` Right exampleTweet
describe "sorting" $ do
it "returns documents in the right order" $ do
_ <- insertData
@@ -159,6 +177,7 @@ main = hspec $ do
let myTweet = fmap (hitSource . head . hits . searchHits) result
myTweet `shouldBe` Right otherTweet
describe "filtering API" $ do
it "returns document for composed boolmatch and identity" $ do
_ <- insertData
@@ -258,7 +277,7 @@ main = hspec $ do
it "returns document for regexp filter" $ do
_ <- insertData
let filter = RegexpFilter (FieldName "user") (Regexp "bite.*app")
RegexpAll (CacheName "test") False (CacheKey "key")
(RegexpFlags "ALL") (CacheName "test") False (CacheKey "key")
let search = mkSearch Nothing (Just filter)
myTweet <- searchTweet search
myTweet `shouldBe` Right exampleTweet
@@ -266,7 +285,7 @@ main = hspec $ do
it "doesn't return document for non-matching regexp filter" $ do
_ <- insertData
let filter = RegexpFilter (FieldName "user")
(Regexp "boy") RegexpAll
(Regexp "boy") (RegexpFlags "ALL")
(CacheName "test") False (CacheKey "key")
let search = mkSearch Nothing (Just filter)
searchExpectNoResults search