Merge remote-tracking branch 'upstream/master' into boost-in-match-query

Conflicts:
	src/Database/Bloodhound/Client.hs
This commit is contained in:
Thomas van Noort 2015-08-28 11:46:35 +02:00
commit e51b515427
7 changed files with 55 additions and 41 deletions

View File

@ -7,14 +7,14 @@
# release of a major GHC version. Setting HPVER implicitly sets
# GHCVER. Omit lines with versions you don't need/want testing for.
env:
- GHCVER=7.6.3 ESVER=1.6.0
# - GHCVER=7.6.3 ESVER=1.6.0 # Deprecated
# - GHCVER=7.8.3 ESVER=1.0.3 # Deprecated
# - GHCVER=7.8.3 ESVER=1.1.2 # Deprecated
- GHCVER=7.8.3 ESVER=1.2.4
- GHCVER=7.8.3 ESVER=1.3.6
- GHCVER=7.8.3 ESVER=1.4.1
- GHCVER=7.10.1 ESVER=1.5.2
- GHCVER=7.10.1 ESVER=1.6.0
- GHCVER=7.8 ESVER=1.2.4
- GHCVER=7.8 ESVER=1.3.6
- GHCVER=7.8 ESVER=1.4.1
- GHCVER=7.10 ESVER=1.5.2
- GHCVER=7.10 ESVER=1.6.0
# services:
# - elasticsearch
@ -22,34 +22,23 @@ env:
# Note: the distinction between `before_install` and `install` is not
# important.
before_install:
- sudo add-apt-repository -y ppa:hvr/ghc
- export STACK_YAML=stack-$GHCVER.yaml
- wget -q -O- https://s3.amazonaws.com/download.fpcomplete.com/ubuntu/fpco.key | sudo apt-key add -
- echo 'deb http://download.fpcomplete.com/ubuntu/precise stable main' | sudo tee /etc/apt/sources.list.d/fpco.list
- sudo apt-get update
- sudo apt-get install cabal-install-1.20 ghc-$GHCVER
- sudo apt-get install happy-1.19.3
- export PATH=/opt/ghc/$GHCVER/bin:/opt/happy/1.19.3/bin:$PATH
- sudo apt-get install stack -y
- stack setup
- wget --no-check-certificate https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-$ESVER.deb
- sudo dpkg --force-all -i elasticsearch-$ESVER.deb
- sudo service elasticsearch start
install:
- cabal-1.20 update
- cabal-1.20 install --only-dependencies --enable-tests --enable-benchmarks --force-reinstalls
- stack build
# Here starts the actual work to be performed for the package under
# test; any command which exits with a non-zero exit code causes the
# build to fail.
script:
# -v2 provides useful information for debugging
- cabal-1.20 configure --enable-tests --enable-benchmarks -v2
# this builds all libraries and executables
# (including tests/benchmarks)
- cabal-1.20 build
- cabal-1.20 test
- cabal-1.20 check
# tests that a source-distribution can be generated
- cabal-1.20 sdist
- stack test
# EOF

View File

@ -1,5 +1,5 @@
name: bloodhound
version: 0.7.1.0
version: 0.7.2.0
synopsis: ElasticSearch client library for Haskell
description: ElasticSearch made awesome for Haskell hackers
homepage: https://github.com/bitemyapp/bloodhound

View File

@ -173,16 +173,16 @@ joinPath ps = do
return $ joinPath' (s:ps)
appendSearchTypeParam :: Text -> SearchType -> Text
appendSearchTypeParam originalUrl st = addQuery [(keyEq, Just stParams)] originalUrl
where keyEq = "search_type="
stParams
| st == SearchTypeDfsQueryThenFetch = "dfs_query_then_fetch"
| st == SearchTypeCount = "count"
| st == SearchTypeScan = "scan&scroll=1m"
| st == SearchTypeQueryAndFetch = "query_and_fetch"
| st == SearchTypeDfsQueryAndFetch = "dfs_query_and_fetch"
appendSearchTypeParam originalUrl st = addQuery params originalUrl
where stText = "search_type"
params
| st == SearchTypeDfsQueryThenFetch = [(stText, Just "dfs_query_then_fetch")]
| st == SearchTypeCount = [(stText, Just "count")]
| st == SearchTypeScan = [(stText, Just "scan"), ("scroll", Just "1m")]
| st == SearchTypeQueryAndFetch = [(stText, Just "query_and_fetch")]
| st == SearchTypeDfsQueryAndFetch = [(stText, Just "dfs_query_and_fetch")]
-- used to catch 'SearchTypeQueryThenFetch', which is also the default
| otherwise = "query_then_fetch"
| otherwise = [(stText, Just "query_then_fetch")]
-- | Severely dumbed down query renderer. Assumes your data doesn't
-- need any encoding
@ -543,9 +543,9 @@ searchByType (IndexName indexName)
(MappingName mappingName) = bindM2 dispatchSearch url . return
where url = joinPath [indexName, mappingName, "_search"]
scanSearch' :: MonadBH m => Search -> m (Maybe ScrollId)
scanSearch' search = do
let url = joinPath ["_search"]
scanSearch' :: MonadBH m => IndexName -> MappingName -> Search -> m (Maybe ScrollId)
scanSearch' (IndexName indexName) (MappingName mappingName) search = do
let url = joinPath [indexName, mappingName, "_search"]
search' = search { searchType = SearchTypeScan }
resp' <- bindM2 dispatchSearch url (return search')
let msr = decode' $ responseBody resp' :: Maybe (SearchResult ())
@ -570,9 +570,11 @@ simpleAccumilator oldHits (newHits, msid) = do
(newHits', msid') <- scroll' msid
simpleAccumilator (oldHits ++ newHits) (newHits', msid')
scanSearch :: (FromJSON a, MonadBH m) => Search -> m [Hit a]
scanSearch search = do
msid <- scanSearch' search
-- | 'scanSearch' uses the 'scan&scroll' API of elastic,
-- for a given 'IndexName' and 'MappingName'.
scanSearch :: (FromJSON a, MonadBH m) => IndexName -> MappingName -> Search -> m [Hit a]
scanSearch indexName mappingName search = do
msid <- scanSearch' indexName mappingName search
(hits, msid') <- scroll' msid
(totalHits, _) <- simpleAccumilator [] (hits, msid')
return totalHits

View File

@ -47,7 +47,7 @@ module Database.Bloodhound.Types
, toTerms
, toDateHistogram
, omitNulls
, BH
, BH(..)
, runBH
, BHEnv(..)
, MonadBH(..)

9
stack-7.10.yaml Normal file
View File

@ -0,0 +1,9 @@
flags: {}
packages:
- '.'
extra-deps:
- doctest-0.10.1
- doctest-prop-0.2.0.1
- quickcheck-properties-0.1
- uri-bytestring-0.1.2
resolver: lts-3.1

1
stack-7.8.yaml Symbolic link
View File

@ -0,0 +1 @@
stack.yaml

View File

@ -762,3 +762,16 @@ main = hspec $ do
enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound]
enumFrom (pred maxBound :: DocVersion) `shouldBe` [pred maxBound, maxBound]
enumFromThen minBound (pred maxBound :: DocVersion) `shouldBe` [minBound, pred maxBound]
describe "scan&scroll API" $ do
it "returns documents using the scan&scroll API" $ withTestEnv $ do
_ <- insertData
_ <- insertOther
let search = (mkSearch (Just $ MatchAllQuery Nothing) Nothing) { size = (Size 1) }
regular_search <- searchTweet search
scan_search' <- scanSearch testIndex testMapping search :: BH IO [Hit Tweet]
let scan_search = map hitSource scan_search'
liftIO $
regular_search `shouldBe` Right exampleTweet -- Check that the size restriction is being honored
liftIO $
scan_search `shouldMatchList` [exampleTweet, otherTweet]