Mirror of https://github.com/typeable/bloodhound.git, synced 2025-01-05 21:36:03 +03:00
tests, seminearring, cleanup, basic filter composition working
This commit is contained in:
parent b2b5753105, commit cdc262f6ec
@@ -21,6 +21,7 @@ import Data.Aeson.TH (deriveJSON)
import qualified Data.ByteString.Lazy.Char8 as L
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import Data.Monoid
import Data.Text (Text)
import Data.Time.Clock (UTCTime)
import GHC.Generics (Generic)
@@ -53,33 +54,8 @@ instance (FromJSON a, ToJSON a) => FromJSON (Status a) where
                         v .: "tagline"
  parseJSON _ = empty

-- instance ToJSON (Status a)
-- instance FromJSON (Status a)

-- bloo <- (liftM responseBody $ parseUrl "http://localhost:9200/" >>= \r -> withManager (httpLbs r))

-- rootPath :: String
-- rootPath = rollUp $ ["/"]

-- -- Kinda hate this.
-- pathFromType :: String -> String -> String
-- pathFromType index docType = "/" ++ index ++ docType

-- -- Kinda hate this too.
-- rollUp :: [String] -> String
-- rollUp = Prelude.concat

main = simpleHttp "http://localhost:9200/events/event/_search?q=hostname:localhost&size=1" >>= L.putStrLn

-- data Response = Response { blah :: Text } deriving (Show)

-- indexDocument :: ToJSON a => a -> IO Response
-- indexDocument doc = ToJSON a

-- nomenclature is because you're dropping the type baggage. All of this is compile-time erased.
-- newtype ShardCount = ShardCount { unShardCount :: Int } deriving (Show, Generic)
-- newtype ReplicaCount = ReplicaCount { unReplicaCount :: Int } deriving (Show, Generic)

newtype ShardCount = ShardCount Int deriving (Show, Generic)
newtype ReplicaCount = ReplicaCount Int deriving (Show, Generic)

@@ -95,8 +71,6 @@ mkReplicaCount n
  | n > 1000 = Nothing -- ...
  | otherwise = Just (ReplicaCount n)

-- IndexSettings <$> mkShardCount 10 <*> mkReplicaCount 20

data IndexSettings =
  IndexSettings { indexShards :: ShardCount
                , indexReplicas :: ReplicaCount } deriving (Show)
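The commented line in this hunk hints at how the Maybe-returning smart constructors are meant to be combined: applicatively, so that an out-of-range count makes the whole IndexSettings come back as Nothing. A minimal sketch under that assumption (the name exampleSettings is illustrative, and mkShardCount is assumed to mirror mkReplicaCount's Maybe result):

-- Applicative composition of the smart constructors;
-- Nothing if either guard rejects its argument.
exampleSettings :: Maybe IndexSettings
exampleSettings = IndexSettings <$> mkShardCount 10 <*> mkReplicaCount 2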
@@ -113,20 +87,6 @@ data Strategy = RoundRobinStrat | RandomStrat | HeadStrat deriving (Show)

newtype Server = Server String deriving (Show)

-- data Errors = IndexAlreadyExistsError | GenericError deriving (Show)

-- data ElasticsearchError = ElasticsearchError
-- { status :: Int
-- , error :: Text } deriving (Show, Generic)

-- instance FromJSON ElasticsearchError

-- errorForResponse resp = do
-- let (sts, err) = decode (responseBody resp) :: ElasticsearchError
-- if T.isInfixOf "IndexAlreadyExistsException" error
-- then IndexAlreadyExistsError
-- else

type Reply = Network.HTTP.Conduit.Response L.ByteString
type Method = NHTM.Method

@@ -242,17 +202,17 @@ data EsResult a = EsResult { _index :: Text
                           , _type :: Text
                           , _id :: Text
                           , _version :: Int
                           , found :: Bool
                           , found :: Maybe Bool
                           , _source :: a } deriving (Eq, Show)

instance (FromJSON a, ToJSON a) => FromJSON (EsResult a) where
  parseJSON (Object v) = EsResult <$>
                         v .: "_index" <*>
                         v .: "_type" <*>
                         v .: "_id" <*>
                         v .: "_version" <*>
                         v .: "found" <*>
                         v .: "_source"
                         v .: "_index" <*>
                         v .: "_type" <*>
                         v .: "_id" <*>
                         v .: "_version" <*>
                         v .:? "found" <*>
                         v .: "_source"
  parseJSON _ = empty

getDocument :: Server -> IndexName -> MappingName
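The record change from found :: Bool to found :: Maybe Bool is matched in the parser by switching (.:) to (.:?): aeson's optional-key operator yields Nothing when the "found" key is absent instead of failing the whole EsResult parse. A standalone sketch of that behaviour, using aeson directly rather than code from this commit:

{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson
import Data.Aeson.Types (parseMaybe)

-- "found" is missing here, so (.:?) succeeds with Nothing,
-- whereas (.:) would have made the whole parse fail.
missingFound :: Maybe (Maybe Bool)
missingFound = decode "{\"_id\":\"1\"}" >>= parseMaybe (.:? "found")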
@@ -270,25 +230,6 @@ documentExists (Server server) indexName mappingName docId = do
  return exists where
    url = joinPath [server, indexName, mappingName, docId]

-- data Analyzer = AStandardAnalyzer StandardAnalyzer
-- | SimpleAnalyzer -- just "simple"
-- | WhitespaceAnalyzer
-- | StopAnalyzer
-- | KeywordAnalyzer
-- | PatternAnalyzer
-- | LanguageAnalyzer
-- | SnowballAnalyzer
-- | CustomAnalyzer

-- data StandardAnalyzer =
-- StandardAnalyzer
-- { stopwords :: Maybe [Text] -- default []
-- , max_token_length :: Maybe Int -- default 255
-- }

-- DefaultField -> "default_field": ""
-- Fields -> "fields": []

type QueryString = Text
-- status:active
-- author:"John Smith"
@@ -345,14 +286,33 @@ queryStringQuery query = emptyQueryStringQuery { query = query }
type FieldName = Text

type Cache = Bool -- caching on/off
defaultCache = False

data Filter = AndFilter [Filter] Cache
            | OrFilter [Filter] Cache
            | IdentityFilter
            | BoolFilter BoolMatch Cache
            | ExistsFilter FieldName -- always cached
            | GeoBoundingBoxFilter GeoBoundingBoxConstraint GeoFilterType Cache
            | GeoDistanceFilter GeoConstraint Distance Cache
              deriving (Show)

class Monoid a => Seminearring a where
  -- 0, +, *
  (<||>) :: a -> a -> a
  (<&&>) :: a -> a -> a
  (<&&>) = mappend

infixr 5 <||>
infixr 5 <&&>

instance Monoid Filter where
  mempty = IdentityFilter
  mappend a b = AndFilter [a, b] defaultCache

instance Seminearring Filter where
  a <||> b = OrFilter [a, b] defaultCache

instance ToJSON Filter where
  toJSON (AndFilter filters cache) =
    object ["and" .= fmap toJSON filters
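This is the "basic filter composition" from the commit message: <&&> (via mappend) nests both sides under an AndFilter, <||> nests them under an OrFilter, and IdentityFilter is the identity element. A small usage sketch built only from the definitions above (the field names are made up; FieldName is Text, so OverloadedStrings is assumed):

-- Combine filters as ordinary values; caching defaults to defaultCache (False).
composed :: Filter
composed = ExistsFilter "user" <&&> (ExistsFilter "hostname" <||> IdentityFilter)
-- = AndFilter [ ExistsFilter "user"
--             , OrFilter [ExistsFilter "hostname", IdentityFilter] False ] False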
@@ -360,6 +320,8 @@ instance ToJSON Filter where
  toJSON (OrFilter filters cache) =
    object ["or" .= fmap toJSON filters
           , "_cache" .= cache]
  toJSON (IdentityFilter) =
    object ["match_all" .= object []]
  toJSON (ExistsFilter fieldName) =
    object ["exists" .= object
            ["field" .= fieldName]]
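For reference, with the instance above IdentityFilter serializes to Elasticsearch's match_all filter, and composed filters carry their _cache flag. Roughly the following, though aeson does not guarantee object key order:

-- encode IdentityFilter
--   ~ {"match_all":{}}
-- encode (ExistsFilter "user" <&&> IdentityFilter)
--   ~ {"and":[{"exists":{"field":"user"}},{"match_all":{}}],"_cache":false}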
@@ -367,7 +329,6 @@ instance ToJSON Filter where
    object ["bool" .= toJSON boolMatch
           , "_cache" .= cache]

-- I dunno.
data Term = Term { termField :: Text
                 , termValue :: Text } deriving (Show)

@@ -446,25 +407,3 @@ data ShardResults =
  ShardResults { shardTotal :: Int
               , shardsSuccessful :: Int
               , shardsFailed :: Int } deriving (Show)

-- This is turning into a fractal of horror
-- data Fuzziness = FDateFuzziness DateFuzziness |

-- data Query = Query { query :: () } deriving (Show)
-- search :: Server -> IndexName -> Maybe MappingName
-- -> Query -> IO Reply
-- search = fuck

-- getStatus :: String -> IO (Maybe (Status Version))
-- getStatus server = do
-- request <- parseUrl $ server ++ rootPath
-- response <- withManager $ httpLbs request
-- return $ (decode $ responseBody response)

-- mkServer -- (Maybe Server) for URL parsing?

-- data Cluster = Cluster { targets :: [Server]
-- , strategy :: Strategy } deriving (Show)

-- target :: Cluster -> Server
-- target (Cluster targets HeadStrat) = head targets

@@ -46,7 +46,7 @@ test-suite tests
    bloodhound,
    http-conduit,
    http-types,
    hspec >= 1.9,
    hspec >= 1.8,
    QuickCheck >= 2.5,
    derive >= 2.5,
    text >= 0.11,

@@ -5,6 +5,7 @@ module Main where
import Database.Bloodhound.Client
import Data.Aeson
import Data.DeriveTH
import Data.Either (Either(..))
import Data.Maybe (fromJust)
import Data.Time.Calendar (Day(..))
import Data.Time.Clock (secondsToDiffTime, UTCTime(..))
@@ -51,11 +52,8 @@ main = hspec $ do
      let encoded = encode tweet
      _ <- deleteExampleIndex
      created <- createExampleIndex
      docCreated <- indexDocument (Server "http://localhost:9200")
                    "twitter" "tweet" tweet "1"
      docInserted <- getDocument (Server "http://localhost:9200")
                     "twitter" "tweet" "1"
      let newTweet = decode
                     (responseBody docInserted) :: Maybe (EsResult Tweet)
      docCreated <- indexDocument (Server "http://localhost:9200") "twitter" "tweet" tweet "1"
      docInserted <- getDocument (Server "http://localhost:9200") "twitter" "tweet" "1"
      let newTweet = eitherDecode (responseBody docInserted) :: Either String (EsResult Tweet)
      deleted <- deleteExampleIndex
      Just tweet `shouldBe` fmap _source newTweet
      Right tweet `shouldBe` fmap _source newTweet
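The test now uses eitherDecode rather than decode, so a failed parse surfaces as Left with an explanation instead of a bare Nothing, and the assertion compares against Right tweet accordingly. A tiny standalone illustration of the difference (not part of the test suite):

{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson

main :: IO ()
main = do
  print (decode      "not json" :: Maybe Int)          -- Nothing: the reason is lost
  print (eitherDecode "not json" :: Either String Int) -- Left <description of the parse error>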