Remove duplicated Data Connector API types
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/5930 GitOrigin-RevId: 8c11387fa8556c3cdf8c92a0924ae53d31b953a5
parent 04ae6abf78
commit 5c06eb7a3e
@@ -391,7 +391,7 @@ library dc-api
       , Hasura.Backends.DataConnector.API.V0.Query
       , Hasura.Backends.DataConnector.API.V0.Explain
       , Hasura.Backends.DataConnector.API.V0.Relationships
-      , Hasura.Backends.DataConnector.API.V0.Scalar.Type
+      , Hasura.Backends.DataConnector.API.V0.Scalar
       , Hasura.Backends.DataConnector.API.V0.Schema
       , Hasura.Backends.DataConnector.API.V0.Table

@@ -591,21 +591,8 @@ library
       , Hasura.Backends.DataConnector.Adapter.Transport
       , Hasura.Backends.DataConnector.Adapter.Types
       , Hasura.Backends.DataConnector.Agent.Client
-      , Hasura.Backends.DataConnector.IR.Aggregate
-      , Hasura.Backends.DataConnector.IR.Column
-      , Hasura.Backends.DataConnector.IR.Expression
-      , Hasura.Backends.DataConnector.IR.Function
-      , Hasura.Backends.DataConnector.IR.Name
-      , Hasura.Backends.DataConnector.IR.OrderBy
-      , Hasura.Backends.DataConnector.IR.Query
-      , Hasura.Backends.DataConnector.IR.Relationships
-      , Hasura.Backends.DataConnector.IR.Scalar.Type
-      , Hasura.Backends.DataConnector.IR.Scalar.Value
-      , Hasura.Backends.DataConnector.IR.Table
       , Hasura.Backends.DataConnector.Logging
       , Hasura.Backends.DataConnector.Plan
-      , Hasura.Backends.DataConnector.Schema.Column
-      , Hasura.Backends.DataConnector.Schema.Table

       -- Exposed for benchmark:
       , Hasura.Cache.Bounded
@@ -1043,15 +1030,9 @@ test-suite graphql-engine-tests
       Hasura.Backends.DataConnector.API.V0.OrderBySpec
       Hasura.Backends.DataConnector.API.V0.QuerySpec
       Hasura.Backends.DataConnector.API.V0.RelationshipsSpec
-      Hasura.Backends.DataConnector.API.V0.Scalar.TypeSpec
+      Hasura.Backends.DataConnector.API.V0.ScalarSpec
       Hasura.Backends.DataConnector.API.V0.SchemaSpec
      Hasura.Backends.DataConnector.API.V0.TableSpec
-      Hasura.Backends.DataConnector.RQLGenerator
-      Hasura.Backends.DataConnector.RQLGenerator.GenAnnSelectG
-      Hasura.Backends.DataConnector.RQLGenerator.GenCommon
-      Hasura.Backends.DataConnector.RQLGenerator.GenSelectArgsG
-      Hasura.Backends.DataConnector.RQLGenerator.GenSelectFromG
-      Hasura.Backends.DataConnector.RQLGenerator.GenTablePermG
       Hasura.Backends.MSSQL.ErrorSpec
       Hasura.Backends.MySQL.DataLoader.ExecuteTests
       Hasura.Backends.Postgres.Execute.PrepareSpec
@ -8,7 +8,7 @@ module Hasura.Backends.DataConnector.API.V0
|
||||
module Query,
|
||||
module Explain,
|
||||
module Relationships,
|
||||
module Scalar.Type,
|
||||
module Scalar,
|
||||
module Schema,
|
||||
module Table,
|
||||
)
|
||||
@ -23,6 +23,6 @@ import Hasura.Backends.DataConnector.API.V0.Expression as Expression
|
||||
import Hasura.Backends.DataConnector.API.V0.OrderBy as OrderBy
|
||||
import Hasura.Backends.DataConnector.API.V0.Query as Query
|
||||
import Hasura.Backends.DataConnector.API.V0.Relationships as Relationships
|
||||
import Hasura.Backends.DataConnector.API.V0.Scalar.Type as Scalar.Type
|
||||
import Hasura.Backends.DataConnector.API.V0.Scalar as Scalar
|
||||
import Hasura.Backends.DataConnector.API.V0.Schema as Schema
|
||||
import Hasura.Backends.DataConnector.API.V0.Table as Table
|
||||
|
@@ -18,7 +18,7 @@ import Data.Hashable (Hashable)
 import Data.OpenApi (ToSchema)
 import Data.Text (Text)
 import GHC.Generics (Generic)
-import Hasura.Backends.DataConnector.API.V0.Scalar.Type qualified as API.V0.Scalar
+import Hasura.Backends.DataConnector.API.V0.Scalar qualified as API.V0.Scalar
 import Prelude

 --------------------------------------------------------------------------------
@@ -36,7 +36,7 @@ instance HasCodec ColumnName where

 data ColumnInfo = ColumnInfo
   { dciName :: ColumnName,
-    dciType :: API.V0.Scalar.Type,
+    dciType :: API.V0.Scalar.ScalarType,
     dciNullable :: Bool,
     dciDescription :: Maybe Text
   }
@@ -102,15 +102,30 @@ instance HasCodec UnaryComparisonOperator where
       op@CustomUnaryComparisonOperator {} -> Right op
       op -> Left op

--- | A serializable representation of query expressions.
+-- | A serializable representation of query filter expressions.
 data Expression
-  = And [Expression]
-  | Or [Expression]
-  | Not Expression
-  | Exists ExistsInTable Expression
-  | ApplyBinaryComparisonOperator BinaryComparisonOperator ComparisonColumn ComparisonValue
-  | ApplyBinaryArrayComparisonOperator BinaryArrayComparisonOperator ComparisonColumn [Value]
-  | ApplyUnaryComparisonOperator UnaryComparisonOperator ComparisonColumn
+  = -- | A logical AND fold
+    And [Expression]
+  | -- | A logical OR fold
+    Or [Expression]
+  | -- | A logical NOT function
+    Not Expression
+  | -- | There must exist a row in the table specified by 'ExistsInTable' that
+    -- satisfies the 'Expression'
+    Exists ExistsInTable Expression
+  | -- | Apply a 'BinaryComparisonOperator' that compares a column to a 'ComparisonValue';
+    -- the result of this application will return "true" or "false" depending on the
+    -- 'BinaryComparisonOperator' that's being applied.
+    ApplyBinaryComparisonOperator BinaryComparisonOperator ComparisonColumn ComparisonValue
+  | -- | Apply a 'BinaryArrayComparisonOperator' that evaluates a column with the
+    -- 'BinaryArrayComparisonOperator' against an array of 'ComparisonValue's.
+    -- The result of this application will return "true" or "false" depending
+    -- on the 'BinaryArrayComparisonOperator' that's being applied.
+    ApplyBinaryArrayComparisonOperator BinaryArrayComparisonOperator ComparisonColumn [Value]
+  | -- | Apply a 'UnaryComparisonOperator' that evaluates a column with the
+    -- 'UnaryComparisonOperator'; the result of this application will return "true" or
+    -- "false" depending on the 'UnaryComparisonOperator' that's being applied.
+    ApplyUnaryComparisonOperator UnaryComparisonOperator ComparisonColumn
   deriving stock (Data, Eq, Generic, Ord, Show)
   deriving anyclass (Hashable, NFData)
   deriving (FromJSON, ToJSON, ToSchema) via Autodocodec Expression
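As an illustration only (not part of this diff), a filter roughly equivalent to SQL's age > 18 AND country IN ('NZ', 'AU') could be assembled from the constructors above. The record fields and qualified aeson import mirror names used elsewhere in this diff; treat it as a sketch under those assumptions, not shipped code.

{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson qualified as J
import Hasura.Backends.DataConnector.API.V0

-- age > 18 AND country IN ('NZ', 'AU')
exampleFilter :: Expression
exampleFilter =
  And
    [ ApplyBinaryComparisonOperator
        GreaterThan
        (ComparisonColumn {_ccPath = CurrentTable, _ccName = ColumnName "age"})
        (ScalarValue (J.Number 18)),
      ApplyBinaryArrayComparisonOperator
        In
        (ComparisonColumn {_ccPath = CurrentTable, _ccName = ColumnName "country"})
        [J.String "NZ", J.String "AU"]
    ]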
|
||||
|
@@ -71,11 +71,17 @@ instance HasCodec QueryRequest where

 -- | The details of a query against a table
 data Query = Query
-  { _qFields :: Maybe (KM.KeyMap Field),
+  { -- | Map of field name to Field definition.
+    _qFields :: Maybe (KM.KeyMap Field),
+    -- | Map of aggregate field name to Aggregate definition
     _qAggregates :: Maybe (KM.KeyMap API.V0.Aggregate),
+    -- | Optionally limit to N results.
     _qLimit :: Maybe Int,
+    -- | Optionally offset from the Nth result.
     _qOffset :: Maybe Int,
+    -- | Optionally constrain the results to satisfy some predicate.
     _qWhere :: Maybe API.V0.Expression,
+    -- | Optionally order the results by the value of one or more fields.
     _qOrderBy :: Maybe API.V0.OrderBy
   }
   deriving stock (Eq, Ord, Show, Generic, Data)
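A sketch of how these fields fit together (illustrative only; KM is Data.Aeson.KeyMap as imported by this module, and OverloadedStrings is assumed):

exampleQuery :: Query
exampleQuery =
  Query
    { -- select the "id" and "name" columns
      _qFields =
        Just $
          KM.fromList
            [ ("id", ColumnField (API.V0.ColumnName "id")),
              ("name", ColumnField (API.V0.ColumnName "name"))
            ],
      _qAggregates = Nothing,
      -- first 10 rows, no predicate or ordering
      _qLimit = Just 10,
      _qOffset = Nothing,
      _qWhere = Nothing,
      _qOrderBy = Nothing
    }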
|
||||
@ -92,6 +98,12 @@ instance HasCodec Query where
|
||||
<*> optionalFieldOrNull "where" "Optionally constrain the results to satisfy some predicate" .= _qWhere
|
||||
<*> optionalFieldOrNull "order_by" "Optionally order the results by the value of one or more fields" .= _qOrderBy
|
||||
|
||||
-- | A relationship consists of the following components:
|
||||
-- - a sub-query, from the perspective that a relationship field will occur
|
||||
-- within a broader 'Query'
|
||||
-- - a join condition relating the data returned by the sub-query with that
|
||||
-- of the broader 'Query'. This join condition is represented by the
|
||||
-- name of the relationship that defines the joining criteria.
|
||||
data RelationshipField = RelationshipField
|
||||
{ _rfRelationship :: API.V0.RelationshipName,
|
||||
_rfQuery :: Query
|
||||
@ -104,7 +116,12 @@ relationshipFieldObjectCodec =
|
||||
<$> requiredField "relationship" "The name of the relationship to follow for the subquery" .= _rfRelationship
|
||||
<*> requiredField "query" "Relationship query" .= _rfQuery
|
||||
|
||||
-- | A serializable field targeted by a 'Query'.
|
||||
-- | The specific fields that are targeted by a 'Query'.
|
||||
--
|
||||
-- A field conceptually falls under one of the two following categories:
|
||||
-- 1. a "column" within the data store that the query is being issued against
|
||||
-- 2. a "relationship", which indicates that the field is the result of
|
||||
-- a subquery
|
||||
data Field
|
||||
= ColumnField API.V0.ColumnName
|
||||
| RelField RelationshipField
|
||||
|
@ -2,8 +2,8 @@
|
||||
{-# LANGUAGE OverloadedLists #-}
|
||||
|
||||
--
|
||||
module Hasura.Backends.DataConnector.API.V0.Scalar.Type
|
||||
( Type (..),
|
||||
module Hasura.Backends.DataConnector.API.V0.Scalar
|
||||
( ScalarType (..),
|
||||
)
|
||||
where
|
||||
|
||||
@ -12,7 +12,7 @@ where
|
||||
import Autodocodec
|
||||
import Autodocodec.OpenAPI ()
|
||||
import Control.DeepSeq (NFData)
|
||||
import Data.Aeson (FromJSON, ToJSON)
|
||||
import Data.Aeson (FromJSON, ToJSON, ToJSONKey)
|
||||
import Data.Data (Data)
|
||||
import Data.Hashable (Hashable)
|
||||
import Data.OpenApi (ToSchema)
|
||||
@ -22,16 +22,16 @@ import Prelude
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data Type
|
||||
data ScalarType
|
||||
= StringTy
|
||||
| NumberTy
|
||||
| BoolTy
|
||||
| CustomTy {getCustomTy :: Text}
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Hashable, NFData)
|
||||
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec Type
|
||||
deriving anyclass (Hashable, NFData, ToJSONKey)
|
||||
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec ScalarType
|
||||
|
||||
instance HasCodec Type where
|
||||
instance HasCodec ScalarType where
|
||||
codec =
|
||||
named "ScalarType" $
|
||||
matchChoiceCodec
|
@ -28,6 +28,9 @@ import Prelude
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | The fully qualified name of a table. The last element in the list is the table name
|
||||
-- and all other elements represent namespacing of the table name.
|
||||
-- For example, for a database that has schemas, the name would be '[<schema>,<table name>]'
|
||||
newtype TableName = TableName {unTableName :: NonEmpty Text}
|
||||
deriving stock (Eq, Ord, Show, Generic, Data)
|
||||
deriving anyclass (NFData, Hashable)
|
||||
|
@ -2,21 +2,15 @@
|
||||
|
||||
module Hasura.Backends.DataConnector.Adapter.Backend (CustomBooleanOperator (..)) where
|
||||
|
||||
import Data.Aeson qualified as J (ToJSON (..), Value)
|
||||
import Data.Aeson qualified as J
|
||||
import Data.Aeson.Extended (ToJSONKeyValue (..))
|
||||
import Data.Aeson.Key (fromText)
|
||||
import Data.Aeson.Types qualified as J
|
||||
import Data.List.NonEmpty qualified as NonEmpty
|
||||
import Data.Text qualified as Text
|
||||
import Data.Text.Casing qualified as C
|
||||
import Data.Text.Extended ((<<>))
|
||||
import Hasura.Backends.DataConnector.Adapter.Types qualified as Adapter
|
||||
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
|
||||
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
|
||||
import Hasura.Backends.DataConnector.IR.Function qualified as IR.F
|
||||
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
|
||||
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
|
||||
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S.V
|
||||
import Hasura.Backends.DataConnector.IR.Table as IR.T
|
||||
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
|
||||
import Hasura.Base.Error (Code (ValidationFailed), QErr, runAesonParser, throw400)
|
||||
import Hasura.Incremental
|
||||
import Hasura.Prelude
|
||||
@ -36,27 +30,27 @@ import Language.GraphQL.Draft.Syntax qualified as G
|
||||
type Unimplemented = ()
|
||||
|
||||
instance Backend 'DataConnector where
|
||||
type BackendConfig 'DataConnector = InsOrdHashMap Adapter.DataConnectorName Adapter.DataConnectorOptions
|
||||
type BackendInfo 'DataConnector = HashMap Adapter.DataConnectorName Adapter.DataConnectorInfo
|
||||
type SourceConfig 'DataConnector = Adapter.SourceConfig
|
||||
type SourceConnConfiguration 'DataConnector = Adapter.ConnSourceConfig
|
||||
type BackendConfig 'DataConnector = InsOrdHashMap DC.DataConnectorName DC.DataConnectorOptions
|
||||
type BackendInfo 'DataConnector = HashMap DC.DataConnectorName DC.DataConnectorInfo
|
||||
type SourceConfig 'DataConnector = DC.SourceConfig
|
||||
type SourceConnConfiguration 'DataConnector = DC.ConnSourceConfig
|
||||
|
||||
type TableName 'DataConnector = IR.T.Name
|
||||
type FunctionName 'DataConnector = IR.F.Name
|
||||
type TableName 'DataConnector = DC.TableName
|
||||
type FunctionName 'DataConnector = DC.FunctionName
|
||||
type RawFunctionInfo 'DataConnector = XDisable
|
||||
type FunctionArgument 'DataConnector = XDisable
|
||||
type ConstraintName 'DataConnector = IR.T.ConstraintName
|
||||
type BasicOrderType 'DataConnector = IR.O.OrderDirection
|
||||
type ConstraintName 'DataConnector = DC.ConstraintName
|
||||
type BasicOrderType 'DataConnector = DC.OrderDirection
|
||||
type NullsOrderType 'DataConnector = Unimplemented
|
||||
type CountType 'DataConnector = IR.A.CountAggregate
|
||||
type Column 'DataConnector = IR.C.Name
|
||||
type CountType 'DataConnector = DC.CountAggregate
|
||||
type Column 'DataConnector = DC.ColumnName
|
||||
type ScalarValue 'DataConnector = J.Value
|
||||
type ScalarType 'DataConnector = IR.S.T.Type
|
||||
type ScalarType 'DataConnector = DC.ScalarType
|
||||
|
||||
-- This does not actually have to be the full IR Expression, in fact it is only
|
||||
-- required to represent literals, so we use a special type for that.
|
||||
-- The 'SQLExpression' type family should be removed in a future refactor
|
||||
type SQLExpression 'DataConnector = IR.S.V.Literal
|
||||
type SQLExpression 'DataConnector = DC.Literal
|
||||
type ScalarSelectionArguments 'DataConnector = Void
|
||||
type BooleanOperators 'DataConnector = CustomBooleanOperator
|
||||
type ExtraTableMetadata 'DataConnector = Unimplemented
|
||||
@ -75,20 +69,20 @@ instance Backend 'DataConnector where
|
||||
|
||||
isComparableType :: ScalarType 'DataConnector -> Bool
|
||||
isComparableType = \case
|
||||
IR.S.T.Number -> True
|
||||
IR.S.T.String -> True
|
||||
IR.S.T.Bool -> False
|
||||
IR.S.T.Custom _ -> False -- TODO: extend Capabilities for custom types
|
||||
DC.NumberTy -> True
|
||||
DC.StringTy -> True
|
||||
DC.BoolTy -> False
|
||||
DC.CustomTy _ -> False -- TODO: extend Capabilities for custom types
|
||||
|
||||
isNumType :: ScalarType 'DataConnector -> Bool
|
||||
isNumType IR.S.T.Number = True
|
||||
isNumType DC.NumberTy = True
|
||||
isNumType _ = False
|
||||
|
||||
textToScalarValue :: Maybe Text -> ScalarValue 'DataConnector
|
||||
textToScalarValue = error "textToScalarValue: not implemented for the Data Connector backend."
|
||||
|
||||
parseScalarValue :: ScalarType 'DataConnector -> J.Value -> Either QErr (ScalarValue 'DataConnector)
|
||||
parseScalarValue type' value = runAesonParser (IR.S.V.parseValue type') value
|
||||
parseScalarValue type' value = runAesonParser (parseValue type') value
|
||||
|
||||
scalarValueToJSON :: ScalarValue 'DataConnector -> J.Value
|
||||
scalarValueToJSON = error "scalarValueToJSON: not implemented for the Data Connector backend."
|
||||
@ -119,10 +113,10 @@ instance Backend 'DataConnector where
|
||||
functionGraphQLName = error "functionGraphQLName: not implemented for the Data Connector backend."
|
||||
|
||||
snakeCaseTableName :: TableName 'DataConnector -> Text
|
||||
snakeCaseTableName = Text.intercalate "_" . NonEmpty.toList . IR.T.unName
|
||||
snakeCaseTableName = Text.intercalate "_" . NonEmpty.toList . DC.unTableName
|
||||
|
||||
getTableIdentifier :: TableName 'DataConnector -> Either QErr C.GQLNameIdentifier
|
||||
getTableIdentifier name@(IR.T.Name (prefix :| suffixes)) =
|
||||
getTableIdentifier name@(DC.TableName (prefix :| suffixes)) =
|
||||
let identifier = do
|
||||
namePrefix <- G.mkName prefix
|
||||
nameSuffixes <- traverse G.mkNameSuffix suffixes
|
||||
@@ -147,3 +141,14 @@ instance Cacheable a => Cacheable (CustomBooleanOperator a)

 instance J.ToJSON a => ToJSONKeyValue (CustomBooleanOperator a) where
   toJSONKeyValue CustomBooleanOperator {..} = (fromText _cboName, J.toJSON _cboRHS)
+
+parseValue :: DC.ScalarType -> J.Value -> J.Parser J.Value
+parseValue type' val =
+  case (type', val) of
+    (_, J.Null) -> pure J.Null
+    (DC.StringTy, value) -> J.String <$> J.parseJSON value
+    (DC.BoolTy, value) -> J.Bool <$> J.parseJSON value
+    (DC.NumberTy, value) -> J.Number <$> J.parseJSON value
+    -- For custom scalar types we don't know what subset of JSON values
+    -- they accept, so we just accept any value.
+    (DC.CustomTy _, value) -> pure value
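A usage sketch of the behaviour above, doctest-style (parseValue is internal to this module; J.parseMaybe is aeson's Data.Aeson.Types.parseMaybe, and the shown results assume aeson's default Show output):

-- >>> J.parseMaybe (parseValue DC.NumberTy) (J.Number 42)
-- Just (Number 42.0)
-- >>> J.parseMaybe (parseValue DC.NumberTy) (J.String "forty-two")
-- Nothing
-- >>> J.parseMaybe (parseValue (DC.CustomTy "geometry")) (J.String "POINT(0 0)")
-- Just (String "POINT(0 0)")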
|
||||
|
@ -8,15 +8,12 @@ where
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
import Data.Aeson qualified as J
|
||||
import Data.ByteString.Lazy qualified as BL
|
||||
import Data.Environment qualified as Env
|
||||
import Data.Text.Encoding qualified as TE
|
||||
import Data.Text.Extended (toTxt)
|
||||
import Hasura.Backends.DataConnector.API qualified as API
|
||||
import Hasura.Backends.DataConnector.Adapter.ConfigTransform (transformSourceConfig)
|
||||
import Hasura.Backends.DataConnector.Adapter.Types (SourceConfig (..))
|
||||
import Hasura.Backends.DataConnector.Agent.Client (AgentClientT)
|
||||
import Hasura.Backends.DataConnector.IR.Query qualified as IR.Q
|
||||
import Hasura.Backends.DataConnector.Plan qualified as DC
|
||||
import Hasura.Base.Error (Code (..), QErr, throw400, throw500)
|
||||
import Hasura.EncJSON (EncJSON, encJFromBuilder, encJFromJValue)
|
||||
@ -36,7 +33,7 @@ import Witch qualified
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
instance BackendExecute 'DataConnector where
|
||||
type PreparedQuery 'DataConnector = IR.Q.QueryRequest
|
||||
type PreparedQuery 'DataConnector = API.QueryRequest
|
||||
type MultiplexedQuery 'DataConnector = Void
|
||||
type ExecutionMonad 'DataConnector = AgentClientT (Tracing.TraceT (ExceptT QErr IO))
|
||||
|
||||
@ -98,6 +95,6 @@ buildExplainAction fieldName sourceName SourceConfig {..} DC.QueryPlan {..} =
|
||||
(Just (API._erQuery explainResponse))
|
||||
(Just (API._erLines explainResponse))
|
||||
|
||||
toExplainPlan :: GQL.RootFieldAlias -> IR.Q.QueryRequest -> ExplainPlan
|
||||
toExplainPlan :: GQL.RootFieldAlias -> API.QueryRequest -> ExplainPlan
|
||||
toExplainPlan fieldName queryRequest =
|
||||
ExplainPlan fieldName (Just "") (Just [TE.decodeUtf8 $ BL.toStrict $ J.encode $ queryRequest])
|
||||
ExplainPlan fieldName (Just "") (Just [DC.renderQuery $ queryRequest])
|
||||
|
@ -22,11 +22,6 @@ import Hasura.Backends.DataConnector.API qualified as API
|
||||
import Hasura.Backends.DataConnector.Adapter.ConfigTransform (transformConnSourceConfig)
|
||||
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
|
||||
import Hasura.Backends.DataConnector.Agent.Client (AgentClientContext (..), runAgentClientT)
|
||||
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
|
||||
import Hasura.Backends.DataConnector.IR.Name qualified as IR.N
|
||||
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
|
||||
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S.V
|
||||
import Hasura.Backends.DataConnector.IR.Table qualified as IR.T
|
||||
import Hasura.Backends.Postgres.SQL.Types (PGDescription (..))
|
||||
import Hasura.Base.Error (Code (..), QErr, decodeValue, throw400, throw500, withPathK)
|
||||
import Hasura.Incremental qualified as Inc
|
||||
@ -209,7 +204,7 @@ resolveDatabaseMetadata' _ sc@(DC.SourceConfig {_scSchema = API.SchemaResponse {
|
||||
-- TODO: Add Column Mutability to the 'TableInfo'
|
||||
rciMutability = RQL.T.C.ColumnMutability False False
|
||||
},
|
||||
_ptmiPrimaryKey = RQL.T.T.PrimaryKey (RQL.T.T.Constraint (IR.T.ConstraintName "") (OID 0)) <$> NESeq.nonEmptySeq primaryKeyColumns,
|
||||
_ptmiPrimaryKey = RQL.T.T.PrimaryKey (RQL.T.T.Constraint (DC.ConstraintName "") (OID 0)) <$> NESeq.nonEmptySeq primaryKeyColumns,
|
||||
_ptmiUniqueConstraints = mempty,
|
||||
_ptmiForeignKeys = buildForeignKeySet foreignKeys,
|
||||
_ptmiViewInfo = Just $ RQL.T.T.ViewInfo False False False,
|
||||
@ -237,7 +232,7 @@ buildForeignKeySet (catMaybes -> foreignKeys) =
|
||||
foreignKeys <&> \(API.ForeignKeys constraints) ->
|
||||
constraints & HashMap.foldMapWithKey @[RQL.T.T.ForeignKeyMetadata 'DataConnector]
|
||||
\constraintName API.Constraint {..} -> maybeToList do
|
||||
let columnMapAssocList = HashMap.foldrWithKey' (\k v acc -> (Witch.from k, Witch.from v) : acc) [] cColumnMapping
|
||||
let columnMapAssocList = HashMap.foldrWithKey' (\k v acc -> (DC.ColumnName k, DC.ColumnName v) : acc) [] cColumnMapping
|
||||
columnMapping <- NEHashMap.fromList columnMapAssocList
|
||||
let foreignKey =
|
||||
RQL.T.T.ForeignKey
|
||||
@ -252,7 +247,7 @@ parseBoolExpOperations' ::
|
||||
forall m v.
|
||||
(MonadError QErr m, SchemaCache.TableCoreInfoRM 'DataConnector m) =>
|
||||
RQL.T.C.ValueParser 'DataConnector m v ->
|
||||
IR.T.Name ->
|
||||
DC.TableName ->
|
||||
RQL.T.T.FieldInfoMap (RQL.T.T.FieldInfo 'DataConnector) ->
|
||||
RQL.T.C.ColumnReference 'DataConnector ->
|
||||
J.Value ->
|
||||
@ -352,7 +347,7 @@ parseBoolExpOperations' rhsParser rootTable fieldInfoMap columnRef value =
|
||||
colInfo <- validateRhsColumn fieldInfoMap' colName
|
||||
pure $ RootOrCurrentColumn rootInfo colInfo
|
||||
|
||||
validateRhsColumn :: RQL.T.T.FieldInfoMap (RQL.T.T.FieldInfo 'DataConnector) -> IR.C.Name -> m IR.C.Name
|
||||
validateRhsColumn :: RQL.T.T.FieldInfoMap (RQL.T.T.FieldInfo 'DataConnector) -> DC.ColumnName -> m DC.ColumnName
|
||||
validateRhsColumn fieldInfoMap' rhsCol = do
|
||||
rhsType <- RQL.T.T.askColumnType fieldInfoMap' rhsCol "column operators can only compare table columns"
|
||||
when (columnType /= rhsType) $
|
||||
@ -374,7 +369,7 @@ parseCollectableType' collectableType = \case
|
||||
| HSU.isReqUserId t -> pure $ mkTypedSessionVar collectableType HSU.userIdHeader
|
||||
val -> case collectableType of
|
||||
CollectableTypeScalar scalarType ->
|
||||
PSESQLExp . IR.S.V.ValueLiteral <$> RQL.T.C.parseScalarValueColumnType scalarType val
|
||||
PSESQLExp . DC.ValueLiteral <$> RQL.T.C.parseScalarValueColumnType scalarType val
|
||||
CollectableTypeArray _ ->
|
||||
throw400 NotSupported "Array types are not supported by the Data Connector backend"
|
||||
|
||||
@ -385,8 +380,8 @@ mkTypedSessionVar ::
|
||||
mkTypedSessionVar columnType =
|
||||
PSESessVar (columnTypeToScalarType <$> columnType)
|
||||
|
||||
columnTypeToScalarType :: RQL.T.C.ColumnType 'DataConnector -> IR.S.T.Type
|
||||
columnTypeToScalarType :: RQL.T.C.ColumnType 'DataConnector -> DC.ScalarType
|
||||
columnTypeToScalarType = \case
|
||||
RQL.T.C.ColumnScalar scalarType -> scalarType
|
||||
-- NOTE: This should be unreachable:
|
||||
RQL.T.C.ColumnEnumReference _ -> IR.S.T.String
|
||||
RQL.T.C.ColumnEnumReference _ -> DC.StringTy
|
||||
|
@ -15,12 +15,7 @@ import Data.Text.Extended ((<<>))
|
||||
import Data.Text.NonEmpty qualified as NET
|
||||
import Hasura.Backends.DataConnector.API.V0.Capabilities (lookupComparisonInputObjectDefinition)
|
||||
import Hasura.Backends.DataConnector.Adapter.Backend (CustomBooleanOperator (..))
|
||||
import Hasura.Backends.DataConnector.Adapter.Types qualified as Adapter
|
||||
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
|
||||
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
|
||||
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
|
||||
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
|
||||
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S.V
|
||||
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
|
||||
import Hasura.Base.Error
|
||||
import Hasura.GraphQL.Parser.Class
|
||||
import Hasura.GraphQL.Schema.Backend (BackendSchema (..), BackendTableSelectSchema (..), ComparisonExp, MonadBuildSchema)
|
||||
@ -74,7 +69,7 @@ instance BackendSchema 'DataConnector where
|
||||
comparisonExps = comparisonExps'
|
||||
|
||||
countTypeInput = countTypeInput'
|
||||
aggregateOrderByCountType = IR.S.T.Number
|
||||
aggregateOrderByCountType = DC.NumberTy
|
||||
computedField =
|
||||
error "computedField: not implemented for the Data Connector backend."
|
||||
|
||||
@ -105,10 +100,10 @@ columnParser' ::
|
||||
GS.C.SchemaT r m (P.Parser 'P.Both n (IR.ValueWithOrigin (RQL.ColumnValue 'DataConnector)))
|
||||
columnParser' columnType nullability = do
|
||||
parser <- case columnType of
|
||||
RQL.ColumnScalar scalarType@IR.S.T.String -> pure . possiblyNullable' scalarType nullability $ J.String <$> P.string
|
||||
RQL.ColumnScalar scalarType@IR.S.T.Number -> pure . possiblyNullable' scalarType nullability $ J.Number <$> P.scientific
|
||||
RQL.ColumnScalar scalarType@IR.S.T.Bool -> pure . possiblyNullable' scalarType nullability $ J.Bool <$> P.boolean
|
||||
RQL.ColumnScalar scalarType@(IR.S.T.Custom name) -> do
|
||||
RQL.ColumnScalar scalarType@DC.StringTy -> pure . possiblyNullable' scalarType nullability $ J.String <$> P.string
|
||||
RQL.ColumnScalar scalarType@DC.NumberTy -> pure . possiblyNullable' scalarType nullability $ J.Number <$> P.scientific
|
||||
RQL.ColumnScalar scalarType@DC.BoolTy -> pure . possiblyNullable' scalarType nullability $ J.Bool <$> P.boolean
|
||||
RQL.ColumnScalar scalarType@(DC.CustomTy name) -> do
|
||||
gqlName <-
|
||||
GQL.mkName name
|
||||
`onNothing` throw400 ValidationFailed ("The column type name " <> name <<> " is not a valid GraphQL name")
|
||||
@ -141,16 +136,16 @@ possiblyNullable' _scalarType (GQL.Nullability isNullable)
|
||||
|
||||
orderByOperators' :: RQL.SourceInfo 'DataConnector -> NamingCase -> (GQL.Name, NonEmpty (P.Definition P.EnumValueInfo, (RQL.BasicOrderType 'DataConnector, RQL.NullsOrderType 'DataConnector)))
|
||||
orderByOperators' RQL.SourceInfo {_siConfiguration} _tCase =
|
||||
let dcName = Adapter._scDataConnectorName _siConfiguration
|
||||
orderBy = fromMaybe Name._order_by $ GQL.mkName $ NET.unNonEmptyText (Adapter.unDataConnectorName dcName) <> "_order_by"
|
||||
let dcName = DC._scDataConnectorName _siConfiguration
|
||||
orderBy = fromMaybe Name._order_by $ GQL.mkName $ NET.unNonEmptyText (DC.unDataConnectorName dcName) <> "_order_by"
|
||||
in (orderBy,) $
|
||||
-- NOTE: NamingCase is not being used here as we don't support naming conventions for this DB
|
||||
NE.fromList
|
||||
[ ( define $$(GQL.litName "asc") "in ascending order",
|
||||
(IR.O.Ascending, ())
|
||||
(DC.Ascending, ())
|
||||
),
|
||||
( define $$(GQL.litName "desc") "in descending order",
|
||||
(IR.O.Descending, ())
|
||||
(DC.Descending, ())
|
||||
)
|
||||
]
|
||||
where
|
||||
@ -192,11 +187,11 @@ comparisonExps' sourceInfo columnType = P.memoizeOn 'comparisonExps' (dataConnec
|
||||
customOperators
|
||||
]
|
||||
where
|
||||
dataConnectorName = sourceInfo ^. RQL.siConfiguration . Adapter.scDataConnectorName
|
||||
dataConnectorName = sourceInfo ^. RQL.siConfiguration . DC.scDataConnectorName
|
||||
|
||||
mkListLiteral :: [RQL.ColumnValue 'DataConnector] -> IR.UnpreparedValue 'DataConnector
|
||||
mkListLiteral columnValues =
|
||||
IR.UVLiteral . IR.S.V.ArrayLiteral $ RQL.cvValue <$> columnValues
|
||||
IR.UVLiteral . DC.ArrayLiteral $ RQL.cvValue <$> columnValues
|
||||
|
||||
mkCustomOperators ::
|
||||
NamingCase ->
|
||||
@ -204,7 +199,7 @@ comparisonExps' sourceInfo columnType = P.memoizeOn 'comparisonExps' (dataConnec
|
||||
GQL.Name ->
|
||||
GS.C.SchemaT r m [P.InputFieldsParser n (Maybe (CustomBooleanOperator (IR.UnpreparedValue 'DataConnector)))]
|
||||
mkCustomOperators tCase collapseIfNull typeName = do
|
||||
let capabilities = sourceInfo ^. RQL.siConfiguration . Adapter.scCapabilities
|
||||
let capabilities = sourceInfo ^. RQL.siConfiguration . DC.scCapabilities
|
||||
case lookupComparisonInputObjectDefinition capabilities typeName of
|
||||
Nothing -> pure []
|
||||
Just GQL.InputObjectTypeDefinition {..} -> do
|
||||
@ -225,7 +220,7 @@ comparisonExps' sourceInfo columnType = P.memoizeOn 'comparisonExps' (dataConnec
|
||||
mkArgParser argType =
|
||||
fmap IR.mkParameter
|
||||
<$> columnParser'
|
||||
(RQL.ColumnScalar $ IR.S.T.fromGQLType $ GQL.getBaseType argType)
|
||||
(RQL.ColumnScalar $ DC.fromGQLType $ GQL.getBaseType argType)
|
||||
(GQL.Nullability $ GQL.isNotNull argType)
|
||||
|
||||
tableArgs' ::
|
||||
@ -254,13 +249,13 @@ tableArgs' sourceName tableInfo = do
|
||||
|
||||
countTypeInput' ::
|
||||
MonadParse n =>
|
||||
Maybe (P.Parser 'P.Both n IR.C.Name) ->
|
||||
P.InputFieldsParser n (IR.CountDistinct -> IR.A.CountAggregate)
|
||||
Maybe (P.Parser 'P.Both n DC.ColumnName) ->
|
||||
P.InputFieldsParser n (IR.CountDistinct -> DC.CountAggregate)
|
||||
countTypeInput' = \case
|
||||
Just columnEnum -> mkCountAggregate <$> P.fieldOptional Name._column Nothing columnEnum
|
||||
Nothing -> pure $ mkCountAggregate Nothing
|
||||
where
|
||||
mkCountAggregate :: Maybe IR.C.Name -> IR.CountDistinct -> IR.A.CountAggregate
|
||||
mkCountAggregate Nothing _ = IR.A.StarCount
|
||||
mkCountAggregate (Just column) IR.SelectCountDistinct = IR.A.ColumnDistinctCount column
|
||||
mkCountAggregate (Just column) IR.SelectCountNonDistinct = IR.A.ColumnCount column
|
||||
mkCountAggregate :: Maybe DC.ColumnName -> IR.CountDistinct -> DC.CountAggregate
|
||||
mkCountAggregate Nothing _ = DC.StarCount
|
||||
mkCountAggregate (Just column) IR.SelectCountDistinct = DC.ColumnDistinctCount column
|
||||
mkCountAggregate (Just column) IR.SelectCountNonDistinct = DC.ColumnCount column
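Put differently, the new mapping behaves as follows (illustrative equalities, not code from this diff):

-- mkCountAggregate Nothing                     IR.SelectCountDistinct    == DC.StarCount
-- mkCountAggregate (Just (DC.ColumnName "id")) IR.SelectCountDistinct    == DC.ColumnDistinctCount (DC.ColumnName "id")
-- mkCountAggregate (Just (DC.ColumnName "id")) IR.SelectCountNonDistinct == DC.ColumnCount (DC.ColumnName "id")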
|
||||
|
@ -7,10 +7,10 @@ module Hasura.Backends.DataConnector.Adapter.Transport () where
|
||||
import Control.Exception.Safe (throwIO)
|
||||
import Data.Aeson qualified as J
|
||||
import Data.Text.Extended ((<>>))
|
||||
import Hasura.Backends.DataConnector.API qualified as API
|
||||
import Hasura.Backends.DataConnector.Adapter.Execute ()
|
||||
import Hasura.Backends.DataConnector.Adapter.Types (SourceConfig (..))
|
||||
import Hasura.Backends.DataConnector.Agent.Client (AgentClientContext (..), AgentClientT, runAgentClientT)
|
||||
import Hasura.Backends.DataConnector.IR.Query qualified as IR.Q
|
||||
import Hasura.Backends.DataConnector.Plan qualified as DC
|
||||
import Hasura.Base.Error (Code (NotSupported), QErr, throw400)
|
||||
import Hasura.EncJSON (EncJSON)
|
||||
@ -51,7 +51,7 @@ runDBQuery' ::
|
||||
Logger Hasura ->
|
||||
SourceConfig ->
|
||||
AgentClientT (Tracing.TraceT (ExceptT QErr IO)) a ->
|
||||
Maybe IR.Q.QueryRequest ->
|
||||
Maybe API.QueryRequest ->
|
||||
m (DiffTime, a)
|
||||
runDBQuery' requestId query fieldName _userInfo logger SourceConfig {..} action queryRequest = do
|
||||
void $ HGL.logQueryLog logger $ mkQueryLog query fieldName queryRequest requestId
|
||||
@ -64,7 +64,7 @@ runDBQuery' requestId query fieldName _userInfo logger SourceConfig {..} action
|
||||
mkQueryLog ::
|
||||
GQLReqUnparsed ->
|
||||
RootFieldAlias ->
|
||||
Maybe IR.Q.QueryRequest ->
|
||||
Maybe API.QueryRequest ->
|
||||
RequestId ->
|
||||
HGL.QueryLog
|
||||
mkQueryLog gqlQuery fieldName maybeQuery requestId =
|
||||
|
@ -3,13 +3,9 @@
|
||||
|
||||
module Hasura.Backends.DataConnector.Adapter.Types
|
||||
( ConnSourceConfig (..),
|
||||
SourceConfig (..),
|
||||
DataConnectorName (..),
|
||||
DataConnectorOptions (..),
|
||||
DataConnectorInfo (..),
|
||||
CountType (..),
|
||||
SourceTimeout (),
|
||||
sourceTimeoutMicroseconds,
|
||||
SourceConfig (..),
|
||||
scCapabilities,
|
||||
scConfig,
|
||||
scDataConnectorName,
|
||||
@ -18,6 +14,18 @@ module Hasura.Backends.DataConnector.Adapter.Types
|
||||
scSchema,
|
||||
scTemplate,
|
||||
scTimeoutMicroseconds,
|
||||
DataConnectorName (..),
|
||||
DataConnectorOptions (..),
|
||||
DataConnectorInfo (..),
|
||||
TableName (..),
|
||||
ConstraintName (..),
|
||||
ColumnName (..),
|
||||
FunctionName (..),
|
||||
CountAggregate (..),
|
||||
Literal (..),
|
||||
OrderDirection (..),
|
||||
ScalarType (..),
|
||||
fromGQLType,
|
||||
)
|
||||
where
|
||||
|
||||
@ -26,18 +34,26 @@ import Control.Lens (makeLenses)
|
||||
import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey, genericParseJSON, genericToJSON)
|
||||
import Data.Aeson qualified as J
|
||||
import Data.Aeson.KeyMap qualified as J
|
||||
import Data.Aeson.Types (toJSONKeyText)
|
||||
import Data.Data (Typeable)
|
||||
import Data.Text.Extended (ToTxt)
|
||||
import Data.List.NonEmpty qualified as NonEmpty
|
||||
import Data.Text qualified as Text
|
||||
import Data.Text.Extended (ToTxt (..))
|
||||
import Data.Text.NonEmpty (NonEmptyText)
|
||||
import Hasura.Backends.DataConnector.API qualified as API
|
||||
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
|
||||
import Hasura.Base.ErrorValue qualified as ErrorValue
|
||||
import Hasura.Base.ToErrorValue (ToErrorValue (..))
|
||||
import Hasura.GraphQL.Parser.Name.TypeSystem (_Boolean, _Float, _Int, _String)
|
||||
import Hasura.Incremental (Cacheable (..))
|
||||
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
|
||||
import Hasura.Prelude
|
||||
import Language.GraphQL.Draft.Syntax qualified as GQL
|
||||
import Network.HTTP.Client qualified as HTTP
|
||||
import Servant.Client (BaseUrl)
|
||||
import Witch qualified
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data ConnSourceConfig = ConnSourceConfig
|
||||
{ -- | An arbitrary JSON payload to be passed to the agent in a
|
||||
-- header. HGE validates this against the OpenAPI Spec provided by
|
||||
@ -68,6 +84,8 @@ instance HasCodec ConnSourceConfig where
|
||||
instance Cacheable ConnSourceConfig where
|
||||
unchanged _ = (==)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- NOTE: There may be a time type with units datatype already available somewhere
|
||||
data SourceTimeout
|
||||
= SourceTimeoutSeconds Int
|
||||
@ -97,6 +115,8 @@ instance ToJSON SourceTimeout where
|
||||
toJSON (SourceTimeoutMilliseconds t) = J.object ["milliseconds" J..= t]
|
||||
toJSON (SourceTimeoutMicroseconds t) = J.object ["microseconds" J..= t]
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data SourceConfig = SourceConfig
|
||||
{ _scEndpoint :: BaseUrl,
|
||||
_scConfig :: API.Config,
|
||||
@ -127,6 +147,8 @@ instance J.ToJSON SourceConfig where
|
||||
instance Cacheable SourceConfig where
|
||||
unchanged _ = (==)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
newtype DataConnectorName = DataConnectorName {unDataConnectorName :: NonEmptyText}
|
||||
deriving stock (Eq, Ord, Show, Typeable, Generic)
|
||||
deriving newtype (FromJSON, ToJSON, FromJSONKey, ToJSONKey, Hashable, ToTxt)
|
||||
@ -145,6 +167,8 @@ instance FromJSON DataConnectorOptions where
|
||||
instance ToJSON DataConnectorOptions where
|
||||
toJSON = genericToJSON hasuraJSON
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data DataConnectorInfo = DataConnectorInfo
|
||||
{ _dciOptions :: DataConnectorOptions,
|
||||
_dciCapabilities :: API.Capabilities,
|
||||
@ -164,10 +188,162 @@ instance Cacheable DataConnectorInfo where
|
||||
&& _dciCapabilities dci0 == _dciCapabilities dci1
|
||||
&& _dciConfigSchemaResponse dci0 == _dciConfigSchemaResponse dci1
|
||||
|
||||
data CountType
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | The fully qualified name of a table. The last element in the list is the table name
-- and all other elements represent namespacing of the table name.
-- For example, for a database that has schemas, the name would be '[<schema>,<table name>]'
newtype TableName = TableName {unTableName :: NonEmpty Text}
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving newtype (Cacheable, Hashable, NFData, ToJSON)

instance FromJSON TableName where
  parseJSON value =
    TableName <$> J.parseJSON value
      -- Fallback parsing of a single string to support older metadata
      <|> J.withText "TableName" (\text -> pure . TableName $ text :| []) value
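Doctest-style sketch of what the fallback buys (illustrative only): both the list form and the legacy single-string form decode to the same shape.

-- >>> J.decode "[\"public\", \"Artist\"]" :: Maybe TableName
-- Just (TableName {unTableName = "public" :| ["Artist"]})
-- >>> J.decode "\"Artist\"" :: Maybe TableName
-- Just (TableName {unTableName = "Artist" :| []})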
|
||||
|
||||
instance ToJSONKey TableName where
|
||||
toJSONKey = toJSONKeyText toTxt
|
||||
|
||||
instance Witch.From API.TableName TableName where
|
||||
from (API.TableName n) = TableName n
|
||||
|
||||
instance Witch.From TableName API.TableName where
|
||||
from (TableName n) = API.TableName n
|
||||
|
||||
instance ToTxt TableName where
|
||||
toTxt = Text.intercalate "." . NonEmpty.toList . unTableName
|
||||
|
||||
instance ToErrorValue TableName where
|
||||
toErrorValue = ErrorValue.squote . toTxt
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
newtype ConstraintName = ConstraintName {unConstraintName :: Text}
|
||||
deriving stock (Eq, Ord, Show, Generic, Data)
|
||||
deriving newtype (NFData, Hashable, Cacheable, FromJSON, ToJSON)
|
||||
|
||||
instance Witch.From API.ConstraintName ConstraintName where
|
||||
from (API.ConstraintName n) = ConstraintName n
|
||||
|
||||
instance Witch.From ConstraintName API.ConstraintName where
|
||||
from (ConstraintName n) = API.ConstraintName n
|
||||
|
||||
instance ToTxt ConstraintName where
|
||||
toTxt = unConstraintName
|
||||
|
||||
instance ToErrorValue ConstraintName where
|
||||
toErrorValue = ErrorValue.squote . unConstraintName
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
newtype ColumnName = ColumnName {unColumnName :: Text}
|
||||
deriving stock (Eq, Ord, Show, Generic, Data)
|
||||
deriving newtype (NFData, Hashable, Cacheable, FromJSON, ToJSON, ToJSONKey, FromJSONKey)
|
||||
|
||||
instance Witch.From API.ColumnName ColumnName where
|
||||
from (API.ColumnName n) = ColumnName n
|
||||
|
||||
instance Witch.From ColumnName API.ColumnName where
|
||||
from (ColumnName n) = API.ColumnName n
|
||||
|
||||
instance ToTxt ColumnName where
|
||||
toTxt = unColumnName
|
||||
|
||||
instance ToErrorValue ColumnName where
|
||||
toErrorValue = ErrorValue.squote . unColumnName
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
newtype FunctionName = FunctionName {unFunctionName :: NonEmpty Text}
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving newtype (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance ToJSONKey FunctionName where
|
||||
toJSONKey = toJSONKeyText toTxt
|
||||
|
||||
instance ToTxt FunctionName where
|
||||
toTxt = Text.intercalate "." . NonEmpty.toList . unFunctionName
|
||||
|
||||
instance ToErrorValue FunctionName where
|
||||
toErrorValue = ErrorValue.squote . toTxt
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data CountAggregate
|
||||
= StarCount
|
||||
| ColumnCount (NonEmpty IR.C.Name)
|
||||
| ColumnDistinctCount (NonEmpty IR.C.Name)
|
||||
deriving (Eq, Ord, Show, Generic, Data)
|
||||
| ColumnCount ColumnName
|
||||
| ColumnDistinctCount ColumnName
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data Literal
|
||||
= ValueLiteral J.Value
|
||||
| ArrayLiteral [J.Value]
|
||||
deriving stock (Eq, Show, Generic, Ord)
|
||||
deriving anyclass (Cacheable, Hashable, NFData, ToJSON)
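For example (illustrative only, OverloadedStrings assumed), a single comparison value and an IN-list would be represented as:

scalarLit :: Literal
scalarLit = ValueLiteral (J.Number 42) -- e.g. the right-hand side of an equality comparison

inListLit :: Literal
inListLit = ArrayLiteral [J.String "NZ", J.String "AU"] -- e.g. the right-hand side of an IN comparison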
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data OrderDirection
|
||||
= Ascending
|
||||
| Descending
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, Hashable, NFData)
|
||||
|
||||
instance ToJSON OrderDirection where
|
||||
toJSON = J.genericToJSON J.defaultOptions
|
||||
|
||||
instance Witch.From API.OrderDirection OrderDirection where
|
||||
from API.Ascending = Ascending
|
||||
from API.Descending = Descending
|
||||
|
||||
instance Witch.From OrderDirection API.OrderDirection where
|
||||
from Ascending = API.Ascending
|
||||
from Descending = API.Descending
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
data ScalarType
|
||||
= StringTy
|
||||
| NumberTy
|
||||
| BoolTy
|
||||
| CustomTy Text
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, FromJSONKey, Hashable, NFData, ToJSON, ToJSONKey)
|
||||
|
||||
instance ToTxt ScalarType where
|
||||
toTxt = tshow
|
||||
|
||||
instance ToErrorValue ScalarType where
|
||||
toErrorValue = ErrorValue.squote . tshow
|
||||
|
||||
instance Witch.From API.ScalarType ScalarType where
|
||||
from = \case
|
||||
API.StringTy -> StringTy
|
||||
API.NumberTy -> NumberTy
|
||||
API.BoolTy -> BoolTy
|
||||
API.CustomTy name -> CustomTy name
|
||||
|
||||
instance Witch.From ScalarType API.ScalarType where
|
||||
from = \case
|
||||
StringTy -> API.StringTy
|
||||
NumberTy -> API.NumberTy
|
||||
BoolTy -> API.BoolTy
|
||||
CustomTy name -> API.CustomTy name
|
||||
|
||||
fromGQLType :: GQL.Name -> ScalarType
|
||||
fromGQLType typeName =
|
||||
if
|
||||
| typeName == _String -> StringTy
|
||||
| typeName == _Int -> NumberTy
|
||||
| typeName == _Float -> NumberTy
|
||||
| typeName == _Boolean -> BoolTy
|
||||
| otherwise -> CustomTy $ GQL.unName typeName
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
$(makeLenses ''SourceConfig)
|
||||
|
@ -1,77 +0,0 @@
|
||||
{-# LANGUAGE DeriveAnyClass #-}
|
||||
|
||||
module Hasura.Backends.DataConnector.IR.Aggregate
|
||||
( Aggregate (..),
|
||||
SingleColumnAggregate (..),
|
||||
SingleColumnAggregateFunction (..),
|
||||
CountAggregate (..),
|
||||
)
|
||||
where
|
||||
|
||||
import Data.Aeson (FromJSON, ToJSON)
|
||||
import Hasura.Backends.DataConnector.API qualified as API
|
||||
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
|
||||
import Hasura.Incremental (Cacheable)
|
||||
import Hasura.Prelude
|
||||
import Witch qualified
|
||||
|
||||
data Aggregate
|
||||
= SingleColumn SingleColumnAggregate
|
||||
| Count CountAggregate
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance Witch.From Aggregate API.Aggregate where
|
||||
from (SingleColumn singleColumn) = API.SingleColumn (Witch.from singleColumn)
|
||||
from (Count StarCount) = API.StarCount
|
||||
from (Count (ColumnCount column)) = API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = False}
|
||||
from (Count (ColumnDistinctCount column)) = API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = True}
|
||||
|
||||
data SingleColumnAggregate = SingleColumnAggregate
|
||||
{ _scaFunction :: SingleColumnAggregateFunction,
|
||||
_scaColumn :: IR.C.Name
|
||||
}
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance Witch.From SingleColumnAggregate API.SingleColumnAggregate where
|
||||
from SingleColumnAggregate {..} = API.SingleColumnAggregate (Witch.from _scaFunction) (Witch.from _scaColumn)
|
||||
|
||||
data SingleColumnAggregateFunction
|
||||
= Average
|
||||
| Max
|
||||
| Min
|
||||
| StandardDeviationPopulation
|
||||
| StandardDeviationSample
|
||||
| Sum
|
||||
| VariancePopulation
|
||||
| VarianceSample
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance Witch.From API.SingleColumnAggregateFunction SingleColumnAggregateFunction where
|
||||
from API.Average = Average
|
||||
from API.Max = Max
|
||||
from API.Min = Min
|
||||
from API.StandardDeviationPopulation = StandardDeviationPopulation
|
||||
from API.StandardDeviationSample = StandardDeviationSample
|
||||
from API.Sum = Sum
|
||||
from API.VariancePopulation = VariancePopulation
|
||||
from API.VarianceSample = VarianceSample
|
||||
|
||||
instance Witch.From SingleColumnAggregateFunction API.SingleColumnAggregateFunction where
|
||||
from Average = API.Average
|
||||
from Max = API.Max
|
||||
from Min = API.Min
|
||||
from StandardDeviationPopulation = API.StandardDeviationPopulation
|
||||
from StandardDeviationSample = API.StandardDeviationSample
|
||||
from Sum = API.Sum
|
||||
from VariancePopulation = API.VariancePopulation
|
||||
from VarianceSample = API.VarianceSample
|
||||
|
||||
data CountAggregate
|
||||
= StarCount
|
||||
| ColumnCount IR.C.Name
|
||||
| ColumnDistinctCount IR.C.Name
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
@ -1,25 +0,0 @@
|
||||
module Hasura.Backends.DataConnector.IR.Column
|
||||
( Name,
|
||||
)
|
||||
where
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
import Hasura.Backends.DataConnector.IR.Name qualified as IR.N
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | An alias for 'Name.Column' 'Name.Name's.
|
||||
--
|
||||
-- This alias is defined in its own module primarily for the convenience of
|
||||
-- importing it qualified.
|
||||
--
|
||||
-- For example:
|
||||
-- @
|
||||
-- import Data.Coerce (coerce)
|
||||
-- import Hasura.Experimental.IR.Column qualified as Column (Name)
|
||||
--
|
||||
-- example :: Column.Name
|
||||
-- example = coerce @Text @Column.Name "column_name"
|
||||
-- @
|
||||
type Name = IR.N.Name 'IR.N.Column
|
@ -1,236 +0,0 @@
|
||||
{-# LANGUAGE DeriveAnyClass #-}
|
||||
|
||||
module Hasura.Backends.DataConnector.IR.Expression
|
||||
( Expression (..),
|
||||
ExistsInTable (..),
|
||||
BinaryComparisonOperator (..),
|
||||
BinaryArrayComparisonOperator (..),
|
||||
UnaryComparisonOperator (..),
|
||||
ComparisonColumn (..),
|
||||
ColumnPath (..),
|
||||
ComparisonValue (..),
|
||||
)
|
||||
where
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
import Data.Aeson (FromJSON, ToJSON, Value)
|
||||
import Hasura.Backends.DataConnector.API qualified as API
|
||||
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
|
||||
import Hasura.Backends.DataConnector.IR.Relationships qualified as IR.R
|
||||
import Hasura.Backends.DataConnector.IR.Table qualified as IR.T
|
||||
import Hasura.Incremental (Cacheable)
|
||||
import Hasura.Prelude
|
||||
import Witch qualified
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | A "concrete" expression type for datasource-agnostic queries (as opposed
|
||||
-- to our existing polymorphic intermediate representation).
|
||||
--
|
||||
-- This type should be seen as an intermediate phase of the processing pipeline
|
||||
-- which provides a high-level interface that the GraphQL Engine can use to
|
||||
-- inspect, manipulate, optimize, etc. before sending off to an agent that will
|
||||
-- be responsible for performing query generation/execution.
|
||||
--
|
||||
-- This type should ascribe clear semantics to its sub-expressions; when this
|
||||
-- is not possible, it should clearly defer to the semantics of some reference
|
||||
-- datasource with clearer documentation.
|
||||
--
|
||||
-- e.g. https://www.postgresql.org/docs/13/sql-expressions.html
|
||||
data Expression
|
||||
= -- | A logical @AND@ fold.
|
||||
--
|
||||
-- cf. https://www.postgresql.org/docs/13/functions-logical.html
|
||||
And [Expression]
|
||||
| -- | A logical @OR@ fold.
|
||||
--
|
||||
-- cf. https://www.postgresql.org/docs/13/functions-logical.html
|
||||
Or [Expression]
|
||||
| -- | A logical @NOT@ function.
|
||||
--
|
||||
-- cf. https://www.postgresql.org/docs/13/functions-logical.html
|
||||
Not Expression
|
||||
| -- | There must exist a row in the table specified by 'ExistsInTable' that
|
||||
-- satisfies the 'Expression'
|
||||
Exists ExistsInTable Expression
|
||||
| -- | Apply a 'BinaryComparisonOperator' that compares a column to a 'ComparisonValue';
|
||||
-- the result of this application will return "true" or "false" depending on the
|
||||
-- 'BinaryComparisonOperator' that's being applied.
|
||||
ApplyBinaryComparisonOperator BinaryComparisonOperator ComparisonColumn ComparisonValue
|
||||
| -- | Apply a 'BinaryArrayComparisonOperator' that evaluates a column with the
|
||||
-- 'BinaryArrayComparisonOperator' against an array of 'ComparisonValue's.
|
||||
-- The result of this application will return "true" or "false" depending
|
||||
-- on the 'BinaryArrayComparisonOperator' that's being applied.
|
||||
ApplyBinaryArrayComparisonOperator BinaryArrayComparisonOperator ComparisonColumn [Value]
|
||||
| -- | Apply a 'UnaryComparisonOperator' that evaluates a column with the
|
||||
-- 'UnaryComparisonOperator'; the result of this application will return "true" or
|
||||
-- "false" depending on the 'UnaryComparisonOperator' that's being applied.
|
||||
ApplyUnaryComparisonOperator UnaryComparisonOperator ComparisonColumn
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance Witch.From Expression API.Expression where
|
||||
from = \case
|
||||
And exprs -> API.And $ Witch.from <$> exprs
|
||||
Or exprs -> API.Or $ Witch.from <$> exprs
|
||||
Not expr -> API.Not $ Witch.from expr
|
||||
Exists inTable expr ->
|
||||
API.Exists (Witch.from inTable) (Witch.from expr)
|
||||
ApplyBinaryComparisonOperator op column value ->
|
||||
API.ApplyBinaryComparisonOperator (Witch.from op) (Witch.from column) (Witch.from value)
|
||||
ApplyUnaryComparisonOperator op column ->
|
||||
API.ApplyUnaryComparisonOperator (Witch.from op) (Witch.from column)
|
||||
ApplyBinaryArrayComparisonOperator op column values ->
|
||||
API.ApplyBinaryArrayComparisonOperator (Witch.from op) (Witch.from column) (Witch.from <$> values)
|
||||
|
||||
instance Witch.From API.Expression Expression where
|
||||
from = \case
|
||||
API.And exprs -> And $ Witch.from <$> exprs
|
||||
API.Or exprs -> Or $ Witch.from <$> exprs
|
||||
API.Not expr -> Not $ Witch.from expr
|
||||
API.Exists inTable expr ->
|
||||
Exists (Witch.from inTable) (Witch.from expr)
|
||||
API.ApplyBinaryComparisonOperator op column value ->
|
||||
ApplyBinaryComparisonOperator (Witch.from op) (Witch.from column) (Witch.from value)
|
||||
API.ApplyBinaryArrayComparisonOperator op column values ->
|
||||
ApplyBinaryArrayComparisonOperator (Witch.from op) (Witch.from column) (Witch.from <$> values)
|
||||
API.ApplyUnaryComparisonOperator op column ->
|
||||
ApplyUnaryComparisonOperator (Witch.from op) (Witch.from column)
|
||||
|
||||
-- | Which table should be subqueried to satisfy the 'Exists' expression
|
||||
data ExistsInTable
|
||||
= -- | The table is the one found by navigating the specified relationship
|
||||
-- from the current table
|
||||
RelatedTable IR.R.RelationshipName
|
||||
| -- | The table is completely unrelated to the current table (ie no join
|
||||
-- between the current table and the specified table should be performed
|
||||
-- and the whole of the specified table would be subqueried)
|
||||
UnrelatedTable IR.T.Name
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance Witch.From ExistsInTable API.ExistsInTable where
|
||||
from = \case
|
||||
RelatedTable relationshipName -> API.RelatedTable (Witch.from relationshipName)
|
||||
UnrelatedTable tableName -> API.UnrelatedTable (Witch.from tableName)
|
||||
|
||||
instance Witch.From API.ExistsInTable ExistsInTable where
|
||||
from = \case
|
||||
API.RelatedTable relationshipName -> RelatedTable (Witch.from relationshipName)
|
||||
API.UnrelatedTable tableName -> UnrelatedTable (Witch.from tableName)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | Operators which are typically applied to two 'Expression's (via the
|
||||
-- 'ApplyOperator' sub-'Expression') to perform a boolean comparison.
|
||||
--
|
||||
-- cf. https://www.postgresql.org/docs/13/functions-comparison.html
|
||||
--
|
||||
-- XXX(jkachmar): Comparison operations are tricky business!
|
||||
--
|
||||
-- We should define the semantics of these comparisons in a way that is clear
|
||||
-- and carefully considered.
|
||||
data BinaryComparisonOperator
|
||||
= LessThan
|
||||
| LessThanOrEqual
|
||||
| GreaterThan
|
||||
| GreaterThanOrEqual
|
||||
| Equal
|
||||
| CustomBinaryComparisonOperator Text
|
||||
deriving stock (Data, Eq, Generic, Ord, Show)
|
||||
deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)
|
||||
|
||||
instance Witch.From API.BinaryComparisonOperator BinaryComparisonOperator where
|
||||
from API.LessThan = LessThan
|
||||
from API.LessThanOrEqual = LessThanOrEqual
|
||||
from API.GreaterThan = GreaterThan
|
||||
from API.GreaterThanOrEqual = GreaterThanOrEqual
|
||||
from API.Equal = Equal
|
||||
from (API.CustomBinaryComparisonOperator name) = CustomBinaryComparisonOperator name
|
||||
|
||||
instance Witch.From BinaryComparisonOperator API.BinaryComparisonOperator where
|
||||
from LessThan = API.LessThan
|
||||
from LessThanOrEqual = API.LessThanOrEqual
|
||||
from GreaterThan = API.GreaterThan
|
||||
from GreaterThanOrEqual = API.GreaterThanOrEqual
|
||||
from Equal = API.Equal
|
||||
from (CustomBinaryComparisonOperator name) = API.CustomBinaryComparisonOperator name

data UnaryComparisonOperator
  = IsNull
  | CustomUnaryComparisonOperator Text
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)

instance Witch.From API.UnaryComparisonOperator UnaryComparisonOperator where
  from API.IsNull = IsNull
  from (API.CustomUnaryComparisonOperator name) = CustomUnaryComparisonOperator name

instance Witch.From UnaryComparisonOperator API.UnaryComparisonOperator where
  from IsNull = API.IsNull
  from (CustomUnaryComparisonOperator name) = API.CustomUnaryComparisonOperator name

data BinaryArrayComparisonOperator
  = In
  | CustomBinaryArrayComparisonOperator Text
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)

instance Witch.From API.BinaryArrayComparisonOperator BinaryArrayComparisonOperator where
  from API.In = In
  from (API.CustomBinaryArrayComparisonOperator name) = CustomBinaryArrayComparisonOperator name

instance Witch.From BinaryArrayComparisonOperator API.BinaryArrayComparisonOperator where
  from In = API.In
  from (CustomBinaryArrayComparisonOperator name) = API.CustomBinaryArrayComparisonOperator name

data ComparisonColumn = ComparisonColumn
  { _ccPath :: ColumnPath,
    _ccName :: IR.C.Name
  }
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)

instance Witch.From ComparisonColumn API.ComparisonColumn where
  from ComparisonColumn {..} =
    API.ComparisonColumn
      { _ccPath = Witch.from _ccPath,
        _ccName = Witch.from _ccName
      }

instance Witch.From API.ComparisonColumn ComparisonColumn where
  from API.ComparisonColumn {..} =
    ComparisonColumn
      { _ccPath = Witch.from _ccPath,
        _ccName = Witch.from _ccName
      }

data ColumnPath
  = CurrentTable
  | QueryTable
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)

instance Witch.From ColumnPath API.ColumnPath where
  from CurrentTable = API.CurrentTable
  from QueryTable = API.QueryTable

instance Witch.From API.ColumnPath ColumnPath where
  from API.CurrentTable = CurrentTable
  from API.QueryTable = QueryTable

data ComparisonValue
  = AnotherColumn ComparisonColumn
  | ScalarValue Value
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, FromJSON, Hashable, NFData, ToJSON)

instance Witch.From ComparisonValue API.ComparisonValue where
  from (AnotherColumn column) = API.AnotherColumn $ Witch.from column
  from (ScalarValue value) = API.ScalarValue value

instance Witch.From API.ComparisonValue ComparisonValue where
  from (API.AnotherColumn column) = AnotherColumn (Witch.from column)
  from (API.ScalarValue value) = ScalarValue value
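
-- A small usage sketch, not part of the original module: a comparison against
-- a column of the outer query's table, and its API image. The column name is
-- an arbitrary example and assumes 'IR.C.Name' is the column-tagged 'Name'
-- from "Hasura.Backends.DataConnector.IR.Name" (which has a 'Witch.From Text'
-- instance).
_exampleComparisonValue :: API.ComparisonValue
_exampleComparisonValue =
  Witch.from . AnotherColumn $
    ComparisonColumn
      { _ccPath = QueryTable,
        _ccName = Witch.from @Text "artist_id"
      }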

@ -1,33 +0,0 @@
module Hasura.Backends.DataConnector.IR.Function
  ( Name (..),
  )
where

import Data.Aeson (FromJSON, ToJSON, ToJSONKey (..))
import Data.Aeson.Types (toJSONKeyText)
import Data.List.NonEmpty qualified as NonEmpty
import Data.Text qualified as Text
import Data.Text.Extended (ToTxt (..))
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue
import Hasura.Incremental (Cacheable)
import Hasura.Prelude

newtype Name = Name {unName :: NonEmpty Text}
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving newtype
    ( Cacheable,
      FromJSON,
      Hashable,
      NFData,
      ToJSON
    )

instance ToJSONKey Name where
  toJSONKey = toJSONKeyText toTxt

instance ToTxt Name where
  toTxt = Text.intercalate "." . NonEmpty.toList . unName

instance ToErrorValue Name where
  toErrorValue = ErrorValue.squote . toTxt

@ -1,71 +0,0 @@
{-# LANGUAGE StandaloneKindSignatures #-}

module Hasura.Backends.DataConnector.IR.Name
  ( Name (..),
    NameType (..),
  )
where

--------------------------------------------------------------------------------

import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey)
import Data.Kind (Type)
import Data.Text.Extended (ToTxt)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Witch qualified

--------------------------------------------------------------------------------

-- | A tagged, opaque wrapper around 'Text' that provides a number of derived
-- instances (primarily as required by the @Backend@ typeclass).
--
-- This wrapper is indexed by 'NameType' so that different "names" can be
-- represented as semantically distinct types without the boilerplate of
-- actually defining these wrappers separately.
type Name :: NameType -> Type
newtype Name ty = Name {unName :: Text}
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving newtype
    ( Cacheable,
      FromJSON,
      FromJSONKey,
      Hashable,
      NFData,
      ToJSON,
      ToJSONKey,
      ToTxt
    )

instance ToErrorValue (Name ty) where
  toErrorValue = ErrorValue.squote . unName

instance Witch.From API.ColumnName (Name 'Column) where
  from (API.ColumnName n) = Name n

instance Witch.From (Name 'Column) API.ColumnName where
  from (Name n) = API.ColumnName n

instance Witch.From API.RelationshipName (Name 'Relationship) where
  from (API.RelationshipName n) = Name n

instance Witch.From (Name 'Relationship) API.RelationshipName where
  from (Name n) = API.RelationshipName n

instance Witch.From Text (Name 'Column) where
  from = coerce

instance Witch.From Text (Name 'Relationship) where
  from = coerce

-- | The "type" of "name" that the 'Name' type is meant to provide a textual
-- representation for.
--
-- In other words: an enumeration of all the types for which 'Name' acts as a
-- shared abstraction.
data NameType
  = Column
  | Relationship
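
-- A small usage sketch, not part of the original module: the same underlying
-- 'Text' can be tagged as a column name or a relationship name, and the two
-- tags never unify. The names below are arbitrary examples.
_exampleColumnName :: Name 'Column
_exampleColumnName = Witch.from @Text "title"

_exampleRelationshipName :: Name 'Relationship
_exampleRelationshipName = Witch.from @Text "tracks"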

@ -1,113 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}

module Hasura.Backends.DataConnector.IR.OrderBy
  ( OrderBy (..),
    OrderByRelation (..),
    OrderByElement (..),
    OrderByTarget (..),
    OrderDirection (..),
  )
where

--------------------------------------------------------------------------------

import Data.Aeson (ToJSON)
import Data.Aeson qualified as J
import Data.Bifunctor (bimap)
import Data.HashMap.Strict qualified as HashMap
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.Expression qualified as IR.E
import Hasura.Backends.DataConnector.IR.Relationships qualified as IR.R
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Witch qualified

--------------------------------------------------------------------------------

data OrderBy = OrderBy
  { _obRelations :: HashMap IR.R.RelationshipName OrderByRelation,
    _obElements :: NonEmpty OrderByElement
  }
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, Hashable, NFData)

instance ToJSON OrderBy where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From OrderBy API.OrderBy where
  from OrderBy {..} =
    API.OrderBy
      { _obRelations = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList _obRelations,
        _obElements = Witch.from <$> _obElements
      }

data OrderByRelation = OrderByRelation
  { _obrWhere :: Maybe IR.E.Expression,
    _obrSubrelations :: HashMap IR.R.RelationshipName OrderByRelation
  }
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, Hashable, NFData)

instance ToJSON OrderByRelation where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From OrderByRelation API.OrderByRelation where
  from OrderByRelation {..} =
    API.OrderByRelation
      { _obrWhere = Witch.from <$> _obrWhere,
        _obrSubrelations = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList _obrSubrelations
      }

data OrderByElement = OrderByElement
  { _obeTargetPath :: [IR.R.RelationshipName],
    _obeTarget :: OrderByTarget,
    _obeOrderDirection :: OrderDirection
  }
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, Hashable, NFData)

instance ToJSON OrderByElement where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From OrderByElement API.OrderByElement where
  from OrderByElement {..} =
    API.OrderByElement
      { _obeTargetPath = Witch.from <$> _obeTargetPath,
        _obeTarget = Witch.from _obeTarget,
        _obeOrderDirection = Witch.from _obeOrderDirection
      }

data OrderByTarget
  = OrderByColumn IR.C.Name
  | OrderByStarCountAggregate
  | OrderBySingleColumnAggregate IR.A.SingleColumnAggregate
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, Hashable, NFData)

instance ToJSON OrderByTarget where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From OrderByTarget API.OrderByTarget where
  from = \case
    OrderByColumn name -> API.OrderByColumn $ Witch.from name
    OrderByStarCountAggregate -> API.OrderByStarCountAggregate
    OrderBySingleColumnAggregate aggregate -> API.OrderBySingleColumnAggregate $ Witch.from aggregate

data OrderDirection
  = Ascending
  | Descending
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass (Cacheable, Hashable, NFData)

instance ToJSON OrderDirection where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From API.OrderDirection OrderDirection where
  from API.Ascending = Ascending
  from API.Descending = Descending

instance Witch.From OrderDirection API.OrderDirection where
  from Ascending = API.Ascending
  from Descending = API.Descending
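
-- A small usage sketch, not part of the original module: ordering the queried
-- table by one of its own columns (an empty relationship path) and converting
-- the element to the API type. The column name is an arbitrary example and
-- assumes 'IR.C.Name' carries the 'Witch.From Text' instance from the IR Name
-- module.
_exampleOrderByElement :: API.OrderByElement
_exampleOrderByElement =
  Witch.from
    OrderByElement
      { _obeTargetPath = [],
        _obeTarget = OrderByColumn (Witch.from @Text "title"),
        _obeOrderDirection = Ascending
      }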

@ -1,132 +0,0 @@
module Hasura.Backends.DataConnector.IR.Query
  ( QueryRequest (..),
    Query (..),
    Field (..),
    RelationshipField (..),
  )
where

import Data.Aeson (ToJSON)
import Data.Aeson qualified as J
import Data.Aeson.Key qualified as Key
import Data.Aeson.KeyMap qualified as KeyMap
import Data.Bifunctor (bimap)
import Data.HashMap.Strict qualified as HashMap
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.Expression qualified as IR.E
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
import Hasura.Backends.DataConnector.IR.Relationships qualified as IR.R
import Hasura.Backends.DataConnector.IR.Table qualified as IR.T
import Hasura.Prelude
import Hasura.RQL.Types.Common (FieldName (..))
import Witch qualified

-- | An abstract request to retrieve structured data from some source.
data QueryRequest = QueryRequest
  { _qrTable :: IR.T.Name,
    _qrTableRelationships :: IR.R.TableRelationships,
    _qrQuery :: Query
  }
  deriving stock (Data, Eq, Generic, Ord, Show)

-- NOTE: The 'ToJSON' instance is only intended for logging purposes.
instance ToJSON QueryRequest where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From QueryRequest API.QueryRequest where
  from QueryRequest {..} =
    API.QueryRequest
      { _qrTable = Witch.from _qrTable,
        _qrTableRelationships =
          ( \(sourceTableName, relationships) ->
              API.TableRelationships
                { _trSourceTable = Witch.from sourceTableName,
                  _trRelationships = HashMap.mapKeys Witch.from $ Witch.from <$> relationships
                }
          )
            <$> HashMap.toList (IR.R.unTableRelationships _qrTableRelationships),
        _qrQuery = Witch.from _qrQuery
      }

-- | The details of a query against a table
data Query = Query
  { -- Map of field name to Field definition.
    _qFields :: HashMap FieldName Field,
    -- Map of aggregate field name to Aggregate definition
    _qAggregates :: HashMap FieldName IR.A.Aggregate,
    -- | Optionally limit to N results.
    _qLimit :: Maybe Int,
    -- | Optionally offset from the Nth result.
    _qOffset :: Maybe Int,
    -- | Optionally constrain the results to satisfy some predicate.
    _qWhere :: Maybe IR.E.Expression,
    -- | Optionally order the results by the value of one or more fields.
    _qOrderBy :: Maybe IR.O.OrderBy
  }
  deriving stock (Data, Eq, Generic, Ord, Show)

-- NOTE: The 'ToJSON' instance is only intended for logging purposes.
instance ToJSON Query where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From Query API.Query where
  from Query {..} =
    API.Query
      { _qFields = memptyToNothing . KeyMap.fromList $ (bimap (Key.fromText . getFieldNameTxt) Witch.from) <$> HashMap.toList _qFields,
        _qAggregates = memptyToNothing . KeyMap.fromList $ (bimap (Key.fromText . getFieldNameTxt) Witch.from) <$> HashMap.toList _qAggregates,
        _qLimit = _qLimit,
        _qOffset = _qOffset,
        _qWhere = fmap Witch.from _qWhere,
        _qOrderBy = Witch.from <$> _qOrderBy
      }

memptyToNothing :: (Monoid m, Eq m) => m -> Maybe m
memptyToNothing m = if m == mempty then Nothing else Just m
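
-- A small usage sketch, not part of the original module: 'memptyToNothing' is
-- what keeps empty field and aggregate maps out of the serialized API query.
--
-- >>> memptyToNothing (KeyMap.empty :: KeyMap.KeyMap Int)
-- Nothing
-- >>> memptyToNothing [1 :: Int]
-- Just [1]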

-- | The specific fields that are targeted by a 'Query'.
--
-- A field conceptually falls under one of the two following categories:
--   1. a "column" within the data store that the query is being issued against
--   2. a "relationship", which indicates that the field is the result of
--      another query that must be executed on its own
-- NOTE: The 'ToJSON' instance is only intended for logging purposes.
data Field
  = ColumnField IR.C.Name
  | RelField RelationshipField
  deriving stock (Data, Eq, Generic, Ord, Show)

instance ToJSON Field where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From Field API.Field where
  from (ColumnField name) = API.ColumnField $ Witch.from name
  from (RelField relationshipField) = API.RelField $ Witch.from relationshipField
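
-- A small usage sketch, not part of the original module: the two shapes of
-- 'Field' side by side; a column field carries just a name, while a
-- relationship field nests a whole sub-'Query'. The names are arbitrary
-- examples and assume the 'Witch.From Text' instances on the tagged IR names.
_exampleColumnField :: Field
_exampleColumnField = ColumnField (Witch.from @Text "title")

_exampleRelationshipField :: Field
_exampleRelationshipField =
  RelField
    RelationshipField
      { _rfRelationship = Witch.from @Text "tracks",
        _rfQuery = Query mempty mempty Nothing Nothing Nothing Nothing
      }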

-- | A relationship consists of the following components:
--   - a sub-query, from the perspective that a relationship field will occur
--     within a broader 'Query'
--   - a join condition relating the data returned by the sub-query with that
--     of the broader 'Query'
--
-- cf. https://en.wikipedia.org/wiki/Join_(SQL)
--     https://www.postgresql.org/docs/13/tutorial-join.html
--     https://www.postgresql.org/docs/13/queries-table-expressions.html#QUERIES-FROM
--
-- NOTE: The 'ToJSON' instance is only intended for logging purposes.
data RelationshipField = RelationshipField
  { _rfRelationship :: IR.R.RelationshipName,
    _rfQuery :: Query
  }
  deriving stock (Eq, Ord, Show, Generic, Data)

instance ToJSON RelationshipField where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From RelationshipField API.RelationshipField where
  from RelationshipField {..} =
    API.RelationshipField
      { _rfRelationship = Witch.from _rfRelationship,
        _rfQuery = Witch.from _rfQuery
      }

@ -1,86 +0,0 @@
module Hasura.Backends.DataConnector.IR.Relationships
  ( RelationshipName,
    mkRelationshipName,
    TableRelationships (..),
    Relationship (..),
    RelationshipType (..),
    SourceColumnName,
    TargetColumnName,
  )
where

import Data.Aeson (ToJSON (..))
import Data.Aeson qualified as J
import Data.HashMap.Strict qualified as HashMap
import Data.Text.Extended (toTxt)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.Name qualified as IR.N
import Hasura.Backends.DataConnector.IR.Table qualified as IR.T
import Hasura.Prelude
import Hasura.RQL.Types.Common (RelName (..))
import Witch qualified

type RelationshipName = IR.N.Name 'IR.N.Relationship

mkRelationshipName :: RelName -> RelationshipName
mkRelationshipName relName = IR.N.Name @('IR.N.Relationship) $ toTxt relName

type SourceTableName = IR.T.Name

newtype TableRelationships = TableRelationships
  {unTableRelationships :: HashMap SourceTableName (HashMap RelationshipName Relationship)}
  deriving stock (Eq, Ord, Show, Data, Generic)
  deriving newtype (ToJSON)

instance Semigroup TableRelationships where
  (TableRelationships l) <> (TableRelationships r) = TableRelationships $ HashMap.unionWith HashMap.union l r

instance Monoid TableRelationships where
  mempty = TableRelationships mempty
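
-- A small usage sketch, not part of the original module: '(<>)' unions the
-- inner maps as well, so two relationships recorded against the same source
-- table both survive the merge. 'sourceTable', 'artistRel' and 'tracksRel'
-- stand in for values built elsewhere.
_exampleMerge :: SourceTableName -> Relationship -> Relationship -> TableRelationships
_exampleMerge sourceTable artistRel tracksRel =
  TableRelationships (HashMap.singleton sourceTable (HashMap.singleton (IR.N.Name @('IR.N.Relationship) "artist") artistRel))
    <> TableRelationships (HashMap.singleton sourceTable (HashMap.singleton (IR.N.Name @('IR.N.Relationship) "tracks") tracksRel))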

data Relationship = Relationship
  { _rTargetTable :: IR.T.Name,
    _rRelationshipType :: RelationshipType,
    _rColumnMapping :: HashMap SourceColumnName TargetColumnName
  }
  deriving stock (Data, Eq, Generic, Ord, Show)

instance ToJSON Relationship where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From Relationship API.Relationship where
  from Relationship {..} =
    API.Relationship
      { _rTargetTable = Witch.from _rTargetTable,
        _rRelationshipType = Witch.from _rRelationshipType,
        _rColumnMapping = HashMap.mapKeys Witch.from $ Witch.from <$> _rColumnMapping
      }

instance Witch.From API.Relationship Relationship where
  from API.Relationship {..} =
    Relationship
      { _rTargetTable = Witch.from _rTargetTable,
        _rRelationshipType = Witch.from _rRelationshipType,
        _rColumnMapping = HashMap.mapKeys Witch.from $ Witch.from <$> _rColumnMapping
      }

data RelationshipType = ObjectRelationship | ArrayRelationship
  deriving stock (Eq, Ord, Show, Generic, Data)

instance ToJSON RelationshipType where
  toJSON = J.genericToJSON J.defaultOptions

instance Witch.From RelationshipType API.RelationshipType where
  from = \case
    ObjectRelationship -> API.ObjectRelationship
    ArrayRelationship -> API.ArrayRelationship

instance Witch.From API.RelationshipType RelationshipType where
  from = \case
    API.ObjectRelationship -> ObjectRelationship
    API.ArrayRelationship -> ArrayRelationship

type SourceColumnName = IR.C.Name

type TargetColumnName = IR.C.Name

@ -1,76 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}

module Hasura.Backends.DataConnector.IR.Scalar.Type
  ( Type (..),
    fromGQLType,
  )
where

--------------------------------------------------------------------------------

import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey)
import Data.Text.Extended (ToTxt (..))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue
import Hasura.GraphQL.Parser.Name.TypeSystem
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Language.GraphQL.Draft.Syntax qualified as GQL
import Witch qualified

--------------------------------------------------------------------------------

-- | Types of scalar values
--
-- Used to specify the domain of legal values for a @Column@.
--
-- NOTE: This type shouldn't _need_ ser/de instances, but they're imposed by
-- the 'Backend' class.
--
-- XXX: Should we add a @Nullable _ :: Type@ constructor instead of using an
-- @isNullable@ flag in @Column@?
data Type
  = String
  | Number
  | Bool
  | Custom Text
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving anyclass
    ( Cacheable,
      FromJSON,
      FromJSONKey,
      Hashable,
      NFData,
      ToJSON,
      ToJSONKey
    )

instance ToTxt Type where
  toTxt = tshow

instance ToErrorValue Type where
  toErrorValue = ErrorValue.squote . tshow

instance Witch.From API.Type Type where
  from = \case
    API.StringTy -> String
    API.NumberTy -> Number
    API.BoolTy -> Bool
    API.CustomTy name -> Custom name

instance Witch.From Type API.Type where
  from = \case
    String -> API.StringTy
    Number -> API.NumberTy
    Bool -> API.BoolTy
    Custom name -> API.CustomTy name

fromGQLType :: GQL.Name -> Type
fromGQLType typeName =
  if
      | typeName == _String -> String
      | typeName == _Int -> Number
      | typeName == _Float -> Number
      | typeName == _Boolean -> Bool
      | otherwise -> Custom $ GQL.unName typeName
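
-- A small usage sketch, not part of the original module: the GraphQL built-in
-- scalars collapse onto the connector scalar types ('Int' and 'Float' both
-- become 'Number'); any other type name comes through as 'Custom'.
--
-- >>> fromGQLType _Int
-- Number
-- >>> fromGQLType _Boolean
-- Bool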

@ -1,30 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}

module Hasura.Backends.DataConnector.IR.Scalar.Value
  ( Literal (..),
    parseValue,
  )
where

import Data.Aeson (ToJSON, Value)
import Data.Aeson.Types qualified as J
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S.T
import Hasura.Incremental (Cacheable)
import Hasura.Prelude

data Literal
  = ValueLiteral Value
  | ArrayLiteral [Value]
  deriving stock (Eq, Show, Generic, Ord)
  deriving anyclass (Cacheable, Hashable, NFData, ToJSON)

parseValue :: IR.S.T.Type -> J.Value -> J.Parser Value
parseValue type' val =
  case (type', val) of
    (_, J.Null) -> pure J.Null
    (IR.S.T.String, value) -> J.String <$> J.parseJSON value
    (IR.S.T.Bool, value) -> J.Bool <$> J.parseJSON value
    (IR.S.T.Number, value) -> J.Number <$> J.parseJSON value
    -- For custom scalar types we don't know what subset of JSON values
    -- they accept, so we just accept any value.
    (IR.S.T.Custom _, value) -> pure value
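
-- A small usage sketch, not part of the original module: running the parser
-- with Aeson's 'parseMaybe'. A string is accepted for the 'String' type,
-- rejected for 'Number', and 'Null' passes through for every type.
--
-- >>> J.parseMaybe (parseValue IR.S.T.String) (J.String "hello")
-- Just (String "hello")
-- >>> J.parseMaybe (parseValue IR.S.T.Number) (J.String "hello")
-- Nothing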

@ -1,70 +0,0 @@
module Hasura.Backends.DataConnector.IR.Table
  ( -- * Table
    Name (..),

    -- * Foreign Key Constraints
    ConstraintName (..),
  )
where

--------------------------------------------------------------------------------

import Data.Aeson (FromJSON (..), ToJSON, ToJSONKey (..), withText)
import Data.Aeson.Types (toJSONKeyText)
import Data.List.NonEmpty qualified as NonEmpty
import Data.Text qualified as Text
import Data.Text.Extended (ToTxt (..))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue (ToErrorValue (..))
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Witch.From qualified as Witch

--------------------------------------------------------------------------------

-- | The fully qualified name of a table. The last element in the list is the table name
-- and all other elements represent namespacing of the table name.
-- For example, for a database that has schemas, the name would be '[<schema>,<table name>]'
newtype Name = Name {unName :: NonEmpty Text}
  deriving stock (Data, Eq, Generic, Ord, Show)
  deriving newtype (Cacheable, Hashable, NFData, ToJSON)

instance FromJSON Name where
  parseJSON value =
    Name <$> parseJSON value
      -- Fallback parsing of a single string to support older metadata
      <|> withText "Name" (\text -> pure . Name $ text :| []) value

instance ToJSONKey Name where
  toJSONKey = toJSONKeyText toTxt

instance Witch.From API.TableName Name where
  from (API.TableName n) = Name n

instance Witch.From Name API.TableName where
  from (Name n) = API.TableName n

instance ToTxt Name where
  toTxt = Text.intercalate "." . NonEmpty.toList . unName

instance ToErrorValue Name where
  toErrorValue = ErrorValue.squote . toTxt
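
-- A small usage sketch, not part of the original module: a schema-qualified
-- name renders with dot separators,
--
-- >>> toTxt (Name ("public" :| ["artists"]))
-- "public.artists"
--
-- while the 'FromJSON' fallback above still accepts the bare string
-- @"artists"@ that older metadata stored.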

--------------------------------------------------------------------------------

newtype ConstraintName = ConstraintName {unConstraintName :: Text}
  deriving stock (Eq, Ord, Show, Generic, Data)
  deriving newtype (NFData, Hashable, Cacheable, FromJSON, ToJSON)

instance Witch.From API.ConstraintName ConstraintName where
  from = coerce

instance Witch.From ConstraintName API.ConstraintName where
  from = coerce

instance ToTxt ConstraintName where
  toTxt = coerce

instance ToErrorValue ConstraintName where
  toErrorValue = ErrorValue.squote . coerce

@ -14,6 +14,7 @@ import Data.Aeson.Encoding qualified as JE
import Data.Aeson.Key qualified as K
import Data.Aeson.KeyMap (KeyMap)
import Data.Aeson.KeyMap qualified as KM
import Data.Bifunctor (Bifunctor (bimap))
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BL
import Data.HashMap.Strict qualified as HashMap
@ -21,20 +22,10 @@ import Data.List.NonEmpty qualified as NE
import Data.Semigroup (Min (..))
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Data.Text.Extended ((<<>), (<>>))
import Data.Text.Extended (toTxt, (<<>), (<>>))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Backend
import Hasura.Backends.DataConnector.Adapter.Types
import Hasura.Backends.DataConnector.IR.Aggregate qualified as IR.A
import Hasura.Backends.DataConnector.IR.Column qualified as IR.C
import Hasura.Backends.DataConnector.IR.Expression (UnaryComparisonOperator (CustomUnaryComparisonOperator))
import Hasura.Backends.DataConnector.IR.Expression qualified as IR.E
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR.O
import Hasura.Backends.DataConnector.IR.Query qualified as IR.Q
import Hasura.Backends.DataConnector.IR.Relationships qualified as IR.R
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as IR.S
import Hasura.Backends.DataConnector.IR.Scalar.Value qualified as IR.S
import Hasura.Backends.DataConnector.IR.Table qualified as IR.T
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
@ -53,13 +44,13 @@ import Witch qualified

--------------------------------------------------------------------------------

data QueryPlan = QueryPlan
{ _qpRequest :: IR.Q.QueryRequest,
{ _qpRequest :: API.QueryRequest,
_qpResponseReshaper :: forall m. (MonadError QErr m) => API.QueryResponse -> m J.Encoding
}

data FieldsAndAggregates = FieldsAndAggregates
{ _faaFields :: HashMap FieldName IR.Q.Field,
_faaAggregates :: HashMap FieldName IR.A.Aggregate
{ _faaFields :: HashMap FieldName API.Field,
_faaAggregates :: HashMap FieldName API.Aggregate
}
deriving stock (Show, Eq)

@ -92,12 +83,22 @@ prefixWith = FieldPrefix . Just
applyPrefix :: FieldPrefix -> FieldName -> FieldName
applyPrefix (FieldPrefix fieldNamePrefix) fieldName = maybe fieldName (\prefix -> prefix <> "_" <> fieldName) fieldNamePrefix
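
-- A small usage sketch, not part of the original: aggregate sub-fields get
-- their parent field name folded into the flat field key sent to the agent,
-- while 'noPrefix' leaves plain row fields untouched.
--
-- >>> getFieldNameTxt (applyPrefix (prefixWith (FieldName "aggregate")) (FieldName "count"))
-- "aggregate_count"
-- >>> getFieldNameTxt (applyPrefix noPrefix (FieldName "title"))
-- "title"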

-- | Extract the 'IR.Q' from a 'Plan' and render it as 'Text'.
newtype TableRelationships = TableRelationships
{unTableRelationships :: HashMap API.TableName (HashMap API.RelationshipName API.Relationship)}
deriving stock (Eq, Show)

instance Semigroup TableRelationships where
(TableRelationships l) <> (TableRelationships r) = TableRelationships $ HashMap.unionWith HashMap.union l r

instance Monoid TableRelationships where
mempty = TableRelationships mempty

-- | Render a 'API.QueryRequest' as 'Text'.
--
-- NOTE: This is for logging and debug purposes only.
renderQuery :: IR.Q.QueryRequest -> Text
renderQuery :: API.QueryRequest -> Text
renderQuery =
TE.decodeUtf8 . BL.toStrict . J.encode . Witch.into @API.QueryRequest
TE.decodeUtf8 . BL.toStrict . J.encode

-- | Map a 'QueryDB 'DataConnector' term into a 'Plan'
mkPlan ::
@ -113,7 +114,7 @@ mkPlan session (SourceConfig {}) ir = do
where
translateQueryDB ::
QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
m IR.Q.QueryRequest
m API.QueryRequest
translateQueryDB =
\case
QDBMultipleRows annSelect -> translateAnnSelectToQueryRequest (translateAnnFields noPrefix) annSelect
@ -121,35 +122,36 @@ mkPlan session (SourceConfig {}) ir = do
QDBAggregation annSelect -> translateAnnSelectToQueryRequest translateTableAggregateFields annSelect

translateAnnSelectToQueryRequest ::
(IR.T.Name -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT IR.R.TableRelationships m FieldsAndAggregates) ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
m IR.Q.QueryRequest
m API.QueryRequest
translateAnnSelectToQueryRequest translateFieldsAndAggregates selectG = do
tableName <- extractTableName selectG
(query, tableRelationships) <- CPS.runWriterT (translateAnnSelect translateFieldsAndAggregates tableName selectG)
(query, (TableRelationships tableRelationships)) <- CPS.runWriterT (translateAnnSelect translateFieldsAndAggregates tableName selectG)
let apiTableRelationships = uncurry API.TableRelationships <$> HashMap.toList tableRelationships
pure $
IR.Q.QueryRequest
API.QueryRequest
{ _qrTable = tableName,
_qrTableRelationships = tableRelationships,
_qrTableRelationships = apiTableRelationships,
_qrQuery = query
}

extractTableName :: AnnSelectG 'DataConnector fieldsType valueType -> m IR.T.Name
extractTableName :: AnnSelectG 'DataConnector fieldsType valueType -> m API.TableName
extractTableName selectG =
case _asnFrom selectG of
FromTable tn -> pure tn
FromTable tn -> pure $ Witch.from tn
FromIdentifier _ -> throw400 NotSupported "AnnSelectG: FromIdentifier not supported"
FromFunction {} -> throw400 NotSupported "AnnSelectG: FromFunction not supported"

recordTableRelationship :: IR.T.Name -> IR.R.RelationshipName -> IR.R.Relationship -> CPS.WriterT IR.R.TableRelationships m ()
recordTableRelationship :: API.TableName -> API.RelationshipName -> API.Relationship -> CPS.WriterT TableRelationships m ()
recordTableRelationship sourceTableName relationshipName relationship =
CPS.tell . IR.R.TableRelationships $ HashMap.singleton sourceTableName (HashMap.singleton relationshipName relationship)
CPS.tell . TableRelationships $ HashMap.singleton sourceTableName (HashMap.singleton relationshipName relationship)

translateAnnSelect ::
(IR.T.Name -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT IR.R.TableRelationships m FieldsAndAggregates) ->
IR.T.Name ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
API.TableName ->
AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m IR.Q.Query
CPS.WriterT TableRelationships m API.Query
translateAnnSelect translateFieldsAndAggregates tableName selectG = do
FieldsAndAggregates {..} <- translateFieldsAndAggregates tableName (_asnFields selectG)
let whereClauseWithPermissions =
@ -159,9 +161,9 @@ mkPlan session (SourceConfig {}) ir = do
whereClause <- translateBoolExpToExpression tableName whereClauseWithPermissions
orderBy <- traverse (translateOrderBy tableName) (_saOrderBy $ _asnArgs selectG)
pure
IR.Q.Query
{ _qFields = _faaFields,
_qAggregates = _faaAggregates,
API.Query
{ _qFields = memptyToNothing . KM.fromList $ (first (K.fromText . getFieldNameTxt)) <$> HashMap.toList _faaFields,
_qAggregates = memptyToNothing . KM.fromList $ (first (K.fromText . getFieldNameTxt)) <$> HashMap.toList _faaAggregates,
_qLimit =
fmap getMin $
foldMap
@ -175,96 +177,96 @@ mkPlan session (SourceConfig {}) ir = do
}

translateOrderBy ::
IR.T.Name ->
API.TableName ->
NE.NonEmpty (AnnotatedOrderByItemG 'DataConnector (UnpreparedValue 'DataConnector)) ->
CPS.WriterT IR.R.TableRelationships m IR.O.OrderBy
CPS.WriterT TableRelationships m API.OrderBy
translateOrderBy sourceTableName orderByItems = do
orderByElementsAndRelations <- for orderByItems \OrderByItemG {..} -> do
let orderDirection = fromMaybe IR.O.Ascending obiType
let orderDirection = maybe API.Ascending Witch.from obiType
translateOrderByElement sourceTableName orderDirection [] obiColumn
relations <- lift . mergeOrderByRelations $ snd <$> orderByElementsAndRelations
pure
IR.O.OrderBy
API.OrderBy
{ _obRelations = relations,
_obElements = fst <$> orderByElementsAndRelations
}

translateOrderByElement ::
IR.T.Name ->
IR.O.OrderDirection ->
[IR.R.RelationshipName] ->
API.TableName ->
API.OrderDirection ->
[API.RelationshipName] ->
AnnotatedOrderByElement 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m (IR.O.OrderByElement, HashMap IR.R.RelationshipName IR.O.OrderByRelation)
CPS.WriterT TableRelationships m (API.OrderByElement, HashMap API.RelationshipName API.OrderByRelation)
translateOrderByElement sourceTableName orderDirection targetReversePath = \case
AOCColumn (ColumnInfo {..}) ->
pure
( IR.O.OrderByElement
( API.OrderByElement
{ _obeTargetPath = reverse targetReversePath,
_obeTarget = IR.O.OrderByColumn ciColumn,
_obeTarget = API.OrderByColumn $ Witch.from ciColumn,
_obeOrderDirection = orderDirection
},
mempty
)
AOCObjectRelation relationshipInfo filterExp orderByElement -> do
(relationshipName, IR.R.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(translatedOrderByElement, subOrderByRelations) <- translateOrderByElement _rTargetTable orderDirection (relationshipName : targetReversePath) orderByElement

targetTableWhereExp <- translateBoolExpToExpression _rTargetTable filterExp
let orderByRelations = HashMap.fromList [(relationshipName, IR.O.OrderByRelation targetTableWhereExp subOrderByRelations)]
let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp subOrderByRelations)]

pure (translatedOrderByElement, orderByRelations)
AOCArrayAggregation relationshipInfo filterExp aggregateOrderByElement -> do
(relationshipName, IR.R.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
orderByTarget <- case aggregateOrderByElement of
AAOCount ->
pure IR.O.OrderByStarCountAggregate
pure API.OrderByStarCountAggregate
AAOOp aggFunctionTxt ColumnInfo {..} -> do
aggFunction <- lift $ translateSingleColumnAggregateFunction aggFunctionTxt
pure . IR.O.OrderBySingleColumnAggregate $ IR.A.SingleColumnAggregate aggFunction ciColumn
pure . API.OrderBySingleColumnAggregate $ API.SingleColumnAggregate aggFunction $ Witch.from ciColumn

let translatedOrderByElement =
IR.O.OrderByElement
API.OrderByElement
{ _obeTargetPath = reverse (relationshipName : targetReversePath),
_obeTarget = orderByTarget,
_obeOrderDirection = orderDirection
}

targetTableWhereExp <- translateBoolExpToExpression _rTargetTable filterExp
let orderByRelations = HashMap.fromList [(relationshipName, IR.O.OrderByRelation targetTableWhereExp mempty)]
let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp mempty)]
pure (translatedOrderByElement, orderByRelations)

mergeOrderByRelations ::
Foldable f =>
f (HashMap IR.R.RelationshipName IR.O.OrderByRelation) ->
m (HashMap IR.R.RelationshipName IR.O.OrderByRelation)
f (HashMap API.RelationshipName API.OrderByRelation) ->
m (HashMap API.RelationshipName API.OrderByRelation)
mergeOrderByRelations orderByRelationsList =
foldM mergeMap mempty orderByRelationsList
where
mergeMap :: HashMap IR.R.RelationshipName IR.O.OrderByRelation -> HashMap IR.R.RelationshipName IR.O.OrderByRelation -> m (HashMap IR.R.RelationshipName IR.O.OrderByRelation)
mergeMap :: HashMap API.RelationshipName API.OrderByRelation -> HashMap API.RelationshipName API.OrderByRelation -> m (HashMap API.RelationshipName API.OrderByRelation)
mergeMap left right = foldM (\targetMap (relName, orderByRel) -> HashMap.alterF (maybe (pure $ Just orderByRel) (fmap Just . mergeOrderByRelation orderByRel)) relName targetMap) left $ HashMap.toList right

mergeOrderByRelation :: IR.O.OrderByRelation -> IR.O.OrderByRelation -> m IR.O.OrderByRelation
mergeOrderByRelation :: API.OrderByRelation -> API.OrderByRelation -> m API.OrderByRelation
mergeOrderByRelation right left =
if IR.O._obrWhere left == IR.O._obrWhere right
if API._obrWhere left == API._obrWhere right
then do
mergedSubrelations <- mergeMap (IR.O._obrSubrelations left) (IR.O._obrSubrelations right)
pure $ IR.O.OrderByRelation (IR.O._obrWhere left) mergedSubrelations
mergedSubrelations <- mergeMap (API._obrSubrelations left) (API._obrSubrelations right)
pure $ API.OrderByRelation (API._obrWhere left) mergedSubrelations
else throw500 "mergeOrderByRelations: Differing filter expressions found for the same table"

recordTableRelationshipFromRelInfo ::
IR.T.Name ->
API.TableName ->
RelInfo 'DataConnector ->
CPS.WriterT IR.R.TableRelationships m (IR.R.RelationshipName, IR.R.Relationship)
CPS.WriterT TableRelationships m (API.RelationshipName, API.Relationship)
recordTableRelationshipFromRelInfo sourceTableName RelInfo {..} = do
let relationshipName = IR.R.mkRelationshipName riName
let relationshipName = mkRelationshipName riName
let relationshipType = case riType of
ObjRel -> IR.R.ObjectRelationship
ArrRel -> IR.R.ArrayRelationship
ObjRel -> API.ObjectRelationship
ArrRel -> API.ArrayRelationship
let relationship =
IR.R.Relationship
{ _rTargetTable = riRTable,
API.Relationship
{ _rTargetTable = Witch.from riRTable,
_rRelationshipType = relationshipType,
_rColumnMapping = riMapping
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList riMapping
}
recordTableRelationship
sourceTableName
@ -274,9 +276,9 @@ mkPlan session (SourceConfig {}) ir = do

translateAnnFields ::
FieldPrefix ->
IR.T.Name ->
API.TableName ->
AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m FieldsAndAggregates
CPS.WriterT TableRelationships m FieldsAndAggregates
translateAnnFields fieldNamePrefix sourceTableName fields = do
translatedFields <- traverse (traverse (translateAnnField sourceTableName)) fields
let translatedFields' = HashMap.fromList . catMaybes $ (\(fieldName, field) -> (applyPrefix fieldNamePrefix fieldName,) <$> field) <$> translatedFields
@ -286,35 +288,35 @@ mkPlan session (SourceConfig {}) ir = do
mempty

translateAnnField ::
IR.T.Name ->
API.TableName ->
AnnFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m (Maybe IR.Q.Field)
CPS.WriterT TableRelationships m (Maybe API.Field)
translateAnnField sourceTableName = \case
AFColumn colField ->
-- TODO: make sure certain fields in colField are not in use, since we don't
-- support them
pure . Just . IR.Q.ColumnField $ _acfColumn colField
pure . Just . API.ColumnField . Witch.from $ _acfColumn colField
AFObjectRelation objRel -> do
let targetTable = _aosTableFrom (_aarAnnSelect objRel)
let relationshipName = IR.R.mkRelationshipName $ _aarRelationshipName objRel
let targetTable = Witch.from $ _aosTableFrom (_aarAnnSelect objRel)
let relationshipName = mkRelationshipName $ _aarRelationshipName objRel
FieldsAndAggregates {..} <- translateAnnFields noPrefix targetTable (_aosFields (_aarAnnSelect objRel))
whereClause <- translateBoolExpToExpression targetTable (_aosTableFilter (_aarAnnSelect objRel))

recordTableRelationship
sourceTableName
relationshipName
IR.R.Relationship
API.Relationship
{ _rTargetTable = targetTable,
_rRelationshipType = IR.R.ObjectRelationship,
_rColumnMapping = _aarColumnMapping objRel
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping objRel)
}

pure . Just . IR.Q.RelField $
IR.Q.RelationshipField
pure . Just . API.RelField $
API.RelationshipField
relationshipName
( IR.Q.Query
{ _qFields = _faaFields,
_qAggregates = _faaAggregates,
( API.Query
{ _qFields = memptyToNothing . KM.fromList $ (first (K.fromText . getFieldNameTxt)) <$> HashMap.toList _faaFields,
_qAggregates = memptyToNothing . KM.fromList $ (first (K.fromText . getFieldNameTxt)) <$> HashMap.toList _faaAggregates,
_qWhere = whereClause,
_qLimit = Nothing,
_qOffset = Nothing,
@ -332,41 +334,41 @@ mkPlan session (SourceConfig {}) ir = do
pure Nothing

translateArrayRelationSelect ::
IR.T.Name ->
(IR.T.Name -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT IR.R.TableRelationships m FieldsAndAggregates) ->
API.TableName ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector)) ->
CPS.WriterT IR.R.TableRelationships m IR.Q.Field
CPS.WriterT TableRelationships m API.Field
translateArrayRelationSelect sourceTableName translateFieldsAndAggregates arrRel = do
targetTable <- lift $ extractTableName (_aarAnnSelect arrRel)
query <- translateAnnSelect translateFieldsAndAggregates targetTable (_aarAnnSelect arrRel)
let relationshipName = IR.R.mkRelationshipName $ _aarRelationshipName arrRel
let relationshipName = mkRelationshipName $ _aarRelationshipName arrRel

recordTableRelationship
sourceTableName
relationshipName
IR.R.Relationship
API.Relationship
{ _rTargetTable = targetTable,
_rRelationshipType = IR.R.ArrayRelationship,
_rColumnMapping = _aarColumnMapping arrRel
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping arrRel)
}

pure . IR.Q.RelField $
IR.Q.RelationshipField
pure . API.RelField $
API.RelationshipField
relationshipName
query

translateTableAggregateFields ::
IR.T.Name ->
API.TableName ->
TableAggregateFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m FieldsAndAggregates
CPS.WriterT TableRelationships m FieldsAndAggregates
translateTableAggregateFields sourceTableName fields = do
mconcat <$> traverse (uncurry (translateTableAggregateField sourceTableName)) fields

translateTableAggregateField ::
IR.T.Name ->
API.TableName ->
FieldName ->
TableAggregateFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m FieldsAndAggregates
CPS.WriterT TableRelationships m FieldsAndAggregates
translateTableAggregateField sourceTableName fieldName = \case
TAFAgg aggregateFields -> do
let fieldNamePrefix = prefixWith fieldName
@ -387,9 +389,15 @@ mkPlan session (SourceConfig {}) ir = do
FieldPrefix ->
FieldName ->
AggregateField 'DataConnector ->
m (HashMap FieldName IR.A.Aggregate)
m (HashMap FieldName API.Aggregate)
translateAggregateField fieldPrefix fieldName = \case
AFCount countAggregate -> pure $ HashMap.singleton (applyPrefix fieldPrefix fieldName) (IR.A.Count countAggregate)
AFCount countAggregate ->
let aggregate =
case countAggregate of
StarCount -> API.StarCount
ColumnCount column -> API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = False}
ColumnDistinctCount column -> API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = True}
in pure $ HashMap.singleton (applyPrefix fieldPrefix fieldName) aggregate
AFOp AggregateOp {..} -> do
let fieldPrefix' = fieldPrefix <> prefixWith fieldName
aggFunction <- translateSingleColumnAggregateFunction _aoOp
@ -397,7 +405,7 @@ mkPlan session (SourceConfig {}) ir = do
fmap (HashMap.fromList . catMaybes) . forM _aoFields $ \(columnFieldName, columnField) ->
case columnField of
CFCol column _columnType ->
pure . Just $ (applyPrefix fieldPrefix' columnFieldName, IR.A.SingleColumn $ IR.A.SingleColumnAggregate aggFunction column)
pure . Just $ (applyPrefix fieldPrefix' columnFieldName, API.SingleColumn . API.SingleColumnAggregate aggFunction $ Witch.from column)
CFExp _txt ->
-- We ignore literal text fields (we don't send them to the data connector agent)
-- and add them back to the response JSON when we reshape what the agent returns
@ -409,25 +417,25 @@ mkPlan session (SourceConfig {}) ir = do
-- to us
pure mempty

translateSingleColumnAggregateFunction :: Text -> m IR.A.SingleColumnAggregateFunction
translateSingleColumnAggregateFunction :: Text -> m API.SingleColumnAggregateFunction
translateSingleColumnAggregateFunction = \case
"avg" -> pure IR.A.Average
"max" -> pure IR.A.Max
"min" -> pure IR.A.Min
"stddev_pop" -> pure IR.A.StandardDeviationPopulation
"stddev_samp" -> pure IR.A.StandardDeviationSample
"stddev" -> pure IR.A.StandardDeviationSample
"sum" -> pure IR.A.Sum
"var_pop" -> pure IR.A.VariancePopulation
"var_samp" -> pure IR.A.VarianceSample
"variance" -> pure IR.A.VarianceSample
"avg" -> pure API.Average
"max" -> pure API.Max
"min" -> pure API.Min
"stddev_pop" -> pure API.StandardDeviationPopulation
"stddev_samp" -> pure API.StandardDeviationSample
"stddev" -> pure API.StandardDeviationSample
"sum" -> pure API.Sum
"var_pop" -> pure API.VariancePopulation
"var_samp" -> pure API.VarianceSample
"variance" -> pure API.VarianceSample
unknownFunc -> throw500 $ "translateSingleColumnAggregateFunction: Unknown aggregate function encountered: " <> unknownFunc

prepareLiterals ::
UnpreparedValue 'DataConnector ->
m IR.S.Literal
m Literal
prepareLiterals (UVLiteral literal) = pure $ literal
prepareLiterals (UVParameter _ e) = pure (IR.S.ValueLiteral (cvValue e))
prepareLiterals (UVParameter _ e) = pure (ValueLiteral (cvValue e))
prepareLiterals UVSession = throw400 NotSupported "prepareLiterals: UVSession"
prepareLiterals (UVSessionVar sessionVarType sessionVar) = do
textValue <-
@ -435,23 +443,23 @@ mkPlan session (SourceConfig {}) ir = do
`onNothing` throw400 NotSupported ("prepareLiterals: session var not found: " <>> sessionVar)
parseSessionVariable sessionVar sessionVarType textValue

parseSessionVariable :: SessionVariable -> SessionVarType 'DataConnector -> Text -> m IR.S.Literal
parseSessionVariable :: SessionVariable -> SessionVarType 'DataConnector -> Text -> m Literal
parseSessionVariable varName varType varValue = do
case varType of
CollectableTypeScalar scalarType ->
case scalarType of
IR.S.String -> pure . IR.S.ValueLiteral $ J.String varValue
IR.S.Number -> parseValue (IR.S.ValueLiteral . J.Number) "number value"
IR.S.Bool -> parseValue (IR.S.ValueLiteral . J.Bool) "boolean value"
IR.S.Custom customTypeName -> parseValue IR.S.ValueLiteral (customTypeName <> " JSON value")
StringTy -> pure . ValueLiteral $ J.String varValue
NumberTy -> parseValue (ValueLiteral . J.Number) "number value"
BoolTy -> parseValue (ValueLiteral . J.Bool) "boolean value"
CustomTy customTypeName -> parseValue ValueLiteral (customTypeName <> " JSON value")
CollectableTypeArray scalarType ->
case scalarType of
IR.S.String -> parseValue (IR.S.ArrayLiteral . fmap J.String) "JSON array of strings"
IR.S.Number -> parseValue (IR.S.ArrayLiteral . fmap J.Number) "JSON array of numbers"
IR.S.Bool -> parseValue (IR.S.ArrayLiteral . fmap J.Bool) "JSON array of booleans"
IR.S.Custom customTypeName -> parseValue IR.S.ArrayLiteral ("JSON array of " <> customTypeName <> " JSON values")
StringTy -> parseValue (ArrayLiteral . fmap J.String) "JSON array of strings"
NumberTy -> parseValue (ArrayLiteral . fmap J.Number) "JSON array of numbers"
BoolTy -> parseValue (ArrayLiteral . fmap J.Bool) "JSON array of booleans"
CustomTy customTypeName -> parseValue ArrayLiteral ("JSON array of " <> customTypeName <> " JSON values")
where
parseValue :: J.FromJSON a => (a -> IR.S.Literal) -> Text -> m IR.S.Literal
parseValue :: J.FromJSON a => (a -> Literal) -> Text -> m Literal
parseValue toLiteral description =
toLiteral <$> J.eitherDecodeStrict' valValueBS
`onLeft` (\err -> throw400 ParseFailed ("Expected " <> description <> " for session variable " <> varName <<> ". " <> T.pack err))
@ -460,99 +468,100 @@ mkPlan session (SourceConfig {}) ir = do
valValueBS = TE.encodeUtf8 varValue

translateBoolExpToExpression ::
IR.T.Name ->
API.TableName ->
AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m (Maybe IR.E.Expression)
CPS.WriterT TableRelationships m (Maybe API.Expression)
translateBoolExpToExpression sourceTableName boolExp = do
removeAlwaysTrueExpression <$> translateBoolExp sourceTableName boolExp

translateBoolExp ::
IR.T.Name ->
API.TableName ->
AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT IR.R.TableRelationships m IR.E.Expression
CPS.WriterT TableRelationships m API.Expression
translateBoolExp sourceTableName = \case
BoolAnd xs ->
mkIfZeroOrMany IR.E.And . mapMaybe removeAlwaysTrueExpression <$> traverse (translateBoolExp sourceTableName) xs
mkIfZeroOrMany API.And . mapMaybe removeAlwaysTrueExpression <$> traverse (translateBoolExp sourceTableName) xs
BoolOr xs ->
mkIfZeroOrMany IR.E.Or . mapMaybe removeAlwaysFalseExpression <$> traverse (translateBoolExp sourceTableName) xs
mkIfZeroOrMany API.Or . mapMaybe removeAlwaysFalseExpression <$> traverse (translateBoolExp sourceTableName) xs
BoolNot x ->
IR.E.Not <$> (translateBoolExp sourceTableName) x
API.Not <$> (translateBoolExp sourceTableName) x
BoolField (AVColumn c xs) ->
lift $ mkIfZeroOrMany IR.E.And <$> traverse (translateOp (ciColumn c)) xs
lift $ mkIfZeroOrMany API.And <$> traverse (translateOp (Witch.from $ ciColumn c)) xs
BoolField (AVRelationship relationshipInfo boolExp) -> do
(relationshipName, IR.R.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
IR.E.Exists (IR.E.RelatedTable relationshipName) <$> translateBoolExp _rTargetTable boolExp
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
API.Exists (API.RelatedTable relationshipName) <$> translateBoolExp _rTargetTable boolExp
BoolExists GExists {..} ->
IR.E.Exists (IR.E.UnrelatedTable _geTable) <$> translateBoolExp _geTable _geWhere
let tableName = Witch.from _geTable
in API.Exists (API.UnrelatedTable tableName) <$> translateBoolExp tableName _geWhere
where
-- Makes an 'IR.E.Expression' like 'IR.E.And' if there is zero or many input expressions otherwise
-- just returns the singleton expression. This helps remove redundant 'IR.E.And' etcs from the expression.
mkIfZeroOrMany :: ([IR.E.Expression] -> IR.E.Expression) -> [IR.E.Expression] -> IR.E.Expression
-- Makes an 'API.Expression' like 'API.And' if there is zero or many input expressions otherwise
-- just returns the singleton expression. This helps remove redundant 'API.And' etcs from the expression.
mkIfZeroOrMany :: ([API.Expression] -> API.Expression) -> [API.Expression] -> API.Expression
mkIfZeroOrMany mk = \case
[singleExp] -> singleExp
zeroOrManyExps -> mk zeroOrManyExps

removeAlwaysTrueExpression :: IR.E.Expression -> Maybe IR.E.Expression
removeAlwaysTrueExpression :: API.Expression -> Maybe API.Expression
removeAlwaysTrueExpression = \case
IR.E.And [] -> Nothing
IR.E.Not (IR.E.Or []) -> Nothing
API.And [] -> Nothing
API.Not (API.Or []) -> Nothing
other -> Just other

removeAlwaysFalseExpression :: IR.E.Expression -> Maybe IR.E.Expression
removeAlwaysFalseExpression :: API.Expression -> Maybe API.Expression
removeAlwaysFalseExpression = \case
IR.E.Or [] -> Nothing
IR.E.Not (IR.E.And []) -> Nothing
API.Or [] -> Nothing
API.Not (API.And []) -> Nothing
other -> Just other
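
-- A small usage sketch, not part of the original: 'API.And []' is trivially
-- true and 'API.Or []' trivially false, so the helpers above drop filters
-- that reduce to either instead of sending a no-op expression to the agent.
--
-- >>> isNothing (removeAlwaysTrueExpression (API.And []))
-- True
-- >>> isJust (removeAlwaysFalseExpression (API.And []))
-- True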

translateOp ::
IR.C.Name ->
API.ColumnName ->
OpExpG 'DataConnector (UnpreparedValue 'DataConnector) ->
m IR.E.Expression
m API.Expression
translateOp columnName opExp = do
preparedOpExp <- traverse prepareLiterals $ opExp
case preparedOpExp of
AEQ _ (IR.S.ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar IR.E.Equal value
AEQ _ (IR.S.ArrayLiteral _array) ->
AEQ _ (ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar API.Equal value
AEQ _ (ArrayLiteral _array) ->
throw400 NotSupported "Array literals not supported for AEQ operator"
ANE _ (IR.S.ValueLiteral value) ->
pure . IR.E.Not $ mkApplyBinaryComparisonOperatorToScalar IR.E.Equal value
ANE _ (IR.S.ArrayLiteral _array) ->
ANE _ (ValueLiteral value) ->
pure . API.Not $ mkApplyBinaryComparisonOperatorToScalar API.Equal value
ANE _ (ArrayLiteral _array) ->
throw400 NotSupported "Array literals not supported for ANE operator"
AGT (IR.S.ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar IR.E.GreaterThan value
AGT (IR.S.ArrayLiteral _array) ->
AGT (ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar API.GreaterThan value
AGT (ArrayLiteral _array) ->
throw400 NotSupported "Array literals not supported for AGT operator"
ALT (IR.S.ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar IR.E.LessThan value
ALT (IR.S.ArrayLiteral _array) ->
ALT (ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar API.LessThan value
ALT (ArrayLiteral _array) ->
throw400 NotSupported "Array literals not supported for ALT operator"
AGTE (IR.S.ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar IR.E.GreaterThanOrEqual value
AGTE (IR.S.ArrayLiteral _array) ->
AGTE (ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar API.GreaterThanOrEqual value
AGTE (ArrayLiteral _array) ->
throw400 NotSupported "Array literals not supported for AGTE operator"
ALTE (IR.S.ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar IR.E.LessThanOrEqual value
ALTE (IR.S.ArrayLiteral _array) ->
ALTE (ValueLiteral value) ->
pure $ mkApplyBinaryComparisonOperatorToScalar API.LessThanOrEqual value
ALTE (ArrayLiteral _array) ->
throw400 NotSupported "Array literals not supported for ALTE operator"
ANISNULL ->
pure $ IR.E.ApplyUnaryComparisonOperator IR.E.IsNull currentComparisonColumn
pure $ API.ApplyUnaryComparisonOperator API.IsNull currentComparisonColumn
ANISNOTNULL ->
pure $ IR.E.Not (IR.E.ApplyUnaryComparisonOperator IR.E.IsNull currentComparisonColumn)
pure $ API.Not (API.ApplyUnaryComparisonOperator API.IsNull currentComparisonColumn)
AIN literal -> pure $ inOperator literal
ANIN literal -> pure . IR.E.Not $ inOperator literal
ANIN literal -> pure . API.Not $ inOperator literal
CEQ rootOrCurrentColumn ->
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn IR.E.Equal rootOrCurrentColumn
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.Equal rootOrCurrentColumn
CNE rootOrCurrentColumn ->
pure $ IR.E.Not $ mkApplyBinaryComparisonOperatorToAnotherColumn IR.E.Equal rootOrCurrentColumn
pure $ API.Not $ mkApplyBinaryComparisonOperatorToAnotherColumn API.Equal rootOrCurrentColumn
CGT rootOrCurrentColumn ->
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn IR.E.GreaterThan rootOrCurrentColumn
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.GreaterThan rootOrCurrentColumn
CLT rootOrCurrentColumn ->
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn IR.E.LessThan rootOrCurrentColumn
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.LessThan rootOrCurrentColumn
CGTE rootOrCurrentColumn ->
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn IR.E.GreaterThanOrEqual rootOrCurrentColumn
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.GreaterThanOrEqual rootOrCurrentColumn
CLTE rootOrCurrentColumn ->
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn IR.E.LessThanOrEqual rootOrCurrentColumn
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.LessThanOrEqual rootOrCurrentColumn
ALIKE _literal ->
throw400 NotSupported "The ALIKE operator is not supported by the Data Connector backend"
ANLIKE _literal ->
@ -560,38 +569,38 @@ mkPlan session (SourceConfig {}) ir = do
ACast _literal ->
throw400 NotSupported "The ACast operator is not supported by the Data Connector backend"
ABackendSpecific CustomBooleanOperator {..} -> case _cboRHS of
Nothing -> pure $ IR.E.ApplyUnaryComparisonOperator (CustomUnaryComparisonOperator _cboName) currentComparisonColumn
Nothing -> pure $ API.ApplyUnaryComparisonOperator (API.CustomUnaryComparisonOperator _cboName) currentComparisonColumn
Just (Left rootOrCurrentColumn) ->
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn (IR.E.CustomBinaryComparisonOperator _cboName) rootOrCurrentColumn
Just (Right (IR.S.ValueLiteral value)) ->
pure $ mkApplyBinaryComparisonOperatorToScalar (IR.E.CustomBinaryComparisonOperator _cboName) value
Just (Right (IR.S.ArrayLiteral array)) ->
pure $ IR.E.ApplyBinaryArrayComparisonOperator (IR.E.CustomBinaryArrayComparisonOperator _cboName) currentComparisonColumn array
pure $ mkApplyBinaryComparisonOperatorToAnotherColumn (API.CustomBinaryComparisonOperator _cboName) rootOrCurrentColumn
Just (Right (ValueLiteral value)) ->
pure $ mkApplyBinaryComparisonOperatorToScalar (API.CustomBinaryComparisonOperator _cboName) value
Just (Right (ArrayLiteral array)) ->
pure $ API.ApplyBinaryArrayComparisonOperator (API.CustomBinaryArrayComparisonOperator _cboName) currentComparisonColumn array
where
currentComparisonColumn :: IR.E.ComparisonColumn
currentComparisonColumn = IR.E.ComparisonColumn IR.E.CurrentTable columnName
currentComparisonColumn :: API.ComparisonColumn
currentComparisonColumn = API.ComparisonColumn API.CurrentTable columnName

mkApplyBinaryComparisonOperatorToAnotherColumn :: IR.E.BinaryComparisonOperator -> RootOrCurrentColumn 'DataConnector -> IR.E.Expression
mkApplyBinaryComparisonOperatorToAnotherColumn :: API.BinaryComparisonOperator -> RootOrCurrentColumn 'DataConnector -> API.Expression
mkApplyBinaryComparisonOperatorToAnotherColumn operator (RootOrCurrentColumn rootOrCurrent otherColumnName) =
let columnPath = case rootOrCurrent of
IsRoot -> IR.E.QueryTable
IsCurrent -> IR.E.CurrentTable
in IR.E.ApplyBinaryComparisonOperator operator currentComparisonColumn (IR.E.AnotherColumn $ IR.E.ComparisonColumn columnPath otherColumnName)
IsRoot -> API.QueryTable
IsCurrent -> API.CurrentTable
in API.ApplyBinaryComparisonOperator operator currentComparisonColumn (API.AnotherColumn . API.ComparisonColumn columnPath $ Witch.from otherColumnName)

inOperator :: IR.S.Literal -> IR.E.Expression
inOperator :: Literal -> API.Expression
inOperator literal =
let values = case literal of
IR.S.ArrayLiteral array -> array
IR.S.ValueLiteral value -> [value]
in IR.E.ApplyBinaryArrayComparisonOperator IR.E.In currentComparisonColumn values
ArrayLiteral array -> array
ValueLiteral value -> [value]
in API.ApplyBinaryArrayComparisonOperator API.In currentComparisonColumn values
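-- (Aside, not part of this diff: a 'ValueLiteral' is normalised to a singleton
-- list here, so an AIN against a single value still becomes an 'API.In' over a
-- one-element array.)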

mkApplyBinaryComparisonOperatorToScalar :: IR.E.BinaryComparisonOperator -> J.Value -> IR.E.Expression
mkApplyBinaryComparisonOperatorToScalar :: API.BinaryComparisonOperator -> J.Value -> API.Expression
mkApplyBinaryComparisonOperatorToScalar operator value =
IR.E.ApplyBinaryComparisonOperator operator currentComparisonColumn (IR.E.ScalarValue value)
API.ApplyBinaryComparisonOperator operator currentComparisonColumn (API.ScalarValue value)

-- | Validate if a 'IR.Q' contains any relationships.
queryHasRelations :: IR.Q.QueryRequest -> Bool
queryHasRelations IR.Q.QueryRequest {..} = _qrTableRelationships /= mempty
-- | Validate if a 'API.QueryRequest' contains any relationships.
queryHasRelations :: API.QueryRequest -> Bool
queryHasRelations API.QueryRequest {..} = _qrTableRelationships /= mempty

data Cardinality
= Single
@ -740,3 +749,9 @@ reshapeAnnRelationSelect reshapeFields annRelationSelect fieldValue =
Right subqueryResponse ->
let annSimpleSelect = _aarAnnSelect annRelationSelect
in reshapeFields (_asnFields annSimpleSelect) subqueryResponse

memptyToNothing :: (Monoid m, Eq m) => m -> Maybe m
memptyToNothing m = if m == mempty then Nothing else Just m
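-- (Aside, not part of this diff: for example, memptyToNothing "" == Nothing
-- and memptyToNothing [1, 2] == Just [1, 2]; any 'Monoid' with 'Eq' works.)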

mkRelationshipName :: RelName -> API.RelationshipName
mkRelationshipName relName = API.RelationshipName $ toTxt relName
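As an aside (not part of this commit), a minimal standalone sketch of the expression-simplification helpers above, using a toy expression type in place of API.Expression so the behaviour can be checked in isolation:

module Main (main) where

-- A toy stand-in for API.Expression, for illustration only.
data Expr = And [Expr] | Or [Expr] | Not Expr | Lit Bool
  deriving (Eq, Show)

-- Same shape as mkIfZeroOrMany above: avoid wrapping a single expression.
mkIfZeroOrMany :: ([Expr] -> Expr) -> [Expr] -> Expr
mkIfZeroOrMany _ [singleExp] = singleExp
mkIfZeroOrMany mk zeroOrManyExps = mk zeroOrManyExps

-- 'And []' and 'Not (Or [])' are always true, so a filter built from them can
-- simply be dropped, mirroring removeAlwaysTrueExpression above.
removeAlwaysTrue :: Expr -> Maybe Expr
removeAlwaysTrue (And []) = Nothing
removeAlwaysTrue (Not (Or [])) = Nothing
removeAlwaysTrue other = Just other

main :: IO ()
main = do
  print (mkIfZeroOrMany And [Lit True])            -- Lit True
  print (mkIfZeroOrMany And [Lit True, Lit False]) -- And [Lit True,Lit False]
  print (removeAlwaysTrue (And []))                -- Nothing
  print (removeAlwaysTrue (Lit False))             -- Just (Lit False)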
@ -1,31 +0,0 @@
module Hasura.Backends.DataConnector.Schema.Column
( Column (..),
)
where

--------------------------------------------------------------------------------

import Hasura.Backends.DataConnector.IR.Column qualified as Column (Name)
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as Scalar (Type)
import Hasura.Prelude

--------------------------------------------------------------------------------

-- | A schematic representation which captures common attributes associated
-- with a piece of data that is stored in a given backend.
--
-- These attributes ascribe meaningful semantics to the data that they are
-- associated with.
--
-- cf. https://en.wikipedia.org/wiki/Column_(database)
-- https://www.postgresql.org/docs/13/ddl-basics.html
--
-- XXX: Instead of an @isNullable@ flag, should we instead add a @Nullable@
-- data constructor to 'Scalar.Type'?
data Column = Column
{ name :: Column.Name,
type_ :: Scalar.Type,
isNullable :: Bool,
description :: Maybe Text
}
deriving stock (Data, Eq, Generic, Ord, Show)
@ -1,34 +0,0 @@
module Hasura.Backends.DataConnector.Schema.Table
( Table (..),
)
where

--------------------------------------------------------------------------------

import Hasura.Backends.DataConnector.IR.Table qualified as Table (Name)
import Hasura.Backends.DataConnector.Schema.Column (Column)
import Hasura.Prelude

--------------------------------------------------------------------------------

-- | A schematic representation which captures a named collection of columns
--
-- TODO(cdparks): schematic in the sense of "relating to a schema" or symbolic?
-- This language is also used in the @Column@ documentation
--
-- An element of a table is known as a row, record, tuple, or object,
-- and conforms to the shape specified by the list of @Column@s below.
--
-- cf. https://en.wikipedia.org/wiki/Table_(database)
-- https://www.postgresql.org/docs/13/ddl-basics.html
--
-- NOTE(jkachmar): This type shouldn't _need_ ser/de instances, but they're
-- imposed by the 'Backend' class.
data Table = Table
{ name :: Table.Name,
columns :: [Column],
-- TODO(cdparks): Composite primary keys
primaryKey :: Maybe Text,
description :: Maybe Text
}
deriving stock (Data, Eq, Generic, Ord, Show)
@ -5,7 +5,7 @@ module Hasura.Backends.DataConnector.API.V0.ColumnSpec (spec, genColumnName, gen

import Data.Aeson.QQ.Simple (aesonQQ)
import Hasura.Backends.DataConnector.API.V0
import Hasura.Backends.DataConnector.API.V0.Scalar.TypeSpec (genType)
import Hasura.Backends.DataConnector.API.V0.ScalarSpec (genType)
import Hasura.Generator.Common (defaultRange, genArbitraryAlphaNumText)
import Hasura.Prelude
import Hedgehog
@ -1,9 +1,9 @@
{-# LANGUAGE QuasiQuotes #-}

module Hasura.Backends.DataConnector.API.V0.Scalar.TypeSpec (spec, genType) where
module Hasura.Backends.DataConnector.API.V0.ScalarSpec (spec, genType) where

import Data.Aeson.QQ.Simple (aesonQQ)
import Hasura.Backends.DataConnector.API.V0.Scalar.Type
import Hasura.Backends.DataConnector.API.V0.Scalar
import Hasura.Generator.Common (defaultRange, genArbitraryAlphaNumText)
import Hasura.Prelude
import Hedgehog
@ -24,6 +24,6 @@ spec = do
testToFromJSONToSchema (CustomTy "foo") [aesonQQ|"foo"|]
jsonOpenApiProperties genType

genType :: MonadGen m => m Type
genType :: MonadGen m => m ScalarType
genType =
Gen.choice [pure StringTy, pure NumberTy, pure BoolTy, CustomTy <$> genArbitraryAlphaNumText defaultRange]
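As an aside (not part of this commit), a minimal self-contained Hedgehog generator with the same shape as genType above; the ToyScalarType stand-in is assumed here rather than taken from the dc-api package:

import Hedgehog (Gen)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range

-- A stand-in for the real ScalarType, for illustration only.
data ToyScalarType = StringTy | NumberTy | BoolTy | CustomTy String
  deriving (Eq, Show)

-- Pick one of the fixed scalar types, or a custom one with a short
-- alphanumeric name, mirroring the structure of genType above.
genToyScalarType :: Gen ToyScalarType
genToyScalarType =
  Gen.choice
    [ pure StringTy,
      pure NumberTy,
      pure BoolTy,
      CustomTy <$> Gen.string (Range.linear 1 8) Gen.alphaNum
    ]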
@ -1,10 +0,0 @@
module Hasura.Backends.DataConnector.RQLGenerator
( module M,
)
where

import Hasura.Backends.DataConnector.RQLGenerator.GenAnnSelectG as M
import Hasura.Backends.DataConnector.RQLGenerator.GenCommon as M
import Hasura.Backends.DataConnector.RQLGenerator.GenSelectArgsG as M
import Hasura.Backends.DataConnector.RQLGenerator.GenSelectFromG as M
import Hasura.Backends.DataConnector.RQLGenerator.GenTablePermG as M
@ -1,34 +0,0 @@
module Hasura.Backends.DataConnector.RQLGenerator.GenAnnSelectG
( genAnnSelectG,
)
where

import Hasura.Backends.DataConnector.RQLGenerator.GenSelectArgsG (genSelectArgsG)
import Hasura.Backends.DataConnector.RQLGenerator.GenSelectFromG (genSelectFromG)
import Hasura.Backends.DataConnector.RQLGenerator.GenTablePermG (genTablePermG)
import Hasura.Generator.Common (defaultRange)
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.Prelude hiding (bool)
import Hasura.RQL.IR (AnnSelectG (..))
import Hasura.RQL.IR.Generator (genFields)
import Hasura.SQL.Backend (BackendType (..))
import Hedgehog (MonadGen)
import Hedgehog.Gen (bool)

--------------------------------------------------------------------------------

genAnnSelectG :: forall m f a. MonadGen m => m a -> m (f a) -> m (AnnSelectG 'DataConnector f a)
genAnnSelectG genA genFA =
AnnSelectG
<$> genFields genFA defaultRange defaultRange
<*> genSelectFromG
<*> genTablePermG genA
<*> genArgs
<*> genStringifyNumbers
<*> (pure Nothing)
where
genStringifyNumbers =
bool <&> \case
False -> Options.Don'tStringifyNumbers
True -> Options.StringifyNumbers
genArgs = genSelectArgsG genA
@ -1,171 +0,0 @@
module Hasura.Backends.DataConnector.RQLGenerator.GenCommon
( genAnnBoolExpFld,
genAnnotatedOrderByElement,

-- * Associated Types
genColumn,
genTableName,
genScalarType,
genFunctionName,
genFunctionArgumentExp,
)
where

import Data.Functor.Const
import Hasura.Backends.DataConnector.IR.Function qualified as FunctionName
import Hasura.Backends.DataConnector.IR.Name qualified as Name
import Hasura.Backends.DataConnector.IR.Scalar.Type qualified as ScalarType
import Hasura.Backends.DataConnector.IR.Table qualified as TableName
import Hasura.Generator.Common (defaultRange, genArbitraryUnicodeText, genHashMap)
import Hasura.Prelude (coerce, fmap, pure, ($), (<$>), (<*>))
import Hasura.RQL.IR
import Hasura.RQL.IR.Generator (genAnnBoolExp, genAnnotatedAggregateOrderBy, genColumnInfo, genInsertOrder, genRelName, genRelType)
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Relationships.Local
import Hasura.SQL.Backend
import Hedgehog (MonadGen)
import Hedgehog.Gen (bool_, choice, element, list, nonEmpty)
import Hedgehog.Internal.Range (linear)

--------------------------------------------------------------------------------

genAnnBoolExpFld ::
MonadGen m =>
m a ->
m (AnnBoolExpFld 'DataConnector a)
genAnnBoolExpFld genA = choice [column, relationship]
where
column =
AVColumn
<$> genColumnInfo @_ @('DataConnector)
genColumn
genTableName
genScalarType
<*> list defaultRange (genOpExpG genA)
relationship =
AVRelationship
<$> genRelInfo
<*> genAnnBoolExp (genAnnBoolExpFld genA) genTableName

genAnnotatedOrderByElement ::
MonadGen m =>
m a ->
m (AnnotatedOrderByElement 'DataConnector a)
genAnnotatedOrderByElement genA =
choice
[ column,
objectRelation,
arrayAggregation
]
where
column =
AOCColumn
<$> genColumnInfo
genColumn
genTableName
genScalarType
objectRelation =
AOCObjectRelation
<$> genRelInfo
<*> genAnnBoolExp
(genAnnBoolExpFld genA)
genTableName
<*> genAnnotatedOrderByElement genA
arrayAggregation =
AOCArrayAggregation
<$> genRelInfo
<*> genAnnBoolExp
(genAnnBoolExpFld genA)
genTableName
<*> genAnnotatedAggregateOrderBy
genColumn
genTableName
genScalarType

genColumn :: MonadGen m => m (Column 'DataConnector)
genColumn = coerce <$> genArbitraryUnicodeText defaultRange

genTableName :: MonadGen m => m (TableName 'DataConnector)
genTableName = coerce <$> nonEmpty (linear 1 3) (genArbitraryUnicodeText defaultRange)

genScalarType :: MonadGen m => m (ScalarType 'DataConnector)
genScalarType =
choice
[ pure ScalarType.String,
pure ScalarType.Number,
pure ScalarType.Bool
]

genFunctionName :: MonadGen m => m (FunctionName 'DataConnector)
genFunctionName = coerce <$> nonEmpty (linear 1 3) (genArbitraryUnicodeText defaultRange)

genFunctionArgumentExp :: MonadGen m => m (FunctionArgumentExp 'DataConnector a)
genFunctionArgumentExp = pure (Const ())

--------------------------------------------------------------------------------
-- Unexported

genOpExpG ::
MonadGen m =>
m a ->
m (OpExpG 'DataConnector a)
genOpExpG genA =
choice
[ acast,
aeq,
ane,
ain,
anin,
agt,
alt,
agte,
alte,
alike,
anlike,
ceq,
cne,
cgt,
clt,
cgte,
clte,
anIsNull,
anIsNotNull
]
where
acast =
ACast
<$> genHashMap
genScalarType
(list defaultRange $ genOpExpG genA)
defaultRange
aeq = AEQ <$> bool_ <*> genA
ane = ANE <$> bool_ <*> genA
ain = AIN <$> genA
anin = ANIN <$> genA
agt = AGT <$> genA
alt = ALT <$> genA
agte = AGTE <$> genA
alte = ALTE <$> genA
alike = ALIKE <$> genA
anlike = ANLIKE <$> genA
ceq = fmap CEQ $ RootOrCurrentColumn <$> genRootOrCurrent <*> genColumn
cne = fmap CNE $ RootOrCurrentColumn <$> genRootOrCurrent <*> genColumn
cgt = fmap CGT $ RootOrCurrentColumn <$> genRootOrCurrent <*> genColumn
clt = fmap CLT $ RootOrCurrentColumn <$> genRootOrCurrent <*> genColumn
cgte = fmap CGTE $ RootOrCurrentColumn <$> genRootOrCurrent <*> genColumn
clte = fmap CLTE $ RootOrCurrentColumn <$> genRootOrCurrent <*> genColumn
anIsNull = pure ANISNULL
anIsNotNull = pure ANISNOTNULL
genRootOrCurrent = element [IsRoot, IsCurrent]

genRelInfo ::
MonadGen m =>
m (RelInfo 'DataConnector)
genRelInfo =
RelInfo
<$> genRelName
<*> genRelType
<*> genHashMap genColumn genColumn defaultRange
<*> genTableName
<*> bool_
<*> genInsertOrder
@ -1,57 +0,0 @@
--
module Hasura.Backends.DataConnector.RQLGenerator.GenSelectArgsG
( genSelectArgsG,
)
where

import Data.Int (Int64)
import Hasura.Backends.DataConnector.IR.OrderBy qualified as IR
import Hasura.Backends.DataConnector.RQLGenerator.GenCommon (genAnnBoolExpFld, genAnnotatedOrderByElement, genColumn, genTableName)
import Hasura.Generator.Common (defaultRange)
import Hasura.Prelude (Int, Maybe, NonEmpty, pure, ($), (.))
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.Generator (genAnnBoolExp, genAnnotatedOrderByItemG)
import Hasura.RQL.IR.Select (AnnotatedOrderByItemG, SelectArgsG (..))
import Hasura.RQL.Types.Backend
import Hasura.SQL.Backend (BackendType (..))
import Hedgehog (MonadGen)
import Hedgehog.Gen (element, maybe, nonEmpty)
import Hedgehog.Internal.Gen (integral)

--------------------------------------------------------------------------------
-- Exported

genSelectArgsG :: forall m a. MonadGen m => m a -> m (SelectArgsG 'DataConnector a)
genSelectArgsG genA = do
_saWhere <- where'
_saOrderBy <- orderBy
_saLimit <- limit
_saOffset <- offset
_saDistinct <- distinct
pure SelectArgs {..}
where
where' :: m (Maybe (AnnBoolExp 'DataConnector a))
where' = maybe $ genAnnBoolExp (genAnnBoolExpFld genA) genTableName

orderBy :: m (Maybe (NonEmpty (AnnotatedOrderByItemG 'DataConnector a)))
orderBy =
maybe . nonEmpty defaultRange $
genAnnotatedOrderByItemG @_ @'DataConnector
genBasicOrderType
genNullsOrderType
(genAnnotatedOrderByElement genA)

limit :: m (Maybe Int)
limit = maybe $ integral defaultRange

offset :: m (Maybe Int64)
offset = maybe $ integral defaultRange

distinct :: m (Maybe (NonEmpty (Column 'DataConnector)))
distinct = maybe . nonEmpty defaultRange $ genColumn

genBasicOrderType :: MonadGen m => m (BasicOrderType 'DataConnector)
genBasicOrderType = element [IR.Ascending, IR.Descending]

genNullsOrderType :: MonadGen m => m (NullsOrderType 'DataConnector)
genNullsOrderType = element [(), ()]
@ -1,35 +0,0 @@
module Hasura.Backends.DataConnector.RQLGenerator.GenSelectFromG
( genSelectFromG,
)
where

import Hasura.Backends.DataConnector.RQLGenerator.GenCommon
( genColumn,
genFunctionArgumentExp,
genFunctionName,
genScalarType,
genTableName,
)
import Hasura.Generator.Common (defaultRange)
import Hasura.Prelude hiding (choice, maybe)
import Hasura.RQL.IR.Generator
( genFunctionArgsExpG,
genIdentifier,
)
import Hasura.RQL.IR.Select (SelectFromG (..))
import Hasura.SQL.Backend (BackendType (..))
import Hedgehog (MonadGen)
import Hedgehog.Gen (choice, list, maybe)

--------------------------------------------------------------------------------

genSelectFromG :: MonadGen m => m (SelectFromG 'DataConnector a)
genSelectFromG = choice [fromTable, fromIdentifier, fromFunction]
where
fromTable = FromTable <$> genTableName
fromIdentifier = FromIdentifier <$> genIdentifier
fromFunction = do
funcName <- genFunctionName
funcArgsExp <- genFunctionArgsExpG genFunctionArgumentExp
defList <- maybe . list defaultRange $ liftA2 (,) genColumn genScalarType
pure $ FromFunction funcName funcArgsExp defList
@ -1,28 +0,0 @@
module Hasura.Backends.DataConnector.RQLGenerator.GenTablePermG
( genTablePermG,
)
where

import Hasura.Backends.DataConnector.RQLGenerator.GenCommon (genAnnBoolExpFld, genTableName)
import Hasura.Generator.Common (defaultRange)
import Hasura.Prelude (pure, ($))
import Hasura.RQL.IR.Generator (genAnnBoolExp)
import Hasura.RQL.IR.Select (TablePermG (TablePerm))
import Hasura.SQL.Backend
( BackendType (DataConnector),
)
import Hedgehog (MonadGen)
import Hedgehog.Gen
( integral,
maybe,
)

--------------------------------------------------------------------------------
-- Exported

genTablePermG :: MonadGen m => m a -> m (TablePermG 'DataConnector a)
genTablePermG genA = do
let genV = genAnnBoolExpFld genA
gBoolExp <- genAnnBoolExp @_ @_ @('DataConnector) genV genTableName
limit <- maybe (integral defaultRange)
pure $ TablePerm gBoolExp limit