-- graphql-engine/server/src-lib/Hasura/Backends/DataConnector/Adapter/Backend.hs

{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.DataConnector.Adapter.Backend
  ( CustomBooleanOperator (..),
    columnTypeToScalarType,
    parseValue,
  )
where

import Data.Aeson qualified as J
import Data.Aeson.Extended (ToJSONKeyValue (..))
import Data.Aeson.Key (fromText)
import Data.Aeson.Types qualified as J
import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NonEmpty
import Data.Map.Strict (Map)
import Data.Scientific (fromFloatDigits)
import Data.Text qualified as Text
import Data.Text.Casing qualified as C
import Data.Text.Extended ((<<>))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
import Hasura.Backends.DataConnector.Adapter.Types.Mutations qualified as DC
import Hasura.Base.Error (Code (ValidationFailed), QErr, runAesonParser, throw400)
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Backend (Backend (..), ComputedFieldReturnType, HasSourceConfiguration (..), SupportedNamingCase (..), XDisable, XEnable)
import Hasura.RQL.Types.BackendType (BackendType (DataConnector))
import Hasura.RQL.Types.Column (ColumnType (..))
import Hasura.RQL.Types.ResizePool
import Language.GraphQL.Draft.Syntax qualified as G
-- | An alias for '()' indicating that a particular associated type has not yet
-- been implemented for the 'DataConnector' backend.
--
-- '()' is used (rather than a type with an empty data constructor) because it
-- comes with many of the instances that these associated types require.
--
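-- For example, '()' already has 'Eq', 'Ord', 'Show', 'Hashable' and 'NFData'
-- instances, which covers most of what the associated types below require.
--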
-- This alias should /not/ be exported from this module, and it's only defined
-- for clarity.
type Unimplemented = ()

instance Backend 'DataConnector where
  type BackendConfig 'DataConnector = Map DC.DataConnectorName DC.DataConnectorOptions
  type BackendInfo 'DataConnector = HashMap DC.DataConnectorName DC.DataConnectorInfo
  type TableName 'DataConnector = DC.TableName
  type FunctionName 'DataConnector = DC.FunctionName
  type FunctionReturnType 'DataConnector = DC.FunctionReturnType
  type RawFunctionInfo 'DataConnector = API.FunctionInfo
  type FunctionArgument 'DataConnector = API.FunctionArg
  type ConstraintName 'DataConnector = DC.ConstraintName
  type BasicOrderType 'DataConnector = DC.OrderDirection
  type NullsOrderType 'DataConnector = Unimplemented
  type CountType 'DataConnector = DC.CountAggregate
  type Column 'DataConnector = DC.ColumnName
  type ScalarValue 'DataConnector = J.Value
  type ScalarType 'DataConnector = DC.ScalarType
  -- This does not actually have to be the full IR expression; in fact, it is
  -- only required to represent literals, so we use a special type for that.
  -- The 'SQLExpression' type family should be removed in a future refactor.
  type SQLExpression 'DataConnector = DC.Literal
  type ScalarSelectionArguments 'DataConnector = Void
  type BooleanOperators 'DataConnector = CustomBooleanOperator
  type ExtraTableMetadata 'DataConnector = DC.ExtraTableMetadata
  type ComputedFieldDefinition 'DataConnector = Unimplemented
  type FunctionArgumentExp 'DataConnector = DC.ArgumentExp
  type ComputedFieldImplicitArguments 'DataConnector = Unimplemented
  type ComputedFieldReturn 'DataConnector = Unimplemented
  type UpdateVariant 'DataConnector = DC.DataConnectorUpdateVariant
  type BackendInsert 'DataConnector = DC.BackendInsert
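  -- The 'X*' type families act as per-backend feature switches: 'XEnable'
  -- turns the corresponding feature on, while 'XDisable' (an uninhabited
  -- type) makes the guarded code paths unreachable for this backend. For
  -- Data Connectors, computed fields, Relay, event triggers, nested inserts
  -- and streaming subscriptions are switched off below, while node
  -- aggregates, nested objects and nested arrays are switched on.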
  type XComputedField 'DataConnector = XDisable
  type XRelay 'DataConnector = XDisable
  type XNodesAgg 'DataConnector = XEnable
  type XEventTriggers 'DataConnector = XDisable
  type XNestedInserts 'DataConnector = XDisable
  type XStreamingSubscription 'DataConnector = XDisable
  type XNestedObjects 'DataConnector = XEnable
  type XNestedArrays 'DataConnector = XEnable
  type HealthCheckTest 'DataConnector = Void

  isComparableType :: ScalarType 'DataConnector -> Bool
  isComparableType = const False

  isNumType :: ScalarType 'DataConnector -> Bool
  isNumType = const False
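
  -- Builds, from the source's scalar type capabilities, a map from aggregate
  -- function name to the scalar types it accepts and the scalar type it
  -- returns. Illustration with made-up names: if the agent advertises an
  -- aggregate "longest" on scalar type "string" returning "string", the
  -- result contains @"longest" -> fromList [(string, string)]@.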
  getCustomAggregateOperators :: DC.SourceConfig -> HashMap G.Name (HashMap DC.ScalarType DC.ScalarType)
  getCustomAggregateOperators DC.SourceConfig {..} =
    HashMap.foldrWithKey insertOps mempty scalarTypesCapabilities
    where
      scalarTypesCapabilities = API.unScalarTypesCapabilities $ API._cScalarTypes _scCapabilities
      insertOps typeName API.ScalarTypeCapabilities {..} m =
        HashMap.foldrWithKey insertOp m
          $ API.unAggregateFunctions _stcAggregateFunctions
        where
          insertOp functionName resultTypeName =
            HashMap.insertWith HashMap.union functionName
              $ HashMap.singleton
                (DC.mkScalarType _scCapabilities typeName)
                (DC.mkScalarType _scCapabilities resultTypeName)

  textToScalarValue :: Maybe Text -> ScalarValue 'DataConnector
  textToScalarValue = error "textToScalarValue: not implemented for the Data Connector backend."

  parseScalarValue :: ScalarType 'DataConnector -> J.Value -> Either QErr (ScalarValue 'DataConnector)
  parseScalarValue type' value = runAesonParser (parseValue type') value

  scalarValueToJSON :: ScalarValue 'DataConnector -> J.Value
  scalarValueToJSON = id

  -- TODO: Fill in this definition for computed fields
  computedFieldFunction :: ComputedFieldDefinition 'DataConnector -> FunctionName 'DataConnector
  computedFieldFunction = error "computedFieldFunction: not implemented for the Data Connector backend"

  computedFieldReturnType :: ComputedFieldReturn 'DataConnector -> ComputedFieldReturnType 'DataConnector
  computedFieldReturnType = error "computedFieldReturnType: not implemented for the Data Connector backend"

  fromComputedFieldImplicitArguments :: v -> ComputedFieldImplicitArguments 'DataConnector -> [FunctionArgumentExp 'DataConnector v]
  fromComputedFieldImplicitArguments = error "fromComputedFieldImplicitArguments: not implemented for the Data Connector backend"

  -- phil said this was cursed
  tableToFunction :: TableName 'DataConnector -> FunctionName 'DataConnector
  tableToFunction = coerce

  functionToTable :: FunctionName 'DataConnector -> TableName 'DataConnector
  functionToTable = coerce
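  -- Both conversions above rely on 'DC.TableName' and 'DC.FunctionName'
  -- being newtypes over the same underlying representation (a non-empty list
  -- of name segments), so 'coerce' is a zero-cost re-tagging rather than a
  -- real translation.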

  tableGraphQLName :: TableName 'DataConnector -> Either QErr G.Name
  tableGraphQLName name = do
    let snakedName = snakeCaseTableName @'DataConnector name
    G.mkName snakedName
      `onNothing` throw400 ValidationFailed ("TableName " <> snakedName <> " is not a valid GraphQL identifier")

  functionGraphQLName :: FunctionName 'DataConnector -> Either QErr G.Name
  functionGraphQLName name = do
    let snakedName = snakeCaseTableName @'DataConnector (coerce name)
    G.mkName snakedName
      `onNothing` throw400 ValidationFailed ("FunctionName " <> snakedName <> " is not a valid GraphQL name")

  snakeCaseTableName :: TableName 'DataConnector -> Text
  snakeCaseTableName = Text.intercalate "_" . NonEmpty.toList . DC.unTableName
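  -- For example (illustrative name): a 'DC.TableName' wrapping
  -- @"chinook" :| ["Album"]@ is rendered as @"chinook_Album"@.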

  getTableIdentifier :: TableName 'DataConnector -> Either QErr C.GQLNameIdentifier
  getTableIdentifier name@(DC.TableName (prefix :| suffixes)) =
    let identifier = do
          namePrefix <- G.mkName prefix
          nameSuffixes <- traverse G.mkNameSuffix suffixes
          pure $ C.fromAutogeneratedTuple (namePrefix, nameSuffixes)
     in identifier
          `onNothing` throw400 ValidationFailed ("TableName " <> name <<> " is not a valid GraphQL identifier")

  namingConventionSupport :: SupportedNamingCase
  namingConventionSupport = OnlyHasuraCase

  resizeSourcePools :: SourceConfig 'DataConnector -> ServerReplicas -> IO SourceResizePoolSummary
  resizeSourcePools _sourceConfig _serverReplicas =
    -- Data connectors do not have a concept of connection pools
    pure noPoolsResizedSummary
  defaultTriggerOnReplication = Nothing

instance HasSourceConfiguration 'DataConnector where
  type SourceConfig 'DataConnector = DC.SourceConfig
  type SourceConnConfiguration 'DataConnector = DC.ConnSourceConfig
  sourceConfigNumReadReplicas = const 0 -- not supported
  sourceConfigConnectonTemplateEnabled = const False -- not supported

data CustomBooleanOperator a = CustomBooleanOperator
  { _cboName :: Text,
    _cboRHS :: Maybe (Either (RootOrCurrentColumn 'DataConnector) a) -- TODO turn Either into a specific type
  }
  deriving stock (Eq, Generic, Foldable, Functor, Traversable, Show)

instance (NFData a) => NFData (CustomBooleanOperator a)

instance (Hashable a) => Hashable (CustomBooleanOperator a)

instance (J.ToJSON a) => ToJSONKeyValue (CustomBooleanOperator a) where
  toJSONKeyValue CustomBooleanOperator {..} = (fromText _cboName, J.toJSON _cboRHS)
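
-- Illustration (made-up operator name; default aeson encodings assumed):
-- a value such as
--
-- > CustomBooleanOperator "_like" (Just (Right (J.String "%foo%")))
--
-- is rendered as the key/value pair @("_like", {"Right": "%foo%"})@, since
-- 'Maybe' encodes 'Just' transparently and 'Either' is encoded with
-- "Left"/"Right" object keys.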

parseValue :: DC.ScalarType -> J.Value -> J.Parser J.Value
parseValue type' val =
  case (type', val) of
    (_, J.Null) -> pure J.Null
    (DC.ScalarType _ graphQLType, value) -> case graphQLType of
      Nothing -> pure value
      Just DC.GraphQLInt -> (J.Number . fromIntegral) <$> J.parseJSON @Int value
      Just DC.GraphQLFloat -> (J.Number . fromFloatDigits) <$> J.parseJSON @Double value
      Just DC.GraphQLString -> J.String <$> J.parseJSON value
      Just DC.GraphQLBoolean -> J.Bool <$> J.parseJSON value
      Just DC.GraphQLID -> J.String <$> parseID value
  where
    parseID value = J.parseJSON @Text value <|> tshow <$> J.parseJSON @Int value
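
-- A rough sketch of the behaviour above:
--
--   * 'J.Null' is accepted for every scalar type;
--   * a scalar with no associated GraphQL type passes any JSON value through
--     unchanged;
--   * 'DC.GraphQLInt', 'DC.GraphQLFloat', 'DC.GraphQLString' and
--     'DC.GraphQLBoolean' re-parse the value at the corresponding Haskell
--     type, so e.g. a JSON string where an Int is expected is rejected;
--   * 'DC.GraphQLID' accepts either a string or an integer, rendering
--     integers back as text.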

columnTypeToScalarType :: ColumnType 'DataConnector -> DC.ScalarType
columnTypeToScalarType = \case
  ColumnScalar scalarType -> scalarType
  -- Data connectors do not yet support enum tables.
  -- If/when we add this support, we probably want to embed the enum scalar
  -- type name within the `EnumReference` record type.
  ColumnEnumReference _ -> error "columnTypeToScalarType got enum"