graphql-engine/server/src-lib/Hasura/Backends/DataConnector/Adapter/Backend.hs

{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.DataConnector.Adapter.Backend
  ( CustomBooleanOperator (..),
    columnTypeToScalarType,
    parseValue,
  )
where

import Data.Aeson qualified as J
import Data.Aeson.Extended (ToJSONKeyValue (..))
import Data.Aeson.Key (fromText)
import Data.Aeson.Types qualified as J
import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NonEmpty
import Data.Map.Strict (Map)
import Data.Scientific (fromFloatDigits)
import Data.Text qualified as Text
import Data.Text.Casing qualified as C
import Data.Text.Extended ((<<>))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
import Hasura.Backends.DataConnector.Adapter.Types.Mutations qualified as DC
import Hasura.Base.Error (Code (ValidationFailed), QErr, runAesonParser, throw400)
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Backend (Backend (..), ComputedFieldReturnType, HasSourceConfiguration (..), SupportedNamingCase (..), XDisable, XEnable)
import Hasura.RQL.Types.Column (ColumnType (..))
import Hasura.RQL.Types.ResizePool
import Hasura.SQL.Backend (BackendType (DataConnector))
import Language.GraphQL.Draft.Syntax qualified as G

-- | An alias for '()' indicating that a particular associated type has not yet
-- been implemented for the 'DataConnector' backend.
--
-- '()' is used (rather than a type with an empty data constructor) because it
-- comes with many of the instances that these associated types require.
--
-- This alias should /not/ be exported from this module, and it's only defined
-- for clarity.
type Unimplemented = ()
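
-- The (orphan) 'Backend' instance for the Data Connector backend. The
-- associated type families below choose the concrete Data Connector types,
-- and features that are not (yet) supported are marked with 'XDisable' or
-- 'Unimplemented'.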
instance Backend 'DataConnector where
  type BackendConfig 'DataConnector = Map DC.DataConnectorName DC.DataConnectorOptions
  type BackendInfo 'DataConnector = HashMap DC.DataConnectorName DC.DataConnectorInfo
  type TableName 'DataConnector = DC.TableName
  type FunctionName 'DataConnector = DC.FunctionName
  type RawFunctionInfo 'DataConnector = XDisable
  type FunctionArgument 'DataConnector = XDisable
  type ConstraintName 'DataConnector = DC.ConstraintName
  type BasicOrderType 'DataConnector = DC.OrderDirection
  type NullsOrderType 'DataConnector = Unimplemented
  type CountType 'DataConnector = DC.CountAggregate
  type Column 'DataConnector = DC.ColumnName
  type ScalarValue 'DataConnector = J.Value
  type ScalarType 'DataConnector = DC.ScalarType

  -- This does not actually have to be the full IR Expression; in fact, it is only
  -- required to represent literals, so we use a special type for that.
  -- The 'SQLExpression' type family should be removed in a future refactor.
  type SQLExpression 'DataConnector = DC.Literal

  type ScalarSelectionArguments 'DataConnector = Void
  type BooleanOperators 'DataConnector = CustomBooleanOperator
  type ExtraTableMetadata 'DataConnector = DC.ExtraTableMetadata
  type ComputedFieldDefinition 'DataConnector = Unimplemented
  type FunctionArgumentExp 'DataConnector = Const Unimplemented
  type ComputedFieldImplicitArguments 'DataConnector = Unimplemented
  type ComputedFieldReturn 'DataConnector = Unimplemented
  type UpdateVariant 'DataConnector = DC.DataConnectorUpdateVariant
  type BackendInsert 'DataConnector = DC.BackendInsert
  type XComputedField 'DataConnector = XDisable
  type XRelay 'DataConnector = XDisable
  type XNodesAgg 'DataConnector = XEnable
  type XEventTriggers 'DataConnector = XDisable
  type XNestedInserts 'DataConnector = XDisable
  type XStreamingSubscription 'DataConnector = XDisable
  type XNestedObjects 'DataConnector = XEnable
  type HealthCheckTest 'DataConnector = Void

  isComparableType :: ScalarType 'DataConnector -> Bool
  isComparableType = const False

  isNumType :: ScalarType 'DataConnector -> Bool
  isNumType = const False
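
  -- Builds the map of custom aggregate operators from the data connector's
  -- scalar type capabilities: each aggregate function advertised for a scalar
  -- type is recorded against the scalar type it accepts and the result type
  -- it returns.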
  getCustomAggregateOperators :: DC.SourceConfig -> HashMap G.Name (HashMap DC.ScalarType DC.ScalarType)
  getCustomAggregateOperators DC.SourceConfig {..} =
    HashMap.foldrWithKey insertOps mempty scalarTypesCapabilities
    where
      scalarTypesCapabilities = API.unScalarTypesCapabilities $ API._cScalarTypes _scCapabilities
      insertOps typeName API.ScalarTypeCapabilities {..} m =
        HashMap.foldrWithKey insertOp m $
          API.unAggregateFunctions _stcAggregateFunctions
        where
          insertOp functionName resultTypeName =
            HashMap.insertWith HashMap.union functionName $
              HashMap.singleton
                (DC.mkScalarType _scCapabilities typeName)
                (DC.mkScalarType _scCapabilities resultTypeName)

  textToScalarValue :: Maybe Text -> ScalarValue 'DataConnector
  textToScalarValue = error "textToScalarValue: not implemented for the Data Connector backend."

  parseScalarValue :: ScalarType 'DataConnector -> J.Value -> Either QErr (ScalarValue 'DataConnector)
  parseScalarValue type' value = runAesonParser (parseValue type') value

  scalarValueToJSON :: ScalarValue 'DataConnector -> J.Value
  scalarValueToJSON = id

  functionToTable :: FunctionName 'DataConnector -> TableName 'DataConnector
  functionToTable = error "functionToTable: not implemented for the Data Connector backend."

  computedFieldFunction :: ComputedFieldDefinition 'DataConnector -> FunctionName 'DataConnector
  computedFieldFunction = error "computedFieldFunction: not implemented for the Data Connector backend"

  computedFieldReturnType :: ComputedFieldReturn 'DataConnector -> ComputedFieldReturnType 'DataConnector
  computedFieldReturnType = error "computedFieldReturnType: not implemented for the Data Connector backend"

  fromComputedFieldImplicitArguments :: v -> ComputedFieldImplicitArguments 'DataConnector -> [FunctionArgumentExp 'DataConnector v]
  fromComputedFieldImplicitArguments = error "fromComputedFieldImplicitArguments: not implemented for the Data Connector backend"

  -- phil said this was cursed
  tableToFunction :: TableName 'DataConnector -> FunctionName 'DataConnector
  tableToFunction = coerce
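
  -- Renders a table name as a GraphQL name by joining its components with
  -- underscores, failing if the result is not a valid GraphQL identifier.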
  tableGraphQLName :: TableName 'DataConnector -> Either QErr G.Name
  tableGraphQLName name = do
    let snakedName = snakeCaseTableName @'DataConnector name
    G.mkName snakedName
      `onNothing` throw400 ValidationFailed ("TableName " <> snakedName <> " is not a valid GraphQL identifier")

  functionGraphQLName :: FunctionName 'DataConnector -> Either QErr G.Name
  functionGraphQLName = error "functionGraphQLName: not implemented for the Data Connector backend."

  snakeCaseTableName :: TableName 'DataConnector -> Text
  snakeCaseTableName = Text.intercalate "_" . NonEmpty.toList . DC.unTableName

  getTableIdentifier :: TableName 'DataConnector -> Either QErr C.GQLNameIdentifier
  getTableIdentifier name@(DC.TableName (prefix :| suffixes)) =
    let identifier = do
          namePrefix <- G.mkName prefix
          nameSuffixes <- traverse G.mkNameSuffix suffixes
          pure $ C.fromAutogeneratedTuple (namePrefix, nameSuffixes)
     in identifier
          `onNothing` throw400 ValidationFailed ("TableName " <> name <<> " is not a valid GraphQL identifier")

  namingConventionSupport :: SupportedNamingCase
  namingConventionSupport = OnlyHasuraCase

  resizeSourcePools :: SourceConfig 'DataConnector -> ServerReplicas -> IO SourceResizePoolSummary
  resizeSourcePools _sourceConfig _serverReplicas =
    -- Data connectors do not have a concept of connection pools
    pure noPoolsResizedSummary
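
  -- Event triggers are not supported for Data Connector backends
  -- ('XEventTriggers' above is 'XDisable'), so no default
  -- trigger-on-replication value is provided.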
  defaultTriggerOnReplication = Nothing

instance HasSourceConfiguration 'DataConnector where
  type SourceConfig 'DataConnector = DC.SourceConfig
  type SourceConnConfiguration 'DataConnector = DC.ConnSourceConfig
  sourceConfigNumReadReplicas = const 0 -- not supported
  sourceConfigConnectonTemplateEnabled = const False -- not supported
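
-- | A comparison operator specific to the Data Connector backend, identified
-- by name and applied to an optional right-hand side that is either a column
-- reference or a literal value.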
data CustomBooleanOperator a = CustomBooleanOperator
  { _cboName :: Text,
    _cboRHS :: Maybe (Either (RootOrCurrentColumn 'DataConnector) a) -- TODO turn Either into a specific type
  }
  deriving stock (Eq, Generic, Foldable, Functor, Traversable, Show)

instance NFData a => NFData (CustomBooleanOperator a)

instance Hashable a => Hashable (CustomBooleanOperator a)

instance J.ToJSON a => ToJSONKeyValue (CustomBooleanOperator a) where
  toJSONKeyValue CustomBooleanOperator {..} = (fromText _cboName, J.toJSON _cboRHS)
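
-- | Parse a JSON value expected to represent a scalar of the given
-- 'DC.ScalarType', validating it against the scalar type's declared GraphQL
-- type where one exists. 'J.Null' is accepted for every type, and scalar
-- types without a GraphQL type are passed through unvalidated.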
parseValue :: DC.ScalarType -> J.Value -> J.Parser J.Value
parseValue type' val =
  case (type', val) of
    (_, J.Null) -> pure J.Null
    (DC.ScalarType _ graphQLType, value) -> case graphQLType of
      Nothing -> pure value
      Just DC.GraphQLInt -> (J.Number . fromIntegral) <$> J.parseJSON @Int value
      Just DC.GraphQLFloat -> (J.Number . fromFloatDigits) <$> J.parseJSON @Double value
      Just DC.GraphQLString -> J.String <$> J.parseJSON value
      Just DC.GraphQLBoolean -> J.Bool <$> J.parseJSON value
      Just DC.GraphQLID -> J.String <$> parseID value
  where
    parseID value = J.parseJSON @Text value <|> tshow <$> J.parseJSON @Int value
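
-- | Extract the scalar type from a column type. Enum column references are
-- not supported by the Data Connector backend.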
columnTypeToScalarType :: ColumnType 'DataConnector -> DC.ScalarType
columnTypeToScalarType = \case
  ColumnScalar scalarType -> scalarType
  -- Data connectors do not yet support enum tables.
  -- If/when we add this support, we probably want to
  -- embed the enum scalar type name within the `EnumReference` record type.
  ColumnEnumReference _ -> error "columnTypeToScalarType got enum"