server: plumb StoredIntrospection while building the Schema Cache

We'd like to be able to build a Schema Cache from only serializable data. We already have Metadata. The data that's missing to build a Schema Cache is referred to as "stored introspection", and this includes:
- DB introspection
- User-defined enum values (i.e. contents of specific DB tables)
- Remote schema introspection

This PR introduces a new `StoredIntrospection` container that holds that data, and plumbs it through to the right parts of the schema cache building process, so that stored introspection can be used as a substitute for fresh introspection requests against live data sources.
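
For orientation, a condensed sketch of the new container (the full definitions, including design comments, appear in the diff below):

```haskell
-- Condensed from the definitions added in Hasura.RQL.DDL.Schema.Cache.Common
-- in this PR; deriving clauses, instances, and class contexts omitted.
data BackendIntrospection (b :: BackendType) = BackendIntrospection
  { biMetadata :: DBObjectsIntrospection b,
    biEnumValues :: HashMap (TableName b) EnumValues
  }

data StoredIntrospection = StoredIntrospection
  { siBackendIntrospection :: HashMap SourceName (AnyBackend BackendIntrospection),
    siRemotes :: HashMap RemoteSchemaName EncJSON
  }
```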

The serialization of `StoredIntrospection` is intended to be straightforward: just take the serialized source introspection results and put them in an appropriate JSON object. That said, I don't think this PR achieves that entirely yet.
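
Roughly, the intended wire format looks like the following (a sketch inferred from the `FromJSON` instances added in this diff; the keys are placeholders, and the exact encoding of individual map entries may differ):

```
{ "backend_introspection":
    { "<source name>":
        { "<backend type>":
            { "metadata": { "tables": ..., "functions": ..., "scalars": ... }
            , "enum_values": ...
            } } }
, "remotes": { "<remote schema name>": <raw introspection response> }
}
```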

In order for `StoredIntrospection` to be deserializable (through `aeson` instances) while keeping the required code changes small, this PR piggy-backs on the `ResolvedSource` data type. `ResolvedSource` is _almost_ exactly what we want, and _almost_ deserializable, so this PR brings it across the finish line by moving a few things out of that type and adding a `FromJSON (RawFunctionInfo b)` constraint to the `Backend` type class.
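
Concretely, the cache-build entry point now takes an optional `StoredIntrospection`; the call sites touched by this PR pass `Nothing`, preserving live introspection. A minimal usage sketch, where `mStoredIntrospection` is a hypothetical value decoded from a serialized payload:

```haskell
-- Sketch only: `mStoredIntrospection :: Maybe StoredIntrospection` is assumed
-- to come from deserializing a stored payload; passing `Nothing` means
-- "introspect live sources", as in the existing call sites.
result <-
  flip runReaderT buildReason $
    Inc.build
      (buildSchemaCacheRule logger env)
      (metadata, initialInvalidationKeys, mStoredIntrospection)
```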

[PLAT-270]: https://hasurahq.atlassian.net/browse/PLAT-270?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ
[PLAT-276]: https://hasurahq.atlassian.net/browse/PLAT-276?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/7053
GitOrigin-RevId: 5001b4ea086195cb5e65886747eac2a0a657b64c
Auke Booij, 2023-01-20 15:51:11 +01:00 (committed by hasura-bot)
parent ae5f3fe593
commit 83ea4a254d
21 changed files with 227 additions and 144 deletions


@ -28,7 +28,6 @@ import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Function (FunctionOverloads (..))
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.SourceCustomization
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
@ -93,9 +92,8 @@ readNonNegative i paramName =
resolveSource ::
(MonadIO m) =>
BigQuerySourceConfig ->
SourceTypeCustomization ->
m (Either QErr (ResolvedSource 'BigQuery))
resolveSource sourceConfig customization =
m (Either QErr (DBObjectsIntrospection 'BigQuery))
resolveSource sourceConfig =
runExceptT $ do
tables <- getTables sourceConfig
routines <- getRoutines sourceConfig
@ -108,10 +106,8 @@ resolveSource sourceConfig customization =
seconds <- liftIO $ fmap systemSeconds getSystemTime
let functions = FunctionOverloads <$> HM.groupOnNE (routineReferenceToFunctionName . routineReference) restRoutines
pure
( ResolvedSource
{ _rsConfig = sourceConfig,
_rsCustomization = customization,
_rsTables =
( DBObjectsIntrospection
{ _rsTables =
HM.fromList
[ ( restTableReferenceToTableName tableReference,
DBTableMetadata


@ -38,8 +38,7 @@ import Hasura.RQL.Types.Metadata.Backend (BackendMetadata (..))
import Hasura.RQL.Types.Metadata.Object
import Hasura.RQL.Types.SchemaCache qualified as SchemaCache
import Hasura.RQL.Types.SchemaCache.Build
import Hasura.RQL.Types.Source (ResolvedSource (..))
import Hasura.RQL.Types.SourceCustomization (SourceTypeCustomization)
import Hasura.RQL.Types.Source (DBObjectsIntrospection (..))
import Hasura.RQL.Types.Table (ForeignKey (_fkConstraint))
import Hasura.RQL.Types.Table qualified as RQL.T.T
import Hasura.SQL.Backend (BackendSourceKind (..), BackendType (..))
@ -183,9 +182,8 @@ resolveDatabaseMetadata' ::
Applicative m =>
SourceMetadata 'DataConnector ->
DC.SourceConfig ->
SourceTypeCustomization ->
m (Either QErr (ResolvedSource 'DataConnector))
resolveDatabaseMetadata' _ sc@DC.SourceConfig {_scSchema = API.SchemaResponse {..}, ..} customization =
m (Either QErr (DBObjectsIntrospection 'DataConnector))
resolveDatabaseMetadata' _ DC.SourceConfig {_scSchema = API.SchemaResponse {..}, ..} =
let foreignKeys = fmap API._tiForeignKeys _srTables
tables = Map.fromList $ do
API.TableInfo {..} <- _srTables
@ -218,10 +216,8 @@ resolveDatabaseMetadata' _ sc@DC.SourceConfig {_scSchema = API.SchemaResponse {.
pure (coerce _tiName, meta)
in pure $
pure $
ResolvedSource
{ _rsConfig = sc,
_rsCustomization = customization,
_rsTables = tables,
DBObjectsIntrospection
{ _rsTables = tables,
_rsFunctions = mempty,
_rsScalars = mempty
}


@ -40,7 +40,6 @@ import Hasura.RQL.Types.Backend (BackendConfig)
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.EventTrigger (RecreateEventTriggers (..))
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.SourceCustomization
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.Server.Migrate.Version (SourceCatalogMigrationState (..))
@ -66,11 +65,10 @@ resolveSourceConfig _logger name config _backendKind _backendConfig _env _manage
resolveDatabaseMetadata ::
(MonadIO m, MonadBaseControl IO m) =>
MSSQLSourceConfig ->
SourceTypeCustomization ->
m (Either QErr (ResolvedSource 'MSSQL))
resolveDatabaseMetadata config customization = runExceptT do
m (Either QErr (DBObjectsIntrospection 'MSSQL))
resolveDatabaseMetadata config = runExceptT do
dbTablesMetadata <- mssqlRunReadOnly mssqlExecCtx $ loadDBMetadata
pure $ ResolvedSource config customization dbTablesMetadata mempty mempty
pure $ DBObjectsIntrospection dbTablesMetadata mempty mempty
where
MSSQLSourceConfig _connString mssqlExecCtx = config


@ -38,7 +38,6 @@ import Hasura.Prelude
import Hasura.RQL.Types.Backend (BackendConfig)
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.SourceCustomization
import Hasura.RQL.Types.Table (TableEventTriggers)
import Hasura.SQL.Backend
@ -63,11 +62,10 @@ resolveSourceConfig _logger _name csc@ConnSourceConfig {_cscPoolSettings = ConnP
(fromIntegral _cscMaxConnections)
)
resolveDatabaseMetadata :: (MonadIO m) => SourceConfig -> SourceTypeCustomization -> m (Either QErr (ResolvedSource 'MySQL))
resolveDatabaseMetadata sc@SourceConfig {..} sourceCustomization =
runExceptT $ do
metadata <- liftIO $ withResource scConnectionPool (getMetadata scConfig)
pure $ ResolvedSource sc sourceCustomization metadata mempty mempty
resolveDatabaseMetadata :: (MonadIO m) => SourceConfig -> m (Either QErr (DBObjectsIntrospection 'MySQL))
resolveDatabaseMetadata SourceConfig {..} = runExceptT do
metadata <- liftIO $ withResource scConnectionPool (getMetadata scConfig)
pure $ DBObjectsIntrospection metadata mempty mempty
postDropSourceHook ::
(MonadIO m) =>


@ -51,7 +51,6 @@ import Hasura.RQL.Types.EventTrigger (RecreateEventTriggers (..))
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.Metadata (SourceMetadata (..), TableMetadata (..), _cfmDefinition)
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.SourceCustomization
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.Server.Migrate.Internal
@ -160,24 +159,20 @@ resolveDatabaseMetadata ::
) =>
SourceMetadata ('Postgres pgKind) ->
SourceConfig ('Postgres pgKind) ->
SourceTypeCustomization ->
m (Either QErr (ResolvedSource ('Postgres pgKind)))
resolveDatabaseMetadata sourceMetadata sourceConfig sourceCustomization = runExceptT do
(tablesMeta, functionsMeta, pgScalars) <- runTx (_pscExecCtx sourceConfig) PG.ReadOnly $ do
tablesMeta <- fetchTableMetadata $ HM.keysSet $ OMap.toHashMap $ _smTables sourceMetadata
let allFunctions =
Set.fromList $
OMap.keys (_smFunctions sourceMetadata) -- Tracked functions
<> concatMap getComputedFieldFunctionsMetadata (OMap.elems $ _smTables sourceMetadata) -- Computed field functions
functionsMeta <- fetchFunctionMetadata @pgKind allFunctions
pgScalars <- fetchPgScalars
let scalarsMap = Map.fromList do
scalar <- Set.toList pgScalars
name <- afold @(Either QErr) $ mkScalarTypeName scalar
pure (name, scalar)
pure (tablesMeta, functionsMeta, scalarsMap)
pure $ ResolvedSource sourceConfig sourceCustomization tablesMeta functionsMeta (ScalarMap pgScalars)
m (Either QErr (DBObjectsIntrospection ('Postgres pgKind)))
resolveDatabaseMetadata sourceMetadata sourceConfig = runExceptT $ runTx (_pscExecCtx sourceConfig) PG.ReadOnly do
tablesMeta <- fetchTableMetadata $ HM.keysSet $ OMap.toHashMap $ _smTables sourceMetadata
let allFunctions =
Set.fromList $
OMap.keys (_smFunctions sourceMetadata) -- Tracked functions
<> concatMap getComputedFieldFunctionsMetadata (OMap.elems $ _smTables sourceMetadata) -- Computed field functions
functionsMeta <- fetchFunctionMetadata @pgKind allFunctions
pgScalars <- fetchPgScalars
let scalarsMap = Map.fromList do
scalar <- Set.toList pgScalars
name <- afold @(Either QErr) $ mkScalarTypeName scalar
pure (name, scalar)
pure $ DBObjectsIntrospection tablesMeta functionsMeta (ScalarMap scalarsMap)
where
-- A helper function to list all functions underpinning computed fields from a table metadata
getComputedFieldFunctionsMetadata :: TableMetadata ('Postgres pgKind) -> [FunctionName ('Postgres pgKind)]


@ -74,6 +74,13 @@ removeSOH uncons bs =
else bs
Nothing -> bs
-- NB: this is somewhat wasteful, because the design of the `FromJSON` type
-- class forces the incoming `ByteString` value to first be parsed into an
-- `aeson` `Value`, which we then immediately re-serialize here into an
-- `EncJSON`.
instance J.FromJSON EncJSON where
parseJSON = pure . encJFromJValue
-- No other instances for `EncJSON`. In particular, because:
--
-- - Having a `Semigroup` or `Monoid` instance allows constructing semantically


@ -2,7 +2,9 @@
module Hasura.GraphQL.RemoteServer
( fetchRemoteSchema,
stitchRemoteSchema,
execRemoteGQ,
FromIntrospection (..),
)
where
@ -57,13 +59,28 @@ fetchRemoteSchema ::
RemoteSchemaName ->
ValidatedRemoteSchemaDef ->
m (IntrospectionResult, BL.ByteString, RemoteSchemaInfo)
fetchRemoteSchema env manager _rscName rsDef@ValidatedRemoteSchemaDef {..} = do
(_, _, _rscRawIntrospectionResult) <-
fetchRemoteSchema env manager _rscName rsDef = do
(_, _, rawIntrospectionResult) <-
execRemoteGQ env manager adminUserInfo [] rsDef introspectionQuery
(ir, rsi) <- stitchRemoteSchema rawIntrospectionResult _rscName rsDef
-- The 'rawIntrospectionResult' contains the 'ByteString' response to
-- the introspection query against the remote server. We store it in the
-- 'RemoteSchemaCtx' so that we can reuse it when 'introspect_remote_schema'
-- is called, by simply encoding the result to JSON.
pure (ir, rawIntrospectionResult, rsi)
-- | Parses the remote schema introspection result, and checks whether it
-- still looks like a valid GraphQL endpoint under the configured customization.
stitchRemoteSchema ::
(MonadIO m, MonadError QErr m) =>
BL.ByteString ->
RemoteSchemaName ->
ValidatedRemoteSchemaDef ->
m (IntrospectionResult, RemoteSchemaInfo)
stitchRemoteSchema rawIntrospectionResult _rscName rsDef@ValidatedRemoteSchemaDef {..} = do
-- Parse the JSON into a flat GraphQL type AST.
FromIntrospection _rscIntroOriginal <-
J.eitherDecode _rscRawIntrospectionResult `onLeft` (throwRemoteSchema . T.pack)
J.eitherDecode rawIntrospectionResult `onLeft` (throwRemoteSchema . T.pack)
-- Possibly transform type names from the remote schema, per the user's 'RemoteSchemaDef'.
let rsCustomizer = getCustomizer (addDefaultRoots _rscIntroOriginal) _vrsdCustomization
@ -82,12 +99,7 @@ fetchRemoteSchema env manager _rscName rsDef@ValidatedRemoteSchemaDef {..} = do
_rscIntroOriginal
mempty -- remote relationships
remoteSchemaInfo
-- The 'rawIntrospectionResult' contains the 'ByteString' response to
-- the introspection query against the remote server. We store it in the
-- 'RemoteSchemaCtx' so that we can reuse it when 'introspect_remote_schema'
-- is called, by simply encoding the result to JSON.
return (_rscIntroOriginal, _rscRawIntrospectionResult, remoteSchemaInfo)
return (_rscIntroOriginal, remoteSchemaInfo)
where
-- If there is no explicit mutation or subscription root type we need to check for
-- objects type definitions with the default names "Mutation" and "Subscription".


@ -298,7 +298,8 @@ runDeleteRemoteSchemaRemoteRelationship DeleteRemoteSchemaRemoteRelationship {..
-- collection, and used here to build remote field info.
data PartiallyResolvedSource b = PartiallyResolvedSource
{ _prsSourceMetadata :: SourceMetadata b,
_resolvedSource :: ResolvedSource b,
_prsConfig :: SourceConfig b,
_prsIntrospection :: DBObjectsIntrospection b,
_tableCoreInfoMap :: HashMap (TableName b) (TableCoreInfoG b (ColumnInfo b) (ColumnInfo b)),
_eventTriggerInfoMap :: HashMap (TableName b) (EventTriggerInfoMap b)
}
@ -331,7 +332,7 @@ buildRemoteFieldInfo lhsIdentifier lhsJoinFields RemoteRelationship {..} allSour
Map.lookup _tsrdSource allSources
`onNothing` throw400 NotFound ("source not found: " <>> _tsrdSource)
AB.dispatchAnyBackend @Backend targetTables \(partiallyResolvedSource :: PartiallyResolvedSource b') -> do
let PartiallyResolvedSource _ targetSourceInfo targetTablesInfo _ = partiallyResolvedSource
let PartiallyResolvedSource _ sourceConfig _ targetTablesInfo _ = partiallyResolvedSource
(targetTable :: TableName b') <- runAesonParser J.parseJSON _tsrdTable
targetColumns <-
fmap _tciFieldInfoMap $
@ -348,8 +349,7 @@ buildRemoteFieldInfo lhsIdentifier lhsJoinFields RemoteRelationship {..} allSour
ColumnScalar scalarType -> pure scalarType
ColumnEnumReference _ -> throw400 NotSupported "relationships to enum fields are not supported yet"
pure (srcFieldName, (srcColumn, tgtScalar, ciColumn tgtColumn))
let sourceConfig = _rsConfig targetSourceInfo
rsri =
let rsri =
RemoteSourceFieldInfo _rrName _tsrdRelationshipType _tsrdSource sourceConfig targetTable $
fmap (\(_, tgtType, tgtColumn) -> (tgtType, tgtColumn)) $
Map.fromList columnMapping


@ -36,6 +36,7 @@ import Data.Sequence qualified as Seq
import Data.Set qualified as S
import Data.Text.Extended
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.GraphQL.Schema (buildGQLContext)
import Hasura.GraphQL.Schema.NamingCase
import Hasura.Incremental qualified as Inc
@ -149,7 +150,7 @@ buildRebuildableSchemaCacheWithReason ::
buildRebuildableSchemaCacheWithReason reason logger env metadata = do
result <-
flip runReaderT reason $
Inc.build (buildSchemaCacheRule logger env) (metadata, initialInvalidationKeys)
Inc.build (buildSchemaCacheRule logger env) (metadata, initialInvalidationKeys, Nothing)
pure $ RebuildableSchemaCache (Inc.result result) initialInvalidationKeys (Inc.rebuildRule result)
@ -210,7 +211,7 @@ instance
lift $
runCacheBuildM $
flip runReaderT buildReason $
Inc.build rule (metadata, newInvalidationKeys)
Inc.build rule (metadata, newInvalidationKeys, Nothing)
let schemaCache = (Inc.result result) {scMetadataResourceVersion = metadataVersion}
prunedInvalidationKeys = pruneInvalidationKeys schemaCache newInvalidationKeys
!newCache = RebuildableSchemaCache schemaCache prunedInvalidationKeys (Inc.rebuildRule result)
@ -310,15 +311,15 @@ buildSchemaCacheRule ::
) =>
Logger Hasura ->
Env.Environment ->
(Metadata, InvalidationKeys) `arr` SchemaCache
buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) -> do
(Metadata, InvalidationKeys, Maybe StoredIntrospection) `arr` SchemaCache
buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys, storedIntrospection) -> do
invalidationKeysDep <- Inc.newDependency -< invalidationKeys
metadataDefaults <- bindA -< askMetadataDefaults
let metadata@Metadata {..} = overrideMetadataDefaults metadataNoDefaults metadataDefaults
metadataDep <- Inc.newDependency -< metadata
(inconsistentObjects, (resolvedOutputs, dependencyInconsistentObjects, resolvedDependencies), ((adminIntrospection, gqlContext, gqlContextUnauth, inconsistentRemoteSchemas), (relayContext, relayContextUnauth))) <-
Inc.cache buildOutputsAndSchema -< (metadataDep, invalidationKeysDep)
Inc.cache buildOutputsAndSchema -< (metadataDep, invalidationKeysDep, storedIntrospection)
let (resolvedEndpoints, endpointCollectedInfo) = runIdentity $ runWriterT $ buildRESTEndpoints _metaQueryCollections (OMap.elems _metaRestEndpoints)
(cronTriggersMap, cronTriggersCollectedInfo) = runIdentity $ runWriterT $ buildCronTriggers (OMap.elems _metaCronTriggers)
@ -413,8 +414,8 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
}
where
-- See Note [Avoiding GraphQL schema rebuilds when changing irrelevant Metadata]
buildOutputsAndSchema = proc (metadataDep, invalidationKeysDep) -> do
(outputs, collectedInfo) <- runWriterA buildAndCollectInfo -< (metadataDep, invalidationKeysDep)
buildOutputsAndSchema = proc (metadataDep, invalidationKeysDep, storedIntrospection) -> do
(outputs, collectedInfo) <- runWriterA buildAndCollectInfo -< (metadataDep, invalidationKeysDep, storedIntrospection)
let (inconsistentObjects, unresolvedDependencies) = partitionEithers $ toList collectedInfo
out2@(resolvedOutputs, _dependencyInconsistentObjects, _resolvedDependencies) <- resolveDependencies -< (outputs, unresolvedDependencies)
out3 <-
@ -505,26 +506,30 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
BackendMetadata b
) =>
( Inc.Dependency (HashMap SourceName Inc.InvalidationKey),
Maybe (BackendIntrospection b),
BackendInfoAndSourceMetadata b
)
`arr` Maybe (ResolvedSource b)
tryResolveSource = Inc.cache proc (invalidationKeys, BackendInfoAndSourceMetadata {..}) -> do
`arr` Maybe (SourceConfig b, DBObjectsIntrospection b)
tryResolveSource = Inc.cache proc (invalidationKeys, sourceIntrospection, BackendInfoAndSourceMetadata {..}) -> do
let sourceName = _smName _bcasmSourceMetadata
metadataObj = MetadataObject (MOSource sourceName) $ toJSON sourceName
maybeSourceConfig <- tryGetSourceConfig @b -< (invalidationKeys, sourceName, _smConfiguration _bcasmSourceMetadata, _smKind _bcasmSourceMetadata, _bcasmBackendInfo)
case maybeSourceConfig of
Nothing -> returnA -< Nothing
Just sourceConfig ->
(|
withRecordInconsistency
( liftEitherA <<< bindA
-< do
resSource <- resolveDatabaseMetadata _bcasmSourceMetadata sourceConfig (getSourceTypeCustomization $ _smCustomization _bcasmSourceMetadata)
for_ resSource $ liftIO . unLogger logger
pure resSource
)
|) metadataObj
Just sourceConfig -> do
case biMetadata <$> sourceIntrospection of
Just rs -> returnA -< Just (sourceConfig, rs)
_ ->
(|
withRecordInconsistency
( liftEitherA <<< bindA
-< do
resSource <- resolveDatabaseMetadata _bcasmSourceMetadata sourceConfig
for_ resSource $ liftIO . unLogger logger
pure $ (sourceConfig,) <$> resSource
)
|) metadataObj
-- impl notes (swann):
--
@ -706,8 +711,8 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
HasServerConfigCtx m,
MonadResolveSource m
) =>
(Inc.Dependency Metadata, Inc.Dependency InvalidationKeys) `arr` BuildOutputs
buildAndCollectInfo = proc (metadataDep, invalidationKeys) -> do
(Inc.Dependency Metadata, Inc.Dependency InvalidationKeys, Maybe StoredIntrospection) `arr` BuildOutputs
buildAndCollectInfo = proc (metadataDep, invalidationKeys, storedIntrospection) -> do
sources <- Inc.dependOn -< Inc.selectD #_metaSources metadataDep
remoteSchemas <- Inc.dependOn -< Inc.selectD #_metaRemoteSchemas metadataDep
customTypes <- Inc.dependOn -< Inc.selectD #_metaCustomTypes metadataDep
@ -741,7 +746,7 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
-- remote schemas
let remoteSchemaInvalidationKeys = Inc.selectD #_ikRemoteSchemas invalidationKeys
remoteSchemaMap <- buildRemoteSchemas env -< ((remoteSchemaInvalidationKeys, orderedRoles), OMap.elems remoteSchemas)
remoteSchemaMap <- buildRemoteSchemas env -< ((remoteSchemaInvalidationKeys, orderedRoles, fmap encJToLBS . siRemotes <$> storedIntrospection), OMap.elems remoteSchemas)
let remoteSchemaCtxMap = M.map fst remoteSchemaMap
!defaultNC <- bindA -< _sccDefaultNamingConvention <$> askServerConfigCtx
@ -759,14 +764,15 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
Inc.keyed
( \_ exists ->
AB.dispatchAnyBackendArrow @BackendMetadata @BackendEventTrigger
( proc (backendInfoAndSourceMetadata, (invalidationKeys, defaultNC, isNamingConventionEnabled)) -> do
( proc (backendInfoAndSourceMetadata :: BackendInfoAndSourceMetadata b, (invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled)) -> do
let sourceMetadata = _bcasmSourceMetadata backendInfoAndSourceMetadata
sourceName = _smName sourceMetadata
sourceInvalidationsKeys = Inc.selectD #_ikSources invalidationKeys
maybeResolvedSource <- tryResolveSource -< (sourceInvalidationsKeys, backendInfoAndSourceMetadata)
sourceIntrospection = AB.unpackAnyBackend @b =<< M.lookup sourceName =<< siBackendIntrospection <$> storedIntrospection
maybeResolvedSource <- tryResolveSource -< (sourceInvalidationsKeys, sourceIntrospection, backendInfoAndSourceMetadata)
case maybeResolvedSource of
Nothing -> returnA -< Nothing
Just (source :: ResolvedSource b) -> do
Just (sourceConfig, source) -> do
let metadataInvalidationKey = Inc.selectD #_ikMetadata invalidationKeys
(tableInputs, _, _) = unzip3 $ map mkTableInputs $ OMap.elems $ _smTables sourceMetadata
!namingConv = if isNamingConventionEnabled then getNamingConvention (_smCustomization sourceMetadata) defaultNC else HasuraCase
@ -774,17 +780,17 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
buildTableCache
-<
( sourceName,
_rsConfig source,
sourceConfig,
_rsTables source,
tableInputs,
metadataInvalidationKey,
sourceIntrospection,
namingConv
)
let tablesMetadata = OMap.elems $ _smTables sourceMetadata
eventTriggers = map (_tmTable &&& OMap.elems . _tmEventTriggers) tablesMetadata
numEventTriggers = sum $ map (length . snd) eventTriggers
sourceConfig = _rsConfig source
(recreateEventTriggers, sourceCatalogMigrationState) <- initCatalogIfNeeded -< (Proxy :: Proxy b, numEventTriggers > 0, sourceConfig)
@ -805,10 +811,10 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
-<
Just $
AB.mkAnyBackend @b $
PartiallyResolvedSource sourceMetadata source tablesCoreInfo eventTriggerInfoMaps
PartiallyResolvedSource sourceMetadata sourceConfig source tablesCoreInfo eventTriggerInfoMaps
)
-<
(exists, (invalidationKeys, defaultNC, isNamingConventionEnabled))
(exists, (invalidationKeys, storedIntrospection, defaultNC, isNamingConventionEnabled))
)
|) (M.fromList $ OMap.toList backendInfoAndSourceMetadata)
let partiallyResolvedSources = catMaybes partiallyResolvedSourcesMaybes
@ -831,8 +837,8 @@ buildSchemaCacheRule logger env = proc (metadataNoDefaults, invalidationKeys) ->
(allResolvedSources, remoteSchemaCtxMap, orderedRoles)
)
-> do
let PartiallyResolvedSource sourceMetadata resolvedSource tablesInfo eventTriggers = partiallyResolvedSource
ResolvedSource sourceConfig _sourceCustomization tablesMeta functionsMeta scalars = resolvedSource
let PartiallyResolvedSource sourceMetadata sourceConfig introspection tablesInfo eventTriggers = partiallyResolvedSource
DBObjectsIntrospection tablesMeta functionsMeta scalars = introspection
so <-
Inc.cache buildSource
-<


@ -1,4 +1,5 @@
{-# LANGUAGE Arrows #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
@ -8,10 +9,12 @@ module Hasura.RQL.DDL.Schema.Cache.Common
( ApolloFederationConfig (..),
ApolloFederationVersion (..),
BackendInvalidationKeysWrapper (..),
BackendIntrospection (..),
BuildOutputs (..),
CacheBuild,
CacheBuildParams (CacheBuildParams),
InvalidationKeys (..),
StoredIntrospection (..),
ikMetadata,
ikRemoteSchemas,
ikSources,
@ -46,14 +49,17 @@ import Control.Arrow.Interpret
import Control.Lens
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Data.Aeson.Extended
import Data.HashMap.Strict.Extended qualified as M
import Data.HashMap.Strict.InsOrd qualified as OMap
import Data.Sequence qualified as Seq
import Data.Text.Extended
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.Incremental qualified as Inc
import Hasura.Prelude
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Metadata
@ -68,6 +74,7 @@ import Hasura.RQL.Types.SchemaCache
import Hasura.RQL.Types.SchemaCache.Build
import Hasura.RQL.Types.Source
import Hasura.RemoteSchema.Metadata
import Hasura.SQL.AnyBackend
import Hasura.SQL.Backend
import Hasura.SQL.BackendMap (BackendMap)
import Hasura.SQL.BackendMap qualified as BackendMap
@ -128,6 +135,51 @@ invalidateKeys CacheInvalidations {..} InvalidationKeys {..} =
invalidateDataConnectors (BackendInvalidationKeysWrapper invalidationKeys) =
BackendInvalidationKeysWrapper $ foldl' (flip invalidate) invalidationKeys ciDataConnectors
data BackendIntrospection (b :: BackendType) = BackendIntrospection
{ biMetadata :: DBObjectsIntrospection b,
biEnumValues :: HashMap (TableName b) EnumValues
}
deriving stock (Generic)
instance Backend b => FromJSON (BackendIntrospection b) where
parseJSON = withObject "BackendIntrospection" \o -> do
metadata <- o .: "metadata"
enumValues <- o .: "enum_values"
pure $ BackendIntrospection metadata (M.fromList enumValues)
deriving stock instance BackendMetadata b => Eq (BackendIntrospection b)
data StoredIntrospection = StoredIntrospection
{ siBackendIntrospection :: HashMap SourceName (AnyBackend BackendIntrospection),
-- We'd prefer to pass the results of introspecting remote GraphQL schemas
-- as a structured Haskell type, rather than the opaque `EncJSON`. What
-- makes this complicated is that, in the `introspect_remote_schema` API, we
-- need to return the result of the specific introspection query stored in
-- `server/src-rsr/introspection.json`. That has a very specific format
-- (see e.g. `fragment TypeRef`). Additionally, it requires us to return
-- introspection results for directives, which so far we have avoided
-- parsing entirely (for better or for worse).
--
-- In the future, perhaps we can change the implementation of
-- `introspect_remote_schema` to be backed by Stored Introspection directly.
-- Then we could pass around highly structured data internally here, but
-- also return the original raw introspection to the user on request. But
-- this approach would require a wholesale commitment to Stored
-- Introspection.
siRemotes :: HashMap RemoteSchemaName EncJSON
}
instance Eq StoredIntrospection where
-- compare introspected remotes by their serialized bytes, not by JSON value equality
StoredIntrospection bi1 r1 == StoredIntrospection bi2 r2 = bi1 == bi2 && (encJToBS <$> r1) == (encJToBS <$> r2)
instance FromJSON StoredIntrospection where
parseJSON = withObject "StoredIntrospection" \o -> do
-- Use of `parseJSONKeyValue` here means that the backend type is specified as a key
backendIntrospection <- traverse parseJSONKeyValue =<< o .: "backend_introspection"
remotes <- o .: "remotes"
pure $ StoredIntrospection backendIntrospection remotes
data TableBuildInput b = TableBuildInput
{ _tbiName :: TableName b,
_tbiIsEnum :: Bool,
@ -210,7 +262,7 @@ data CacheBuildParams = CacheBuildParams
-- | The monad in which @'RebuildableSchemaCache' is being run
newtype CacheBuild a = CacheBuild (ReaderT CacheBuildParams (ExceptT QErr IO) a)
deriving
deriving newtype
( Functor,
Applicative,
Monad,
@ -263,7 +315,7 @@ runCacheBuildM m = do
data RebuildableSchemaCache = RebuildableSchemaCache
{ lastBuiltSchemaCache :: SchemaCache,
_rscInvalidationMap :: InvalidationKeys,
_rscRebuild :: Inc.Rule (ReaderT BuildReason CacheBuild) (Metadata, InvalidationKeys) SchemaCache
_rscRebuild :: Inc.Rule (ReaderT BuildReason CacheBuild) (Metadata, InvalidationKeys, Maybe StoredIntrospection) SchemaCache
}
bindErrorA ::


@ -428,10 +428,11 @@ buildTableCache ::
DBTablesMetadata b,
[TableBuildInput b],
Inc.Dependency Inc.InvalidationKey,
Maybe (BackendIntrospection b),
NamingCase
)
`arr` Map.HashMap (TableName b) (TableCoreInfoG b (ColumnInfo b) (ColumnInfo b))
buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuildInputs, reloadMetadataInvalidationKey, tCase) -> do
buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuildInputs, reloadMetadataInvalidationKey, sourceIntrospection, tCase) -> do
rawTableInfos <-
(|
Inc.keyed
@ -446,7 +447,7 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
-<
err400 NotExists $ "no such table/view exists in source: " <>> _tbiName table
Just metadataTable ->
buildRawTableInfo -< (table, metadataTable, sourceConfig, reloadMetadataInvalidationKey)
buildRawTableInfo -< (table, metadataTable, sourceConfig, reloadMetadataInvalidationKey, biEnumValues <$> sourceIntrospection)
)
|) (mkTableMetadataObject source tableName)
)
@ -480,24 +481,28 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
( TableBuildInput b,
DBTableMetadata b,
SourceConfig b,
Inc.Dependency Inc.InvalidationKey
Inc.Dependency Inc.InvalidationKey,
Maybe (HashMap (TableName b) EnumValues)
)
(TableCoreInfoG b (RawColumnInfo b) (Column b))
buildRawTableInfo = Inc.cache proc (tableBuildInput, metadataTable, sourceConfig, reloadMetadataInvalidationKey) -> do
buildRawTableInfo = Inc.cache proc (tableBuildInput, metadataTable, sourceConfig, reloadMetadataInvalidationKey, sourceIntrospection) -> do
let TableBuildInput name isEnum config apolloFedConfig = tableBuildInput
columns :: [RawColumnInfo b] = _ptmiColumns metadataTable
columnMap = mapFromL (FieldName . toTxt . rciName) columns
primaryKey = _ptmiPrimaryKey metadataTable
description = buildDescription name config metadataTable
rawPrimaryKey <- liftEitherA -< traverse (resolvePrimaryKeyColumns columnMap) primaryKey
enumValues <-
enumValues <- do
if isEnum
then do
-- We want to make sure we reload enum values whenever someone explicitly calls
-- `reload_metadata`.
Inc.dependOn -< reloadMetadataInvalidationKey
eitherEnums <- bindA -< fetchAndValidateEnumValues sourceConfig name rawPrimaryKey columns
liftEitherA -< Just <$> eitherEnums
case HM.lookup name =<< sourceIntrospection of
Just enumValues -> returnA -< Just enumValues
_ -> do
-- We want to make sure we reload enum values whenever someone explicitly calls
-- `reload_metadata`.
Inc.dependOn -< reloadMetadataInvalidationKey
eitherEnums <- bindA -< fetchAndValidateEnumValues sourceConfig name rawPrimaryKey columns
liftEitherA -< Just <$> eitherEnums
else returnA -< Nothing
returnA


@ -104,6 +104,7 @@ class
FromJSON (ComputedFieldDefinition b),
FromJSON (BackendSourceKind b),
FromJSON (HealthCheckTest b),
FromJSON (RawFunctionInfo b),
FromJSONKey (Column b),
HasCodec (BackendSourceKind b),
HasCodec (Column b),


@ -235,6 +235,8 @@ instance FromJSON SourceName where
"default" -> pure SNDefault
t -> SNName <$> parseJSON (String t)
instance FromJSONKey SourceName
instance HasCodec SourceName where
codec = dimapCodec dec enc nonEmptyTextCodec
where


@ -26,7 +26,6 @@ import Hasura.RQL.Types.Relationships.Local
import Hasura.RQL.Types.SchemaCache
import Hasura.RQL.Types.SchemaCache.Build
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.SourceCustomization
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.SQL.Types
@ -105,8 +104,7 @@ class
(MonadIO m, MonadBaseControl IO m, MonadResolveSource m) =>
SourceMetadata b ->
SourceConfig b ->
SourceTypeCustomization ->
m (Either QErr (ResolvedSource b))
m (Either QErr (DBObjectsIntrospection b))
parseBoolExpOperations ::
(MonadError QErr m, TableCoreInfoRM b m) =>


@ -1,3 +1,4 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
@ -22,7 +23,7 @@ module Hasura.RQL.Types.Source
_siNativeQueries,
-- * Schema cache
ResolvedSource (..),
DBObjectsIntrospection (..),
ScalarMap (..),
-- * Source resolver
@ -44,7 +45,7 @@ where
import Control.Lens hiding ((.=))
import Data.Aeson.Extended
import Data.HashMap.Strict qualified as HM
import Data.HashMap.Strict qualified as Map
import Data.Maybe (fromJust)
import Data.Text.Lazy qualified as TL
import Data.Text.Lazy.Encoding qualified as BS
@ -90,7 +91,7 @@ _siNativeQueries :: forall b. Backend b => CustomSQLFields b -> NativeQueryCache
_siNativeQueries = foldMap toItem
where
toItem :: CustomSQLMetadata b -> HashMap NativeQueryName (NativeQueryInfo b)
toItem csm = HM.fromList [(toNativeQueryName (_csmRootFieldName csm), toInfo csm)]
toItem csm = Map.fromList [(toNativeQueryName (_csmRootFieldName csm), toInfo csm)]
toNativeQueryName :: G.Name -> NativeQueryName
toNativeQueryName = NativeQueryName . G.unName
@ -112,7 +113,7 @@ _siNativeQueries = foldMap toItem
toArgs = foldMap toArg
toArg :: CustomSQLParameter -> HashMap NativeQueryArgumentName (ScalarType b)
toArg CustomSQLParameter {..} = HM.fromList [(toArgName cspName, toScalarType cspType)]
toArg CustomSQLParameter {..} = Map.fromList [(toArgName cspName, toScalarType cspType)]
toArgName :: CustomSQLParameterName -> NativeQueryArgumentName
toArgName CustomSQLParameterName {..} = NativeQueryArgumentName cspnName
@ -174,18 +175,27 @@ unsafeSourceConfiguration = fmap _siConfiguration . unsafeSourceInfo @b
--------------------------------------------------------------------------------
-- Schema cache
-- | Contains Postgres connection configuration and essential metadata from the
-- database, used to build the schema cache for tables and functions.
data ResolvedSource b = ResolvedSource
{ _rsConfig :: SourceConfig b,
_rsCustomization :: SourceTypeCustomization,
_rsTables :: DBTablesMetadata b,
-- | Contains metadata (introspection) from the database, used to build the
-- schema cache. This type only contains results of introspecting DB objects,
-- i.e. the DB types specified by tables, functions, and scalars. Notably, it
-- does not include the additional introspection that takes place on Postgres,
-- namely reading the contents of tables used as Enum Values -- see
-- @fetchAndValidateEnumValues@.
data DBObjectsIntrospection b = DBObjectsIntrospection
{ _rsTables :: DBTablesMetadata b,
_rsFunctions :: DBFunctionsMetadata b,
_rsScalars :: ScalarMap b
}
deriving (Eq)
deriving (Eq, Generic)
instance (L.ToEngineLog (ResolvedSource b) L.Hasura) where
instance Backend b => FromJSON (DBObjectsIntrospection b) where
parseJSON = withObject "DBObjectsIntrospection" \o -> do
tables <- o .: "tables"
functions <- o .: "functions"
scalars <- o .: "scalars"
pure $ DBObjectsIntrospection (Map.fromList tables) (Map.fromList functions) (ScalarMap (Map.fromList scalars))
instance (L.ToEngineLog (DBObjectsIntrospection b) L.Hasura) where
toEngineLog _ = (L.LevelDebug, L.ELTStartup, toJSON rsLog)
where
rsLog =
@ -195,16 +205,10 @@ instance (L.ToEngineLog (ResolvedSource b) L.Hasura) where
]
-- | A map from GraphQL name to equivalent scalar type for a given backend.
data ScalarMap b where
ScalarMap :: Backend b => HashMap G.Name (ScalarType b) -> ScalarMap b
newtype ScalarMap b = ScalarMap (HashMap G.Name (ScalarType b))
deriving newtype (Semigroup, Monoid)
deriving stock instance Eq (ScalarMap b)
instance Backend b => Semigroup (ScalarMap b) where
ScalarMap s1 <> ScalarMap s2 = ScalarMap $ s1 <> s2
instance Backend b => Monoid (ScalarMap b) where
mempty = ScalarMap mempty
deriving stock instance Backend b => Eq (ScalarMap b)
--------------------------------------------------------------------------------
-- Source resolver


@ -25,6 +25,7 @@ newtype RemoteSchemaName = RemoteSchemaName
J.ToJSON,
J.ToJSONKey,
J.FromJSON,
J.FromJSONKey,
PG.ToPrepArg,
PG.FromCol,
ToTxt,


@ -81,7 +81,7 @@ instance J.FromJSON RemoteSchemaDef where
<*> o J..:? "timeout_seconds"
<*> o J..:? "customization"
getUrlFromEnv :: (MonadIO m, MonadError QErr m) => Env.Environment -> Text -> m (EnvRecord N.URI)
getUrlFromEnv :: (MonadError QErr m) => Env.Environment -> Text -> m (EnvRecord N.URI)
getUrlFromEnv env urlFromEnv = do
let mEnv = Env.lookupEnv env $ T.unpack urlFromEnv
uri <- onNothing mEnv (throw400 InvalidParams $ envNotFoundMsg urlFromEnv)


@ -14,7 +14,7 @@ import Data.Environment qualified as Env
import Data.HashMap.Strict.Extended qualified as M
import Data.Text.Extended
import Hasura.Base.Error
import Hasura.GraphQL.RemoteServer (fetchRemoteSchema)
import Hasura.GraphQL.RemoteServer
import Hasura.Incremental qualified as Inc
import Hasura.Prelude
import Hasura.RQL.DDL.Schema.Cache.Common
@ -46,7 +46,7 @@ buildRemoteSchemas ::
MonadError QErr m
) =>
Env.Environment ->
( (Inc.Dependency (HashMap RemoteSchemaName Inc.InvalidationKey), OrderedRoles),
( (Inc.Dependency (HashMap RemoteSchemaName Inc.InvalidationKey), OrderedRoles, Maybe (HashMap RemoteSchemaName BL.ByteString)),
[RemoteSchemaMetadataG remoteRelationshipDefinition]
)
`arr` HashMap RemoteSchemaName (PartiallyResolvedRemoteSchemaCtxG remoteRelationshipDefinition, MetadataObject)
@ -56,14 +56,19 @@ buildRemoteSchemas env =
-- We want to cache this call because it fetches the remote schema over
-- HTTP, and we don't want to re-run that if the remote schema definition
-- hasn't changed.
buildRemoteSchema = Inc.cache proc ((invalidationKeys, orderedRoles), remoteSchema@(RemoteSchemaMetadata name defn _comment permissions relationships)) -> do
buildRemoteSchema = Inc.cache proc ((invalidationKeys, orderedRoles, storedIntrospection), remoteSchema@(RemoteSchemaMetadata name defn _comment permissions relationships)) -> do
Inc.dependOn -< Inc.selectKeyD name invalidationKeys
remoteSchemaContextParts <-
(|
withRecordInconsistency
( liftEitherA <<< bindA
-<
runExceptT $ noopTrace $ addRemoteSchemaP2Setup env name defn
-< runExceptT
case M.lookup name =<< storedIntrospection of
Nothing -> noopTrace $ addRemoteSchemaP2Setup env name defn
Just rawIntro -> do
rsDef <- validateRemoteSchemaDef env defn
(ir, rsi) <- stitchRemoteSchema rawIntro name rsDef
pure (ir, rawIntro, rsi)
)
|) (mkRemoteSchemaMetadataObject remoteSchema)
case remoteSchemaContextParts of
@ -72,8 +77,8 @@ buildRemoteSchemas env =
-- we then resolve permissions
resolvedPermissions <- buildRemoteSchemaPermissions -< ((name, introspection, orderedRoles), fmap (name,) permissions)
-- resolve remote relationships
let transformedRelationships = flip fmap relationships $ \RemoteSchemaTypeRelationships {..} -> fmap (PartiallyResolvedRemoteRelationship _rstrsName) _rstrsRelationships
let remoteSchemaContext =
let transformedRelationships = relationships <&> \RemoteSchemaTypeRelationships {..} -> PartiallyResolvedRemoteRelationship _rstrsName <$> _rstrsRelationships
remoteSchemaContext =
RemoteSchemaCtx
{ _rscName = name,
_rscIntroOriginal = introspection,
@ -96,7 +101,7 @@ buildRemoteSchemaPermissions ::
( ArrowChoice arr,
Inc.ArrowDistribute arr,
ArrowWriter (Seq (Either InconsistentMetadata MetadataDependency)) arr,
Inc.ArrowCache m arr,
ArrowKleisli m arr,
MonadError QErr m
) =>
-- this ridiculous duplication of [(RemoteSchemaName, RemoteSchemaPermissionMetadata)]
@ -169,6 +174,6 @@ addRemoteSchemaP2Setup ::
RemoteSchemaDef ->
m (IntrospectionResult, BL.ByteString, RemoteSchemaInfo)
addRemoteSchemaP2Setup env name def = do
httpMgr <- askHttpManager
rsi <- validateRemoteSchemaDef env def
httpMgr <- askHttpManager
fetchRemoteSchema env httpMgr name rsi


@ -206,7 +206,7 @@ validateRemoteSchemaCustomization (Just RemoteSchemaCustomization {..}) =
isReservedName = ("__" `T.isPrefixOf`) . G.unName
validateRemoteSchemaDef ::
(MonadError QErr m, MonadIO m) =>
(MonadError QErr m) =>
Env.Environment ->
RemoteSchemaDef ->
m ValidatedRemoteSchemaDef


@ -112,6 +112,8 @@ import Control.Applicative
import Control.Arrow.Extended (ArrowChoice)
import Control.Lens (preview, _Right)
import Data.Aeson
import Data.Aeson.Extended
import Data.Aeson.Key qualified as Key
import Data.Aeson.Types (Parser)
import Data.Kind (Constraint, Type)
import Hasura.Backends.DataConnector.Adapter.Types (mkDataConnectorName)
@ -503,6 +505,11 @@ deriving instance i `SatisfiesForAllBackends` Ord => Ord (AnyBackend i)
instance i `SatisfiesForAllBackends` Hashable => Hashable (AnyBackend i)
instance i `SatisfiesForAllBackends` FromJSON => FromJSONKeyValue (AnyBackend i) where
parseJSONKeyValue (backendTypeStr, value) = do
backendType <- parseBackendTypeFromText $ Key.toText backendTypeStr
parseAnyBackendFromJSON backendType value
backendSourceKindFromText :: Text -> Maybe (AnyBackend BackendSourceKind)
backendSourceKindFromText text =
PostgresVanillaValue <$> staticKindFromText PostgresVanillaKind


@ -14,8 +14,8 @@ where
--------------------------------------------------------------------------------
import Data.Aeson (FromJSON, Key, ToJSON)
import Data.Aeson qualified as Aeson
import Data.Aeson.Extended
import Data.Aeson.Key qualified as Key
import Data.Aeson.KeyMap qualified as KeyMap
import Data.Data
@ -26,8 +26,8 @@ import Data.Text.Extended (toTxt)
import Hasura.Incremental.Internal.Dependency (Dependency (..), selectD)
import Hasura.Incremental.Select
import Hasura.Prelude hiding (empty, lookup, modify)
import Hasura.SQL.AnyBackend (AnyBackend, SatisfiesForAllBackends, dispatchAnyBackend'', mergeAnyBackend, mkAnyBackend, parseAnyBackendFromJSON, unpackAnyBackend)
import Hasura.SQL.Backend (BackendType, parseBackendTypeFromText)
import Hasura.SQL.AnyBackend
import Hasura.SQL.Backend (BackendType)
import Hasura.SQL.Tag (BackendTag, HasTag, backendTag, reify)
--------------------------------------------------------------------------------
@ -47,9 +47,9 @@ instance i `SatisfiesForAllBackends` FromJSON => FromJSON (BackendMap i) where
Aeson.withObject "BackendMap" $ \obj -> do
BackendMap . Map.fromList
<$> traverse
( \(backendTypeStr, val) -> do
backendType <- parseBackendTypeFromText $ Key.toText backendTypeStr
(backendType,) <$> parseAnyBackendFromJSON backendType val
( \keyValue -> do
out <- parseJSONKeyValue keyValue
pure $ (lowerTag out, out)
)
(KeyMap.toList obj)