Logical models in get_table_info

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9798
GitOrigin-RevId: ab7b1dc0a4504c1ec103c449ca2e444589673c0f
This commit is contained in:
David Overton 2023-07-12 12:07:53 +10:00 committed by hasura-bot
parent 6ea943c4b1
commit a46f07ea45
11 changed files with 113 additions and 106 deletions

View File

@ -281,6 +281,7 @@ common lib-depends
, dependent-sum
, dependent-sum-template
, either
, extra
, hasura-error-message
, exceptions
, fast-logger
@ -837,6 +838,7 @@ library
, Hasura.RQL.Types.Source
, Hasura.RQL.Types.Source.Column
, Hasura.RQL.Types.Source.Table
, Hasura.RQL.Types.Source.TableType
, Hasura.RQL.Types.SourceConfiguration
, Hasura.RQL.Types.SourceCustomization
, Hasura.RQL.Types.Subscription

View File

@ -53,6 +53,7 @@ import Hasura.Function.Common
import Hasura.Incremental qualified as Inc
import Hasura.Incremental.Select qualified as Inc
import Hasura.Logging (Hasura, Logger)
import Hasura.LogicalModel.Cache
import Hasura.LogicalModel.Metadata (LogicalModelMetadata (..))
import Hasura.LogicalModel.Types
import Hasura.Prelude
@ -645,21 +646,22 @@ getVolatility API.FWrite = FTVOLATILE
getTableInfo' :: (CacheRM m, MetadataM m, MonadError QErr m) => SourceName -> DC.TableName -> m (Maybe (SourceTableInfo 'DataConnector))
getTableInfo' sourceName tableName = do
SourceInfo {_siDbObjectsIntrospection} <- askSourceInfo @'DataConnector sourceName
SourceInfo {_siDbObjectsIntrospection, _siTables, _siLogicalModels} <- askSourceInfo @'DataConnector sourceName
let tables :: HashMap DC.TableName (RQL.T.T.DBTableMetadata 'DataConnector)
tables = _rsTables _siDbObjectsIntrospection
pure $ fmap (convertTableMetadataToTableInfo tableName) (HashMap.lookup tableName tables)
pure $ convertTableMetadataToTableInfo tableName _siLogicalModels <$> HashMap.lookup tableName tables <*> HashMap.lookup tableName _siTables
convertTableMetadataToTableInfo :: DC.TableName -> RQL.T.T.DBTableMetadata 'DataConnector -> SourceTableInfo 'DataConnector
convertTableMetadataToTableInfo tableName RQL.T.T.DBTableMetadata {..} =
convertTableMetadataToTableInfo :: DC.TableName -> LogicalModelCache 'DataConnector -> RQL.T.T.DBTableMetadata 'DataConnector -> RQL.T.T.TableInfo 'DataConnector -> SourceTableInfo 'DataConnector
convertTableMetadataToTableInfo tableName logicalModelCache RQL.T.T.DBTableMetadata {..} RQL.T.T.TableInfo {..} =
SourceTableInfo
{ _stiName = Witch.from tableName,
_stiType = case DC._etmTableType _ptmiExtraTableMetadata of
DC.Table -> Table
DC.View -> View,
_stiColumns = convertColumn <$> _ptmiColumns,
_stiColumns = convertColumn <$> RQL.T.T._tciRawColumns _tiCoreInfo,
_stiLogicalModels = fmap logicalModelInfoToMetadata . HashMap.elems $ foldl' collectLogicalModels mempty $ RQL.T.C.rciType <$> RQL.T.T._tciRawColumns _tiCoreInfo,
_stiPrimaryKey = fmap Witch.from . toNonEmpty . RQL.T.T._pkColumns <$> _ptmiPrimaryKey,
_stiForeignKeys = convertForeignKeys _ptmiForeignKeys,
_stiDescription = getPGDescription <$> _ptmiDescription,
@ -688,6 +690,37 @@ convertTableMetadataToTableInfo tableName RQL.T.T.DBTableMetadata {..} =
where
extraColumnMetadata = HashMap.lookup rciName . DC._etmExtraColumnMetadata $ _ptmiExtraTableMetadata
collectLogicalModels :: LogicalModelCache 'DataConnector -> RQL.T.C.RawColumnType 'DataConnector -> LogicalModelCache 'DataConnector
collectLogicalModels seenLogicalModels = \case
RQL.T.C.RawColumnTypeScalar _ -> seenLogicalModels
RQL.T.C.RawColumnTypeObject _ name -> collectLogicalModelName seenLogicalModels (LogicalModelName name)
RQL.T.C.RawColumnTypeArray _ rawColumnType _ -> collectLogicalModels seenLogicalModels rawColumnType
collectLogicalModelName :: LogicalModelCache 'DataConnector -> LogicalModelName -> LogicalModelCache 'DataConnector
collectLogicalModelName seenLogicalModels logicalModelName
| logicalModelName `HashMap.member` seenLogicalModels = seenLogicalModels
| otherwise =
case HashMap.lookup logicalModelName logicalModelCache of
Nothing -> seenLogicalModels
Just logicalModelInfo ->
let seenLogicalModels' = HashMap.insert logicalModelName logicalModelInfo seenLogicalModels
in foldl' collectLogicalModelType seenLogicalModels' (fmap lmfType $ InsOrdHashMap.elems $ _lmiFields logicalModelInfo)
collectLogicalModelType :: LogicalModelCache 'DataConnector -> LogicalModelType 'DataConnector -> LogicalModelCache 'DataConnector
collectLogicalModelType seenLogicalModels = \case
LogicalModelTypeScalar _ -> seenLogicalModels
LogicalModelTypeArray LogicalModelTypeArrayC {..} -> collectLogicalModelType seenLogicalModels lmtaArray
LogicalModelTypeReference LogicalModelTypeReferenceC {..} -> collectLogicalModelName seenLogicalModels lmtrReference
logicalModelInfoToMetadata :: LogicalModelInfo 'DataConnector -> LogicalModelMetadata 'DataConnector
logicalModelInfoToMetadata LogicalModelInfo {..} =
LogicalModelMetadata
{ _lmmName = _lmiName,
_lmmFields = _lmiFields,
_lmmDescription = _lmiDescription,
_lmmSelectPermissions = mempty
}
convertForeignKeys :: HashSet (RQL.T.T.ForeignKeyMetadata 'DataConnector) -> SourceForeignKeys 'DataConnector
convertForeignKeys foreignKeys =
foreignKeys

View File

@ -332,6 +332,7 @@ instance
_stiForeignKeys = convertForeignKeys _ptmiForeignKeys,
_stiPrimaryKey = fmap (toNonEmpty . _pkColumns) _ptmiPrimaryKey,
_stiColumns = map convertColumn _ptmiColumns,
_stiLogicalModels = [],
_stiType = tableTypeImpl @pgKind _ptmiExtraTableMetadata,
_stiDescription = Nothing
}

View File

@ -86,7 +86,7 @@ import Hasura.Prelude
import Hasura.RQL.Types.Backend (SupportedNamingCase (..))
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField.Name (ComputedFieldName (..))
import Hasura.RQL.Types.Source.Table (SourceTableType (..))
import Hasura.RQL.Types.Source.TableType (SourceTableType (..))
import Hasura.SQL.Types
import Language.GraphQL.Draft.Syntax qualified as G
import PostgreSQL.Binary.Decoding qualified as PD

View File

@ -40,19 +40,12 @@ import Control.Lens (at, (.~), (^.))
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson qualified as J
import Data.Aeson.Extended
import Data.Bifunctor (bimap)
import Data.Environment qualified as Env
import Data.Has
import Data.HashMap.Strict qualified as HashMap
import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
import Data.HashMap.Strict.NonEmpty qualified as NEHashMap
import Data.HashSet qualified as HashSet
import Data.Semigroup.Foldable (Foldable1 (..))
import Data.Text.Extended
import Data.Text.Extended qualified as Text.E
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC.Types
import Hasura.Backends.Postgres.SQL.Types (getPGDescription)
import Hasura.Base.Error
import Hasura.Base.Error qualified as Error
import Hasura.EncJSON
@ -63,7 +56,6 @@ import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Backend qualified as RQL.Types
import Hasura.RQL.Types.BackendType
import Hasura.RQL.Types.BackendType qualified as Backend
import Hasura.RQL.Types.Column (ColumnMutability (..), RawColumnInfo (..), RawColumnType (..))
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Common qualified as Common
import Hasura.RQL.Types.HealthCheck (HealthCheckConfig)
@ -82,9 +74,6 @@ import Hasura.SQL.AnyBackend qualified as AB
import Hasura.SQL.AnyBackend qualified as AnyBackend
import Hasura.Server.Logging (MetadataLog (..))
import Hasura.Services
import Hasura.Table.Cache (Constraint (..), DBTableMetadata (..), ForeignKey (..), ForeignKeyMetadata (..), PrimaryKey (..))
import Language.GraphQL.Draft.Syntax qualified as G
import Witch qualified
--------------------------------------------------------------------------------
-- Add source
@ -408,7 +397,7 @@ runGetSourceTables GetSourceTables {..} = do
data GetTableInfo_ = GetTableInfo_
{ _gtiSourceName_ :: Common.SourceName,
_gtiTableName_ :: API.TableName
_gtiTableName_ :: TableName 'DataConnector
}
instance FromJSON GetTableInfo_ where
@ -422,7 +411,9 @@ instance FromJSON GetTableInfo_ where
runGetTableInfo_ ::
( CacheRM m,
MonadError Error.QErr m,
Metadata.MetadataM m
Metadata.MetadataM m,
MonadBaseControl IO m,
MonadIO m
) =>
GetTableInfo_ ->
m EncJSON
@ -432,13 +423,10 @@ runGetTableInfo_ GetTableInfo_ {..} = do
let sources = fmap Metadata.unBackendSourceMetadata $ Metadata._metaSources metadata
abSourceMetadata <- lookupSourceMetadata _gtiSourceName_ sources
AnyBackend.dispatchAnyBackend @RQL.Types.Backend abSourceMetadata $ \Metadata.SourceMetadata {_smKind, _smConfiguration} -> do
AnyBackend.dispatchAnyBackend @RQL.Types.Backend abSourceMetadata $ \Metadata.SourceMetadata {_smKind} -> do
case _smKind of
Backend.DataConnectorKind _dcName -> do
sourceInfo <- askSourceInfo @'DataConnector _gtiSourceName_
let tableName = Witch.from _gtiTableName_
let table = HashMap.lookup tableName $ _rsTables $ _siDbObjectsIntrospection sourceInfo
pure . EncJSON.encJFromJValue $ convertTableMetadataToTableInfo tableName <$> table
fmap EncJSON.encJFromJValue (getTableInfo @'DataConnector _gtiSourceName_ _gtiTableName_)
backend ->
Error.throw500 ("Schema fetching is not supported for '" <> Text.E.toTxt backend <> "'")
@ -475,54 +463,3 @@ lookupSourceMetadata :: (MonadError QErr m) => SourceName -> InsOrdHashMap Sourc
lookupSourceMetadata sourceName sources =
InsOrdHashMap.lookup sourceName sources
`onNothing` Error.throw400 Error.DataConnectorError ("Source '" <> Text.E.toTxt sourceName <> "' not found")
-- | Convert the schema-cache representation of a Data Connector table
-- ('DBTableMetadata') into the Data Connector API wire type 'API.TableInfo'.
--
-- Names are converted between representations via 'Witch.from'; the extra
-- per-table/per-column metadata stored by the Data Connector backend
-- ('_etmTableType', '_etmExtraColumnMetadata') is folded into the result.
convertTableMetadataToTableInfo :: TableName 'DataConnector -> DBTableMetadata 'DataConnector -> API.TableInfo
convertTableMetadataToTableInfo tableName DBTableMetadata {..} =
API.TableInfo
{ _tiName = Witch.from tableName,
_tiType = DC.Types._etmTableType _ptmiExtraTableMetadata,
_tiColumns = convertColumn <$> _ptmiColumns,
-- Primary key: drop the 'PrimaryKey' wrapper and convert each column name.
-- NOTE(review): '_ptmiPrimaryKey' appears to be a 'Maybe'; absent key maps
-- to 'Nothing' — confirm against 'DBTableMetadata'.
_tiPrimaryKey = fmap Witch.from . toNonEmpty . _pkColumns <$> _ptmiPrimaryKey,
_tiForeignKeys = convertForeignKeys _ptmiForeignKeys,
_tiDescription = getPGDescription <$> _ptmiDescription,
-- Mutability flags come from the view metadata when present. 'all' over a
-- 'Maybe'-like Foldable is 'True' when empty, so a table with no view info
-- is reported as insertable/updatable/deletable.
_tiInsertable = all viIsInsertable _ptmiViewInfo,
_tiUpdatable = all viIsUpdatable _ptmiViewInfo,
_tiDeletable = all viIsDeletable _ptmiViewInfo
}
where
-- Recursively translate a cached column type (scalar, nested object, or
-- nested array) into the API column-type representation.
convertRawColumnType :: RawColumnType 'DataConnector -> API.ColumnType
convertRawColumnType = \case
RawColumnTypeScalar scalarType -> API.ColumnTypeScalar $ Witch.from scalarType
RawColumnTypeObject _ name -> API.ColumnTypeObject name
RawColumnTypeArray _ columnType isNullable -> API.ColumnTypeArray (convertRawColumnType columnType) isNullable
-- Translate one cached column ('RawColumnInfo') into an 'API.ColumnInfo',
-- carrying over nullability, description, and mutability flags.
convertColumn :: RawColumnInfo 'DataConnector -> API.ColumnInfo
convertColumn RawColumnInfo {..} =
API.ColumnInfo
{ _ciName = Witch.from rciName,
_ciType = convertRawColumnType rciType,
_ciNullable = rciIsNullable,
_ciDescription = G.unDescription <$> rciDescription,
_ciInsertable = _cmIsInsertable rciMutability,
_ciUpdatable = _cmIsUpdatable rciMutability,
-- 'Nothing' when there is no extra metadata for this column, or when the
-- metadata carries no value-generation info ('=<<' collapses both cases).
_ciValueGenerated = DC.Types._ecmValueGenerated =<< extraColumnMetadata
}
where
extraColumnMetadata = HashMap.lookup rciName . DC.Types._etmExtraColumnMetadata $ _ptmiExtraTableMetadata
-- Re-key the set of foreign-key metadata into the API's map from
-- constraint name to 'API.Constraint' (foreign table + column mapping).
convertForeignKeys :: HashSet (ForeignKeyMetadata 'DataConnector) -> API.ForeignKeys
convertForeignKeys foreignKeys =
foreignKeys
& HashSet.toList
& fmap
( \(ForeignKeyMetadata ForeignKey {..}) ->
let constraintName = Witch.from $ _cName _fkConstraint
constraint =
API.Constraint
{ _cForeignTable = Witch.from _fkForeignTable,
-- Convert both sides of each (local, foreign) column pair.
_cColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> NEHashMap.toList _fkColumnMapping
}
in (constraintName, constraint)
)
& HashMap.fromList
& API.ForeignKeys

View File

@ -51,7 +51,9 @@ import Data.Aeson hiding ((.=))
import Data.Aeson.TH
import Data.Has
import Data.HashMap.Strict qualified as HashMap
import Data.Text (unpack)
import Data.Text.Extended
import Data.Tuple.Extra (uncurry3)
import Hasura.Base.Error
import Hasura.LogicalModel.Types (LogicalModelName)
import Hasura.Prelude
@ -217,6 +219,7 @@ data RawColumnType (b :: BackendType)
| RawColumnTypeObject (XNestedObjects b) G.Name
| RawColumnTypeArray (XNestedObjects b) (RawColumnType b) Bool
deriving stock (Generic)
deriving (ToJSON, FromJSON) via Autodocodec (RawColumnType b)
deriving instance (Backend b) => Eq (RawColumnType b)
@ -228,21 +231,37 @@ deriving instance (Backend b) => Show (RawColumnType b)
instance (Backend b) => NFData (RawColumnType b)
-- For backwards compatibility we want to serialize and deserialize
-- RawColumnTypeScalar as a ScalarType
instance (Backend b) => ToJSON (RawColumnType b) where
toJSON = \case
RawColumnTypeScalar scalar -> toJSON scalar
other -> genericToJSON hasuraJSON other
instance (Backend b) => FromJSON (RawColumnType b) where
parseJSON v = (RawColumnTypeScalar <$> parseJSON v) <|> genericParseJSON hasuraJSON v
-- Ideally we'd derive ToJSON and FromJSON instances from the HasCodec instance, rather than the other way around.
-- Unfortunately, I'm not sure if it's possible to write a proper HasCodec instance in the presence
-- of the (XNestedObjects b) type family, which may be Void.
instance (Backend b) => HasCodec (RawColumnType b) where
codec = codecViaAeson "RawColumnType"
codec =
named "RawColumnType"
$
-- For backwards compatibility we want to serialize and deserialize
-- RawColumnTypeScalar as a ScalarType.
-- Note: we need to use `codecViaAeson` instead of `codec` because the `HasCodec` instance
-- for `PGScalarType` has diverged from the `ToJSON`/`FromJSON` instances.
matchChoiceCodec
(dimapCodec RawColumnTypeScalar id $ codecViaAeson "ScalarType")
(Autodocodec.object "ColumnTypeNonScalar" $ discriminatedUnionCodec "type" enc dec)
\case
RawColumnTypeScalar scalar -> Left scalar
ct -> Right ct
where
enc = \case
RawColumnTypeScalar _ -> error "unexpected RawColumnTypeScalar"
RawColumnTypeObject _ objectName -> ("object", mapToEncoder objectName columnTypeObjectCodec)
RawColumnTypeArray _ columnType isNullable -> ("array", mapToEncoder (columnType, isNullable) columnTypeArrayCodec)
dec =
HashMap.fromList
[ ("object", ("ColumnTypeObject", mapToDecoder (uncurry RawColumnTypeObject) columnTypeObjectCodec)),
("array", ("ColumnTypeArray", mapToDecoder (uncurry3 RawColumnTypeArray) columnTypeArrayCodec))
]
columnTypeObjectCodec = (,) <$> xNestedObjectsCodec <*> requiredField' "name"
columnTypeArrayCodec = (,,) <$> xNestedObjectsCodec <*> requiredField' "element_type" .= fst <*> requiredField' "nullable" .= snd
xNestedObjectsCodec :: ObjectCodec void (XNestedObjects b)
xNestedObjectsCodec =
bimapCodec supportsNestedObjects id (pureCodec ())
supportsNestedObjects :: void -> Either String (XNestedObjects b)
supportsNestedObjects _ = mapLeft (unpack . showQErr) $ backendSupportsNestedObjects @b
-- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of
-- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the

View File

@ -7,6 +7,7 @@ module Hasura.RQL.Types.Source.Table
stiName,
stiType,
stiColumns,
stiLogicalModels,
stiPrimaryKey,
stiForeignKeys,
stiDescription,
@ -26,7 +27,6 @@ where
import Autodocodec
import Autodocodec.OpenAPI ()
import Control.DeepSeq (NFData)
import Control.Lens.TH (makeLenses)
import Data.Aeson (FromJSON, ToJSON)
import Data.HashMap.Strict (HashMap)
@ -36,8 +36,10 @@ import Data.List.NonEmpty qualified as NonEmpty
import Data.OpenApi (ToSchema)
import Data.Text (Text)
import GHC.Generics (Generic)
import Hasura.LogicalModel.Metadata
import Hasura.RQL.Types.Backend (Backend (..), ConstraintName)
import Hasura.RQL.Types.Source.Column (SourceColumnInfo)
import Hasura.RQL.Types.Source.TableType
import Prelude
--------------------------------------------------------------------------------
@ -47,6 +49,7 @@ data SourceTableInfo b = SourceTableInfo
{ _stiName :: TableName b,
_stiType :: SourceTableType,
_stiColumns :: [SourceColumnInfo b],
_stiLogicalModels :: [LogicalModelMetadata b],
_stiPrimaryKey :: Maybe (NonEmpty (Column b)),
_stiForeignKeys :: SourceForeignKeys b,
_stiDescription :: Maybe Text,
@ -55,13 +58,10 @@ data SourceTableInfo b = SourceTableInfo
_stiDeletable :: Bool
}
deriving stock (Generic)
deriving anyclass (Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec (SourceTableInfo b)
deriving stock instance (Backend b) => Eq (SourceTableInfo b)
deriving stock instance (Backend b) => Ord (SourceTableInfo b)
deriving stock instance (Backend b) => Show (SourceTableInfo b)
instance (Backend b) => HasCodec (SourceTableInfo b) where
@ -71,6 +71,7 @@ instance (Backend b) => HasCodec (SourceTableInfo b) where
<$> requiredField "name" "The name of the table" .= _stiName
<*> optionalFieldWithDefault "type" Table "The type of table" .= _stiType
<*> requiredField "columns" "The columns of the table" .= _stiColumns
<*> optionalFieldWithOmittedDefault "logical_models" [] "The logical models referenced by the table's column types" .= _stiLogicalModels
<*> dimapMaybeNonEmpty (optionalFieldWithOmittedDefault "primary_key" [] "The primary key of the table") .= _stiPrimaryKey
<*> optionalFieldWithOmittedDefault "foreign_keys" (SourceForeignKeys mempty) "Foreign key constraints" .= _stiForeignKeys
<*> optionalFieldOrNull "description" "Description of the table" .= _stiDescription
@ -83,16 +84,6 @@ instance (Backend b) => HasCodec (SourceTableInfo b) where
--------------------------------------------------------------------------------
-- | Whether a source table is a regular table or a view.
data SourceTableType = Table | View
deriving stock (Eq, Ord, Show, Generic, Enum, Bounded)
deriving anyclass (NFData, Hashable)
-- JSON instances are derived from the 'HasCodec' instance below via Autodocodec.
deriving (FromJSON, ToJSON) via Autodocodec SourceTableType
instance HasCodec SourceTableType where
-- Serialized as the string "table" or "view".
codec = named "TableType" (stringConstCodec [(Table, "table"), (View, "view")])
--------------------------------------------------------------------------------
newtype SourceForeignKeys b = SourceForeignKeys {_unSourceForeignKeys :: HashMap (ConstraintName b) (SourceConstraint b)}
deriving stock (Generic)
deriving anyclass (Hashable)

View File

@ -0,0 +1,20 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedLists #-}
-- | The kind of a database table exposed by a source: a regular table or a
-- view. Factored into its own module so it can be imported without pulling in
-- the full 'Hasura.RQL.Types.Source.Table' module.
module Hasura.RQL.Types.Source.TableType (SourceTableType (..)) where
import Autodocodec
import Autodocodec.OpenAPI ()
import Control.DeepSeq (NFData)
import Data.Aeson (FromJSON, ToJSON)
import Data.Hashable (Hashable)
import GHC.Generics (Generic)
import Prelude
data SourceTableType = Table | View
deriving stock (Eq, Ord, Show, Generic, Enum, Bounded)
deriving anyclass (NFData, Hashable)
-- JSON instances are derived from the 'HasCodec' instance below via Autodocodec.
deriving (FromJSON, ToJSON) via Autodocodec SourceTableType
instance HasCodec SourceTableType where
-- Serialized as the string "table" or "view".
-- NOTE(review): OverloadedLists presumably lets the list literal stand in
-- for the non-empty collection 'stringConstCodec' expects — confirm.
codec = named "TableType" (stringConstCodec [(Table, "table"), (View, "view")])

View File

@ -818,7 +818,8 @@ buildTableCache = Inc.cache proc (source, sourceConfig, dbTablesMeta, tableBuild
_tciCustomConfig = config,
_tciDescription = description,
_tciExtraTableMetadata = _ptmiExtraTableMetadata metadataTable,
_tciApolloFederationConfig = apolloFedConfig
_tciApolloFederationConfig = apolloFedConfig,
_tciRawColumns = columns
}
logicalModelToRawColumnInfos :: LogicalModelMetadata b -> Either QErr [RawColumnInfo b]

View File

@ -80,6 +80,7 @@ module Hasura.Table.Cache
tciUniqueConstraints,
tciUniqueOrPrimaryKeyConstraints,
tciViewInfo,
tciRawColumns,
tiAdminRolePermInfo,
tiCoreInfo,
tiEventTriggerInfoMap,
@ -1011,7 +1012,8 @@ data TableCoreInfoG (b :: BackendType) field primaryKeyColumn = TableCoreInfo
_tciEnumValues :: Maybe EnumValues,
_tciCustomConfig :: TableConfig b,
_tciExtraTableMetadata :: ExtraTableMetadata b,
_tciApolloFederationConfig :: Maybe ApolloFederationConfig
_tciApolloFederationConfig :: Maybe ApolloFederationConfig,
_tciRawColumns :: [RawColumnInfo b]
}
deriving (Generic)

View File

@ -147,7 +147,8 @@ buildTableInfo TableInfoBuilder {..} = tableInfo
_tciEnumValues = Nothing,
_tciCustomConfig = tableConfig,
_tciExtraTableMetadata = PGExtraTableMetadata Table,
_tciApolloFederationConfig = Nothing
_tciApolloFederationConfig = Nothing,
_tciRawColumns = []
}
pk :: Maybe (PrimaryKey PG (ColumnInfo PG))