{-# LANGUAGE Arrows #-}

-- | Description: Create/delete SQL tables to/from Hasura metadata.
module Hasura.RQL.DDL.Schema.Table
  ( TrackTable(..)
  , runTrackTableQ
  , trackExistingTableOrViewP2

  , TrackTableV2(..)
  , runTrackTableV2Q

  , UntrackTable(..)
  , runUntrackTableQ

  , SetTableIsEnum(..)
  , runSetExistingTableIsEnumQ

  , SetTableCustomFields(..)
  , runSetTableCustomFieldsQV2

  , buildTableCache
  , delTableAndDirectDeps
  , processTableChanges
  ) where

import           Hasura.EncJSON
import           Hasura.Prelude
import           Hasura.RQL.DDL.Deps
import           Hasura.RQL.DDL.Schema.Cache.Common
import           Hasura.RQL.DDL.Schema.Catalog
import           Hasura.RQL.DDL.Schema.Diff
import           Hasura.RQL.DDL.Schema.Enum
import           Hasura.RQL.DDL.Schema.Rename
import           Hasura.RQL.Types
import           Hasura.RQL.Types.Catalog
import           Hasura.Server.Utils
import           Hasura.SQL.Types

import qualified Database.PG.Query             as Q
import qualified Hasura.GraphQL.Context        as GC
import qualified Hasura.GraphQL.Schema         as GS
import qualified Hasura.Incremental            as Inc
import qualified Language.GraphQL.Draft.Syntax as G

import           Control.Arrow.Extended
import           Control.Lens.Extended         hiding ((.=))
import           Data.Aeson
import           Data.Aeson.Casing
import           Data.Aeson.TH
import           Instances.TH.Lift             ()
import           Language.Haskell.TH.Syntax    (Lift)
import           Network.URI.Extended          ()

import qualified Data.HashMap.Strict.Extended  as M
import qualified Data.HashSet                  as S
import qualified Data.Text                     as T

data TrackTable
  = TrackTable
  { tName   :: !QualifiedTable
  , tIsEnum :: !Bool
  } deriving (Show, Eq, Lift)

instance FromJSON TrackTable where
  parseJSON v = withOptions <|> withoutOptions
    where
      withOptions = flip (withObject "TrackTable") v $ \o -> TrackTable
        <$> o .: "table"
        <*> o .:? "is_enum" .!= False
      withoutOptions = TrackTable <$> parseJSON v <*> pure False

instance ToJSON TrackTable where
  toJSON (TrackTable name isEnum)
    | isEnum    = object [ "table" .= name, "is_enum" .= isEnum ]
    | otherwise = toJSON name
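
-- A minimal sketch of the JSON this pair of instances accepts and produces,
-- derived from the parser above. The table value itself follows whatever
-- encoding 'QualifiedTable' uses (e.g. a bare name or a schema-qualified
-- object); the exact shape of that value is an assumption here:
--
--   "author"
--   { "table": "author" }
--   { "table": { "schema": "public", "name": "author" }, "is_enum": true }
--
-- 'toJSON' emits the compact form (just the table value) unless 'tIsEnum'
-- is set, in which case it emits the object form.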

data SetTableIsEnum
  = SetTableIsEnum
  { stieTable  :: !QualifiedTable
  , stieIsEnum :: !Bool
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 4 snakeCase) ''SetTableIsEnum)
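
-- An illustrative args object for this type: 'aesonDrop 4 snakeCase' turns
-- the record fields into the keys "table" and "is_enum" (the table encoding
-- is an assumption, as noted above):
--
--   { "table": { "schema": "public", "name": "user_role" }, "is_enum": true }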

data UntrackTable =
  UntrackTable
  { utTable   :: !QualifiedTable
  , utCascade :: !(Maybe Bool)
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)
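
-- A sketch of the corresponding args object: 'aesonDrop 2 snakeCase' with
-- 'omitNothingFields' gives a required "table" key and an optional "cascade"
-- key (same table-encoding caveat as above):
--
--   { "table": { "schema": "public", "name": "author" }, "cascade": true }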

-- | Track a table/view, phase 1:
-- validate the tracking operation. Fails if the table is already being
-- tracked, or if a function with the same name is already being tracked.
trackExistingTableOrViewP1 :: (QErrM m, CacheRWM m) => QualifiedTable -> m ()
trackExistingTableOrViewP1 qt = do
  rawSchemaCache <- askSchemaCache
  when (M.member qt $ scTables rawSchemaCache) $
    throw400 AlreadyTracked $ "view/table already tracked : " <>> qt
  let qf = fmap (FunctionName . getTableTxt) qt
  when (M.member qf $ scFunctions rawSchemaCache) $
    throw400 NotSupported $ "function with name " <> qt <<> " already exists"

trackExistingTableOrViewP2
  :: (MonadTx m, CacheRWM m)
  => QualifiedTable -> SystemDefined -> Bool -> TableConfig -> m EncJSON
trackExistingTableOrViewP2 tableName systemDefined isEnum config = do
  sc <- askSchemaCache
  let defGCtx = scDefaultRemoteGCtx sc
  GS.checkConflictingNode defGCtx $ GS.qualObjectToName tableName
  saveTableToCatalog tableName systemDefined isEnum config
  buildSchemaCacheFor (MOTable tableName)
  return successMsg

runTrackTableQ
  :: (MonadTx m, CacheRWM m, HasSystemDefined m) => TrackTable -> m EncJSON
runTrackTableQ (TrackTable qt isEnum) = do
  trackExistingTableOrViewP1 qt
  systemDefined <- askSystemDefined
  trackExistingTableOrViewP2 qt systemDefined isEnum emptyTableConfig

data TrackTableV2
  = TrackTableV2
  { ttv2Table         :: !TrackTable
  , ttv2Configuration :: !TableConfig
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 4 snakeCase) ''TrackTableV2)
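
-- A rough sketch of the v2 args object: 'aesonDrop 4 snakeCase' gives the
-- keys "table" and "configuration", where "table" is any value the
-- 'TrackTable' parser above accepts and "configuration" is a 'TableConfig'
-- (defined elsewhere; its field names are an assumption here, taken to
-- mirror 'SetTableCustomFields' below):
--
--   { "table": "author"
--   , "configuration":
--       { "custom_root_fields": { "select": "Authors" }
--       , "custom_column_names": { "id": "authorId" }
--       }
--   }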

runTrackTableV2Q
  :: (MonadTx m, CacheRWM m, HasSystemDefined m) => TrackTableV2 -> m EncJSON
runTrackTableV2Q (TrackTableV2 (TrackTable qt isEnum) config) = do
  trackExistingTableOrViewP1 qt
  systemDefined <- askSystemDefined
  trackExistingTableOrViewP2 qt systemDefined isEnum config

runSetExistingTableIsEnumQ :: (MonadTx m, CacheRWM m) => SetTableIsEnum -> m EncJSON
runSetExistingTableIsEnumQ (SetTableIsEnum tableName isEnum) = do
  void $ askTabInfo tableName -- assert that table is tracked
  updateTableIsEnumInCatalog tableName isEnum
  buildSchemaCacheFor (MOTable tableName)
  return successMsg

data SetTableCustomFields
  = SetTableCustomFields
  { _stcfTable             :: !QualifiedTable
  , _stcfCustomRootFields  :: !GC.TableCustomRootFields
  , _stcfCustomColumnNames :: !CustomColumnNames
  } deriving (Show, Eq, Lift)
$(deriveToJSON (aesonDrop 5 snakeCase) ''SetTableCustomFields)

instance FromJSON SetTableCustomFields where
  parseJSON = withObject "SetTableCustomFields" $ \o ->
    SetTableCustomFields
      <$> o .: "table"
      <*> o .:? "custom_root_fields" .!= GC.emptyCustomRootFields
      <*> o .:? "custom_column_names" .!= M.empty
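
-- A minimal args sketch accepted by the parser above; both custom-field keys
-- default to empty when omitted. The inner "select" root-field key belongs to
-- 'GC.TableCustomRootFields', which is defined elsewhere, so treat it as
-- illustrative:
--
--   { "table": "author"
--   , "custom_root_fields": { "select": "Authors" }
--   , "custom_column_names": { "id": "authorId" }
--   }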

runSetTableCustomFieldsQV2
  :: (MonadTx m, CacheRWM m) => SetTableCustomFields -> m EncJSON
runSetTableCustomFieldsQV2 (SetTableCustomFields tableName rootFields columnNames) = do
  void $ askTabInfo tableName -- assert that table is tracked
  updateTableConfig tableName (TableConfig rootFields columnNames)
  buildSchemaCacheFor (MOTable tableName)
  return successMsg

unTrackExistingTableOrViewP1
  :: (CacheRM m, QErrM m) => UntrackTable -> m ()
unTrackExistingTableOrViewP1 (UntrackTable vn _) = do
  rawSchemaCache <- askSchemaCache
  case M.lookup vn (scTables rawSchemaCache) of
    Just ti ->
      -- Check if table/view is system defined
      when (isSystemDefined $ _tciSystemDefined $ _tiCoreInfo ti) $ throw400 NotSupported $
        vn <<> " is system defined, cannot untrack"
    Nothing -> throw400 AlreadyUntracked $
      "view/table already untracked : " <>> vn

unTrackExistingTableOrViewP2
  :: (CacheRWM m, MonadTx m)
  => UntrackTable -> m EncJSON
unTrackExistingTableOrViewP2 (UntrackTable qtn cascade) = withNewInconsistentObjsCheck do
  sc <- askSchemaCache

  -- Get all objects that depend on the table
  let allDeps = getDependentObjs sc (SOTable qtn)
      indirectDeps = filter (not . isDirectDep) allDeps
  -- Report back with an error if cascade is not set
  when (indirectDeps /= [] && not (or cascade)) $ reportDepsExt indirectDeps []
  -- Purge all the indirect dependents from state
  mapM_ purgeDependentObject indirectDeps
  -- Delete the table and its direct dependencies
  delTableAndDirectDeps qtn

  buildSchemaCache
  pure successMsg
  where
    isDirectDep = \case
      (SOTableObj dtn _) -> qtn == dtn
      _                  -> False
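
    -- For intuition (illustrative, not exhaustive): a permission or
    -- relationship defined on the table itself is a *direct* dependency
    -- ('SOTableObj' on the same table) and is removed by
    -- 'delTableAndDirectDeps', whereas a relationship on some other table
    -- that points at this one is *indirect* and is only purged when
    -- "cascade" is set.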

runUntrackTableQ
  :: (CacheRWM m, MonadTx m)
  => UntrackTable -> m EncJSON
runUntrackTableQ q = do
  unTrackExistingTableOrViewP1 q
  unTrackExistingTableOrViewP2 q

processTableChanges :: (MonadTx m, CacheRM m) => TableCoreInfo -> TableDiff -> m ()
processTableChanges ti tableDiff = do
  -- If the table has been renamed, we don't replace constraints or process
  -- dropped/added columns here, because a schema cache reload happens
  -- afterwards anyway.
  sc <- askSchemaCache
  let tn = _tciName ti
      withOldTabName = do
        procAlteredCols sc tn

      withNewTabName newTN = do
        let tnGQL = GS.qualObjectToName newTN
            defGCtx = scDefaultRemoteGCtx sc
        -- check for GraphQL schema conflicts on the new name
        GS.checkConflictingNode defGCtx tnGQL
        procAlteredCols sc tn
        -- update the new table name in the catalog
        renameTableInCatalog newTN tn

  -- Process the computed field diff
  processComputedFieldDiff tn
  -- Drop custom column names for dropped columns
  possiblyDropCustomColumnNames tn
  maybe withOldTabName withNewTabName mNewName
  where
    TableDiff mNewName droppedCols _ alteredCols _ computedFieldDiff _ _ = tableDiff

    possiblyDropCustomColumnNames tn = do
      let TableConfig customFields customColumnNames = _tciCustomConfig ti
          modifiedCustomColumnNames = foldl' (flip M.delete) customColumnNames droppedCols
      when (modifiedCustomColumnNames /= customColumnNames) $
        liftTx $ updateTableConfig tn $ TableConfig customFields modifiedCustomColumnNames

    procAlteredCols sc tn = for_ alteredCols $
      \( PGRawColumnInfo oldName _ oldType _ _
       , PGRawColumnInfo newName _ newType _ _ ) -> do
        if | oldName /= newName -> renameColInCatalog oldName newName tn (_tciFieldInfoMap ti)

           | oldType /= newType -> do
               let colId = SOTableObj tn $ TOCol oldName
                   typeDepObjs = getDependentObjsWith (== DROnType) sc colId

               unless (null typeDepObjs) $ throw400 DependencyError $
                 "cannot change type of column " <> oldName <<> " in table "
                 <> tn <<> " because of the following dependencies : "
                 <> reportSchemaObjs typeDepObjs

           | otherwise -> pure ()

    processComputedFieldDiff table = do
      let ComputedFieldDiff _ altered overloaded = computedFieldDiff
          getFunction = fmFunction . ccmFunctionMeta
      forM_ overloaded $ \(columnName, function) ->
        throw400 NotSupported $ "The function " <> function
        <<> " associated with computed field " <> columnName
        <<> " of table " <> table <<> " is being overloaded"
      forM_ altered $ \(old, new) ->
        if | (fmType . ccmFunctionMeta) new == FTVOLATILE ->
             throw400 NotSupported $ "The type of function " <> getFunction old
             <<> " associated with computed field " <> ccmName old
             <<> " of table " <> table <<> " is being altered to \"VOLATILE\""
           | otherwise -> pure ()

delTableAndDirectDeps :: (MonadTx m) => QualifiedTable -> m ()
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
  liftTx $ Q.catchE defaultTxErrorHandler $ do
    Q.unitQ [Q.sql|
             DELETE FROM "hdb_catalog"."hdb_relationship"
             WHERE table_schema = $1 AND table_name = $2
              |] (sn, tn) False
    Q.unitQ [Q.sql|
             DELETE FROM "hdb_catalog"."hdb_permission"
             WHERE table_schema = $1 AND table_name = $2
              |] (sn, tn) False
    Q.unitQ [Q.sql|
             DELETE FROM "hdb_catalog"."event_triggers"
             WHERE schema_name = $1 AND table_name = $2
              |] (sn, tn) False
    Q.unitQ [Q.sql|
             DELETE FROM "hdb_catalog"."hdb_computed_field"
             WHERE table_schema = $1 AND table_name = $2
              |] (sn, tn) False
  deleteTableFromCatalog qtn

-- | Builds an initial @'TableCache' 'PGColumnInfo'@ from catalog information. Does not fill in
-- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains
-- columns, not relationships; those pieces of information are filled in by later stages.
buildTableCache
  :: forall arr m
   . ( ArrowChoice arr, Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
     , Inc.ArrowCache m arr, MonadTx m )
  => [CatalogTable] `arr` M.HashMap QualifiedTable TableRawInfo
buildTableCache = Inc.cache proc catalogTables -> do
  rawTableInfos <-
    (| Inc.keyed (| withTable (\tables -> buildRawTableInfo <<< noDuplicateTables -< tables) |)
    |) (M.groupOnNE _ctName catalogTables)
  let rawTableCache = M.catMaybes rawTableInfos
      enumTables = flip M.mapMaybe rawTableCache \rawTableInfo ->
        (,) <$> _tciPrimaryKey rawTableInfo <*> _tciEnumValues rawTableInfo
  tableInfos <-
    (| Inc.keyed (| withTable (\table -> processTableInfo -< (enumTables, table)) |)
    |) rawTableCache
  returnA -< M.catMaybes tableInfos
  where
    withTable :: ErrorA QErr arr (e, s) a -> arr (e, (QualifiedTable, s)) (Maybe a)
    withTable f = withRecordInconsistency f <<<
      second (first $ arr \name -> MetadataObject (MOTable name) (toJSON name))

    noDuplicateTables = proc tables -> case tables of
      table :| [] -> returnA -< table
      _           -> throwA -< err400 AlreadyExists "duplicate definition for table"

    -- Step 1: Build the raw table cache from metadata information.
    buildRawTableInfo :: ErrorA QErr arr CatalogTable (TableCoreInfoG PGRawColumnInfo PGCol)
    buildRawTableInfo = Inc.cache proc (CatalogTable name systemDefined isEnum config maybeInfo) -> do
      catalogInfo <-
        (| onNothingA (throwA -<
             err400 NotExists $ "no such table/view exists in postgres: " <>> name)
        |) maybeInfo

      let columns = _ctiColumns catalogInfo
          columnMap = mapFromL (fromPGCol . prciName) columns
          primaryKey = _ctiPrimaryKey catalogInfo
      rawPrimaryKey <- liftEitherA -< traverse (resolvePrimaryKeyColumns columnMap) primaryKey
      enumValues <- if isEnum
        then bindErrorA -< Just <$> fetchAndValidateEnumValues name rawPrimaryKey columns
        else returnA -< Nothing

      returnA -< TableCoreInfo
        { _tciName = name
        , _tciSystemDefined = systemDefined
        , _tciFieldInfoMap = columnMap
        , _tciPrimaryKey = primaryKey
        , _tciUniqueConstraints = _ctiUniqueConstraints catalogInfo
        , _tciForeignKeys = S.map unCatalogForeignKey $ _ctiForeignKeys catalogInfo
        , _tciViewInfo = _ctiViewInfo catalogInfo
        , _tciEnumValues = enumValues
        , _tciCustomConfig = config
        , _tciDescription = _ctiDescription catalogInfo
        }

    -- Step 2: Process the raw table cache to replace Postgres column types with logical column
    -- types.
    processTableInfo
      :: ErrorA QErr arr
           ( M.HashMap QualifiedTable (PrimaryKey PGCol, EnumValues)
           , TableCoreInfoG PGRawColumnInfo PGCol
           ) TableRawInfo
    processTableInfo = proc (enumTables, rawInfo) -> liftEitherA -< do
      let columns = _tciFieldInfoMap rawInfo
          enumReferences = resolveEnumReferences enumTables (_tciForeignKeys rawInfo)
      columnInfoMap <-
            alignCustomColumnNames columns (_tcCustomColumnNames $ _tciCustomConfig rawInfo)
        >>= traverse (processColumnInfo enumReferences (_tciName rawInfo))
      assertNoDuplicateFieldNames (M.elems columnInfoMap)

      primaryKey <- traverse (resolvePrimaryKeyColumns columnInfoMap) (_tciPrimaryKey rawInfo)
      pure rawInfo
        { _tciFieldInfoMap = columnInfoMap
        , _tciPrimaryKey = primaryKey
        }

    resolvePrimaryKeyColumns
      :: (QErrM n) => HashMap FieldName a -> PrimaryKey PGCol -> n (PrimaryKey a)
    resolvePrimaryKeyColumns columnMap = traverseOf (pkColumns.traverse) \columnName ->
      M.lookup (fromPGCol columnName) columnMap
        `onNothing` throw500 "column in primary key not in table!"

    alignCustomColumnNames
      :: (QErrM n)
      => FieldInfoMap PGRawColumnInfo
      -> CustomColumnNames
      -> n (FieldInfoMap (PGRawColumnInfo, G.Name))
    alignCustomColumnNames columns customNames = do
      let customNamesByFieldName = M.fromList $ map (first fromPGCol) $ M.toList customNames
      flip M.traverseWithKey (align columns customNamesByFieldName) \columnName -> \case
        This column -> pure (column, G.Name $ getFieldNameTxt columnName)
        These column customName -> pure (column, customName)
        That customName -> throw400 NotExists $ "the custom field name " <> customName
          <<> " was given for the column " <> columnName <<> ", but no such column exists"
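
    -- A small illustrative alignment (the names are made up): with columns
    -- {id, title} and custom names {id -> "authorId"}, 'align' yields
    --
    --   These id "authorId"  -- the column is exposed as "authorId"
    --   This  title          -- the column keeps its own name, "title"
    --
    -- and a custom name given for a column that does not exist ('That') is
    -- rejected with a 400 error, as above.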

    -- | “Processes” a 'PGRawColumnInfo' into a 'PGColumnInfo' by resolving its type using a map of
    -- known enum tables.
    processColumnInfo
      :: (QErrM n)
      => M.HashMap PGCol (NonEmpty EnumReference)
      -> QualifiedTable -- ^ the table this column belongs to
      -> (PGRawColumnInfo, G.Name)
      -> n PGColumnInfo
    processColumnInfo tableEnumReferences tableName (rawInfo, name) = do
      resolvedType <- resolveColumnType
      pure PGColumnInfo
        { pgiColumn = pgCol
        , pgiName = name
        , pgiType = resolvedType
        , pgiIsNullable = prciIsNullable rawInfo
        , pgiDescription = prciDescription rawInfo
        }
      where
        pgCol = prciName rawInfo
        resolveColumnType =
          case M.lookup pgCol tableEnumReferences of
            -- no references? not an enum
            Nothing -> pure $ PGColumnScalar (prciType rawInfo)
            -- one reference? is an enum
            Just (enumReference:|[]) -> pure $ PGColumnEnumReference enumReference
            -- multiple referenced enums? the schema is strange, so let’s reject it
            Just enumReferences -> throw400 ConstraintViolation
              $ "column " <> prciName rawInfo <<> " in table " <> tableName
              <<> " references multiple enum tables ("
              <> T.intercalate ", " (map (dquote . erTable) $ toList enumReferences) <> ")"

    assertNoDuplicateFieldNames columns =
      flip M.traverseWithKey (M.groupOn pgiName columns) \name columnsWithName ->
        case columnsWithName of
          one:two:more -> throw400 AlreadyExists $ "the definitions of columns "
            <> englishList (dquoteTxt . pgiColumn <$> (one:|two:more))
            <> " are in conflict: they are mapped to the same field name, " <>> name
          _ -> pure ()