module Hasura.RQL.DDL.Metadata
  ( runReplaceMetadata,
    runReplaceMetadataV2,
    runExportMetadata,
    runExportMetadataV2,
    runClearMetadata,
    runReloadMetadata,
    runDumpInternalState,
    runGetInconsistentMetadata,
    runDropInconsistentMetadata,
    runGetCatalogState,
    runSetCatalogState,
    runTestWebhookTransform,
    runSetMetricsConfig,
    runRemoveMetricsConfig,
    module Hasura.RQL.DDL.Metadata.Types,
  )
where

import Control.Lens ((.~), (^.), (^?))
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson qualified as J
import Data.Aeson.Ordered qualified as AO
import Data.Attoparsec.Text qualified as AT
import Data.Bifunctor (first)
import Data.Bitraversable
import Data.ByteString.Lazy qualified as BL
import Data.CaseInsensitive qualified as CI
import Data.Environment qualified as Env
import Data.Has (Has, getter)
import Data.HashMap.Strict qualified as Map
import Data.HashMap.Strict.InsOrd.Extended qualified as OMap
import Data.HashSet qualified as HS
import Data.HashSet qualified as Set
import Data.List qualified as L
import Data.TByteString qualified as TBS
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Data.Text.Extended ((<<>))
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.Logging qualified as HL
import Hasura.Metadata.Class
import Hasura.Prelude hiding (first)
import Hasura.RQL.DDL.Action
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.CustomTypes
import Hasura.RQL.DDL.Endpoint
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.InheritedRoles
import Hasura.RQL.DDL.Metadata.Types
import Hasura.RQL.DDL.Network
import Hasura.RQL.DDL.Permission
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteRelationship
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.ScheduledTrigger
import Hasura.RQL.DDL.Schema
import Hasura.RQL.DDL.Schema.Source
import Hasura.RQL.DDL.Webhook.Transform
import Hasura.RQL.DDL.Webhook.Transform.Class (mkReqTransformCtx)
import Hasura.RQL.Types
import Hasura.RQL.Types.Endpoint
import Hasura.RQL.Types.EventTrigger qualified as ET
import Hasura.RQL.Types.Eventing.Backend (BackendEventTrigger (..))
import Hasura.SQL.AnyBackend qualified as AB
import Network.HTTP.Client.Transformable qualified as HTTP
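
-- | Clear all metadata, keeping only the default source (if one is configured
-- via @--database-url@ or its env var) with its configuration but none of its
-- tables or functions. The 'hdb_catalog' tables of every tracked source are
-- cleaned up via 'runPostDropSourceHook'.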
runClearMetadata ::
  forall m r.
  ( QErrM m,
    MonadIO m,
    CacheRWM m,
    MetadataM m,
    MonadMetadataStorageQueryAPI m,
    MonadBaseControl IO m,
    MonadReader r m,
    Has (HL.Logger HL.Hasura) r
  ) =>
  ClearMetadata ->
  m EncJSON
runClearMetadata _ = do
  metadata <- getMetadata
  -- Clean up all sources, dropping the hdb_catalog schema from each source
  for_ (OMap.toList $ _metaSources metadata) $ \(sourceName, backendSourceMetadata) ->
    AB.dispatchAnyBackend @BackendMetadata backendSourceMetadata \(_sourceMetadata :: SourceMetadata b) -> do
      sourceInfo <- askSourceInfo @b sourceName
      -- We do not bother dropping all dependencies on the source, because the
      -- metadata is going to be replaced with an empty metadata. And dropping the
      -- dependencies would lead to a rebuild of the schema cache, which is of no
      -- use here since we do not use the rebuilt schema cache. Hence, we only
      -- clean up the 'hdb_catalog' tables from the source.
      runPostDropSourceHook sourceName sourceInfo
  -- We can infer whether the server was started with the `--database-url` option
  -- (or the corresponding env variable) by checking the existence of @'defaultSource'
  -- in the current metadata.
  let maybeDefaultSourceMetadata = metadata ^? metaSources . ix defaultSource
      emptyMetadata' = case maybeDefaultSourceMetadata of
        Nothing -> emptyMetadata
        Just exists ->
          -- If a default postgres source is defined, we need to set metadata
          -- which contains only the default source, without any tables and functions.
          let emptyDefaultSource =
                AB.dispatchAnyBackend @Backend exists \(s :: SourceMetadata b) ->
                  AB.mkAnyBackend @b $
                    SourceMetadata @b defaultSource mempty mempty (_smConfiguration @b s) Nothing emptySourceCustomization
           in emptyMetadata
                & metaSources %~ OMap.insert defaultSource emptyDefaultSource
  runReplaceMetadataV1 $ RMWithSources emptyMetadata'

{- Note [Cleanup for dropped triggers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There was an issue (https://github.com/hasura/graphql-engine/issues/5461)
fixed (via https://github.com/hasura/graphql-engine/pull/6137) related to
event triggers while replacing metadata in the catalog prior to metadata
separation. The metadata separation solves the issue naturally, since the
'hdb_catalog.event_triggers' table is no longer in use and new/updated event
triggers are processed while building the schema cache. But we need to drop the
database trigger and archive events for dropped event triggers. This is handled
explicitly in the 'runReplaceMetadata' function.
-}

-- | Replace the 'current metadata' with the 'new metadata'.
-- The 'new metadata' might come in via 'Import Metadata' in the console.
runReplaceMetadata ::
  ( CacheRWM m,
    MetadataM m,
    MonadIO m,
    MonadMetadataStorageQueryAPI m,
    MonadReader r m,
    Has (HL.Logger HL.Hasura) r
  ) =>
  ReplaceMetadata ->
  m EncJSON
runReplaceMetadata = \case
  RMReplaceMetadataV1 v1args -> runReplaceMetadataV1 v1args
  RMReplaceMetadataV2 v2args -> runReplaceMetadataV2 v2args

runReplaceMetadataV1 ::
  ( QErrM m,
    CacheRWM m,
    MetadataM m,
    MonadIO m,
    MonadMetadataStorageQueryAPI m,
    MonadReader r m,
    Has (HL.Logger HL.Hasura) r
  ) =>
  ReplaceMetadataV1 ->
  m EncJSON
runReplaceMetadataV1 =
  (successMsg <$) . runReplaceMetadataV2 . ReplaceMetadataV2 NoAllowInconsistentMetadata
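
-- | Replace the current metadata with the provided new metadata. Cron triggers
-- are diffed rather than recreated wholesale (see Note [Cron triggers behaviour
-- with replace metadata]), the schema cache is rebuilt (strictly, unless
-- inconsistent metadata is allowed), and SQL triggers belonging to dropped
-- event triggers are cleaned up (see Note [Cleanup for dropped triggers]).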
runReplaceMetadataV2 ::
  forall m r.
  ( QErrM m,
    CacheRWM m,
    MetadataM m,
    MonadIO m,
    MonadMetadataStorageQueryAPI m,
    MonadReader r m,
    Has (HL.Logger HL.Hasura) r
  ) =>
  ReplaceMetadataV2 ->
  m EncJSON
runReplaceMetadataV2 ReplaceMetadataV2 {..} = do
  logger :: (HL.Logger HL.Hasura) <- asks getter
  -- we drop all the future cron trigger events before inserting the new metadata
  -- and re-populating future cron events below
  let introspectionDisabledRoles =
        case _rmv2Metadata of
          RMWithSources m -> _metaSetGraphqlIntrospectionOptions m
          RMWithoutSources _ -> mempty
  oldMetadata <- getMetadata
  (cronTriggersMetadata, cronTriggersToBeAdded) <- processCronTriggers oldMetadata
  metadata <- case _rmv2Metadata of
    RMWithSources m -> pure $ m {_metaCronTriggers = cronTriggersMetadata}
    RMWithoutSources MetadataNoSources {..} -> do
      let maybeDefaultSourceMetadata = oldMetadata ^? metaSources . ix defaultSource . toSourceMetadata
      defaultSourceMetadata <-
        onNothing maybeDefaultSourceMetadata $
          throw400 NotSupported "cannot import metadata without sources since no default source is defined"
      let newDefaultSourceMetadata =
            AB.mkAnyBackend
              defaultSourceMetadata
                { _smTables = _mnsTables,
                  _smFunctions = _mnsFunctions
                }
      pure $
        Metadata
          (OMap.singleton defaultSource newDefaultSourceMetadata)
          _mnsRemoteSchemas
          _mnsQueryCollections
          _mnsAllowlist
          _mnsCustomTypes
          _mnsActions
          cronTriggersMetadata
          (_metaRestEndpoints oldMetadata)
          emptyApiLimit
          emptyMetricsConfig
          mempty
          introspectionDisabledRoles
          emptyNetwork
  putMetadata metadata
  case _rmv2AllowInconsistentMetadata of
    AllowInconsistentMetadata ->
      buildSchemaCache mempty
    NoAllowInconsistentMetadata ->
      buildSchemaCacheStrict
  -- populate future cron events for all the new cron triggers that are imported
  for_ cronTriggersToBeAdded $ \CronTriggerMetadata {..} ->
    populateInitialCronTriggerEvents ctSchedule ctName
  -- See Note [Cleanup for dropped triggers]
  dropSourceSQLTriggers logger (_metaSources oldMetadata) (_metaSources metadata)
  encJFromJValue . formatInconsistentObjs . scInconsistentObjs <$> askSchemaCache
  where
    {- Note [Cron triggers behaviour with replace metadata]
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    When the metadata is replaced, we delete only the cron triggers
    that were deleted, instead of deleting all the old cron triggers (which
    existed in the metadata before it was replaced) and inserting all the
    new cron triggers. It is done this way because when a cron trigger is
    dropped, the cron events associated with it are also dropped from the DB,
    and when a new cron trigger is added, new cron events are generated by the
    graphql-engine. So, this way we only delete and insert the data which has changed.

    The set of deleted cron triggers is calculated by taking a diff of the old
    cron triggers and the new cron triggers. Note that we don't just check the
    name of the trigger to calculate the diff: the whole cron trigger definition
    is considered in the calculation.

    Note: Only cron triggers with `include_in_metadata` set to `true` can be
    updated/deleted via the replace metadata API. Cron triggers with
    `include_in_metadata` set to `false` can only be modified via the
    `create_cron_trigger` and `delete_cron_trigger` APIs.
    -}
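    {- For intuition, with illustrative values and 'leftIfDifferent' as defined
    in 'processCronTriggers' below:

        old = fromList [("a", c1), ("b", c2)]
        new = fromList [("b", c2'), ("c", c3)]

        Map.differenceWith leftIfDifferent new old == fromList [("b", c2'), ("c", c3)] -- to be added
        Map.differenceWith leftIfDifferent old new == fromList [("a", c1), ("b", c2)]  -- to be dropped

    That is, an entry survives the diff if it only exists on the left side or
    if its definition differs between the two sides.
    -}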
    processCronTriggers oldMetadata = do
      let (oldCronTriggersIncludedInMetadata, oldCronTriggersNotIncludedInMetadata) =
            OMap.partition ctIncludeInMetadata (_metaCronTriggers oldMetadata)
          allNewCronTriggers =
            case _rmv2Metadata of
              RMWithoutSources m -> _mnsCronTriggers m
              RMWithSources m -> _metaCronTriggers m
          -- this function is intended to be used with `Map.differenceWith`; it is
          -- called when two equal keys are encountered, and the values are then
          -- compared to calculate the diff.
          -- see https://hackage.haskell.org/package/unordered-containers-0.2.14.0/docs/Data-HashMap-Internal.html#v:differenceWith
          leftIfDifferent l r
            | l == r = Nothing
            | otherwise = Just l
          cronTriggersToBeAdded =
            Map.differenceWith
              leftIfDifferent
              (OMap.toHashMap allNewCronTriggers)
              (OMap.toHashMap oldCronTriggersIncludedInMetadata)
          cronTriggersToBeDropped =
            Map.differenceWith
              leftIfDifferent
              (OMap.toHashMap oldCronTriggersIncludedInMetadata)
              (OMap.toHashMap allNewCronTriggers)
      dropFutureCronEvents $ MetadataCronTriggers $ Map.keys cronTriggersToBeDropped
      cronTriggers <- do
        -- traverse over the new cron triggers and check if any of them
        -- already exists as a cron trigger with "included_in_metadata: false"
        for_ allNewCronTriggers $ \ct ->
          when (ctName ct `OMap.member` oldCronTriggersNotIncludedInMetadata) $
            throw400 AlreadyExists $
              "cron trigger with name "
                <> ctName ct
                <<> " already exists as a cron trigger with \"included_in_metadata\" as false"
        -- we add the old cron triggers with included_in_metadata set to false to the
        -- newly added cron triggers
        pure $ allNewCronTriggers <> oldCronTriggersNotIncludedInMetadata
      pure (cronTriggers, cronTriggersToBeAdded)
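
    -- | Drop the database SQL triggers and archive the events of event triggers
    -- that are present in the old sources but absent from (or changed in) the
    -- new ones. See Note [Cleanup for dropped triggers].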
    dropSourceSQLTriggers ::
      HL.Logger HL.Hasura ->
      InsOrdHashMap SourceName BackendSourceMetadata ->
      InsOrdHashMap SourceName BackendSourceMetadata ->
      m ()
    dropSourceSQLTriggers (HL.Logger logger) oldSources newSources = do
      -- NOTE: the current implementation of this function has an edge case.
      -- When a `SourceA` which contained some event triggers is modified to
      -- point to a new database, this function will try to drop the SQL
      -- triggers of the dropped event triggers on the new database, where they
      -- don't exist. In the current implementation, this doesn't throw an error
      -- because the trigger is dropped using `DROP IF EXISTS..`, meaning it
      -- fails silently without raising an error.
      for_ (OMap.toList newSources) $ \(source, newBackendSourceMetadata) -> do
        onJust (OMap.lookup source oldSources) $ \oldBackendSourceMetadata ->
          compose source newBackendSourceMetadata oldBackendSourceMetadata \(newSourceMetadata :: SourceMetadata b) -> do
            dispatch oldBackendSourceMetadata \oldSourceMetadata -> do
              let oldTriggersMap = getTriggersMap oldSourceMetadata
                  newTriggersMap = getTriggersMap newSourceMetadata
                  droppedEventTriggers = OMap.keys $ oldTriggersMap `OMap.difference` newTriggersMap
                  retainedNewTriggers = newTriggersMap `OMap.intersection` oldTriggersMap
                  catcher e@QErr {qeCode}
                    | qeCode == Unexpected = pure () -- NOTE: This information should be returned by the inconsistent_metadata response, so it doesn't need additional logging.
                    | otherwise = throwError e -- rethrow other errors
              -- This will swallow Unexpected exceptions for sources if allow_inconsistent_metadata is enabled.
              -- This should be ok, since if the sources are already missing from the cache then they should
              -- not need to be removed.
              --
              -- TODO: Determine if any errors should be thrown from askSourceConfig at all if the errors are just being discarded
              return $
                flip catchError catcher do
                  sourceConfig <- askSourceConfig @b source
                  for_ droppedEventTriggers $ dropTriggerAndArchiveEvents @b sourceConfig
                  for_ (OMap.toList retainedNewTriggers) $ \(retainedNewTriggerName, retainedNewTriggerConf) ->
                    case OMap.lookup retainedNewTriggerName oldTriggersMap of
                      Nothing -> pure ()
                      Just oldTriggerConf -> do
                        let newTriggerOps = etcDefinition retainedNewTriggerConf
                            oldTriggerOps = etcDefinition oldTriggerConf
                            isDroppedOp old new = isJust old && isNothing new
                            droppedOps =
                              [ bool Nothing (Just INSERT) (isDroppedOp (tdInsert oldTriggerOps) (tdInsert newTriggerOps)),
                                bool Nothing (Just UPDATE) (isDroppedOp (tdUpdate oldTriggerOps) (tdUpdate newTriggerOps)),
                                bool Nothing (Just ET.DELETE) (isDroppedOp (tdDelete oldTriggerOps) (tdDelete newTriggerOps))
                              ]
                        dropDanglingSQLTrigger @b sourceConfig retainedNewTriggerName (Set.fromList $ catMaybes droppedOps)
      where
        dispatch = AB.dispatchAnyBackend @BackendEventTrigger

        compose ::
          SourceName ->
          AB.AnyBackend i ->
          AB.AnyBackend i ->
          (forall b. BackendEventTrigger b => i b -> i b -> m ()) ->
          m ()
        compose sourceName x y f = AB.composeAnyBackend @BackendEventTrigger f x y (logger $ HL.UnstructuredLog HL.LevelInfo $ TBS.fromText $ "Event trigger clean up couldn't be done on the source " <> sourceName <<> " because it has changed its type")

-- | Only includes the cron triggers with `included_in_metadata` set to `True`
processCronTriggersMetadata :: Metadata -> Metadata
processCronTriggersMetadata metadata =
  let cronTriggersIncludedInMetadata = OMap.filter ctIncludeInMetadata $ _metaCronTriggers metadata
   in metadata {_metaCronTriggers = cronTriggersIncludedInMetadata}

runExportMetadata ::
  forall m.
  (QErrM m, MetadataM m) =>
  ExportMetadata ->
  m EncJSON
runExportMetadata ExportMetadata {} =
  encJFromOrderedValue . metadataToOrdJSON <$> (processCronTriggersMetadata <$> getMetadata)
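
-- | Export the current metadata together with its resource version, as an
-- object of the shape (illustrative):
--
-- > { "resource_version": 42, "metadata": { ... } }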
runExportMetadataV2 ::
  forall m.
  (QErrM m, MetadataM m) =>
  MetadataResourceVersion ->
  ExportMetadata ->
  m EncJSON
runExportMetadataV2 currentResourceVersion ExportMetadata {} = do
  exportMetadata <- processCronTriggersMetadata <$> getMetadata
  pure $
    encJFromOrderedValue $
      AO.object
        [ ("resource_version", AO.toOrdered currentResourceVersion),
          ("metadata", metadataToOrdJSON exportMetadata)
        ]
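
-- | Rebuild the schema cache from the stored metadata, invalidating the
-- requested remote schemas and sources and optionally re-creating event
-- triggers for the given sources. The response reports whether the resulting
-- metadata is consistent.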
runReloadMetadata :: (QErrM m, CacheRWM m, MetadataM m) => ReloadMetadata -> m EncJSON
runReloadMetadata (ReloadMetadata reloadRemoteSchemas reloadSources reloadRecreateEventTriggers) = do
  metadata <- getMetadata
  let allSources = HS.fromList $ OMap.keys $ _metaSources metadata
      allRemoteSchemas = HS.fromList $ OMap.keys $ _metaRemoteSchemas metadata
      checkRemoteSchema name =
        unless (HS.member name allRemoteSchemas) $
          throw400 NotExists $
            "Remote schema with name " <> name <<> " not found in metadata"
      checkSource name =
        unless (HS.member name allSources) $
          throw400 NotExists $
            "Source with name " <> name <<> " not found in metadata"

  remoteSchemaInvalidations <- case reloadRemoteSchemas of
    RSReloadAll -> pure allRemoteSchemas
    RSReloadList l -> mapM_ checkRemoteSchema l *> pure l
  pgSourcesInvalidations <- case reloadSources of
    RSReloadAll -> pure allSources
    RSReloadList l -> mapM_ checkSource l *> pure l
  recreateEventTriggersSources <- case reloadRecreateEventTriggers of
    RSReloadAll -> pure allSources
    RSReloadList l -> mapM_ checkSource l *> pure l

  let cacheInvalidations =
        CacheInvalidations
          { ciMetadata = True,
            ciRemoteSchemas = remoteSchemaInvalidations,
            ciSources = pgSourcesInvalidations
          }

  buildSchemaCacheWithOptions (CatalogUpdate $ Just recreateEventTriggersSources) cacheInvalidations metadata
  inconsObjs <- scInconsistentObjs <$> askSchemaCache
  pure . encJFromJValue . J.object $
    [ ("message" :: Text) J..= ("success" :: Text),
      "is_consistent" J..= null inconsObjs
    ]
      <> ["inconsistent_objects" J..= inconsObjs | not (null inconsObjs)]

runDumpInternalState ::
  (QErrM m, CacheRM m) =>
  DumpInternalState ->
  m EncJSON
runDumpInternalState _ =
  encJFromJValue <$> askSchemaCache

runGetInconsistentMetadata ::
  (QErrM m, CacheRM m) =>
  GetInconsistentMetadata ->
  m EncJSON
runGetInconsistentMetadata _ = do
  inconsObjs <- scInconsistentObjs <$> askSchemaCache
  return $ encJFromJValue $ formatInconsistentObjs inconsObjs

formatInconsistentObjs :: [InconsistentMetadata] -> J.Value
formatInconsistentObjs inconsObjs =
  J.object
    [ "is_consistent" J..= null inconsObjs,
      "inconsistent_objects" J..= inconsObjs
    ]

runDropInconsistentMetadata ::
  (QErrM m, CacheRWM m, MetadataM m) =>
  DropInconsistentMetadata ->
  m EncJSON
runDropInconsistentMetadata _ = do
  sc <- askSchemaCache
  let inconsSchObjs = L.nub . concatMap imObjectIds $ scInconsistentObjs sc
  -- Note: when building the schema cache, we try to put dependents after their dependencies in the
  -- list of inconsistent objects, so reverse the list to start with dependents first. This is not
  -- perfect — a completely accurate solution would require performing a topological sort — but it
  -- seems to work well enough for now.
  MetadataModifier {..} <- execWriterT $ mapM_ (tell . purgeMetadataObj) (reverse inconsSchObjs)
  metadata <- getMetadata
  putMetadata $ runMetadataModifier metadata
  buildSchemaCache mempty
  -- after building the schema cache, we need to check that any remaining
  -- inconsistent metadata objects are only those which are not droppable
  newInconsistentObjects <- scInconsistentObjs <$> askSchemaCache
  let droppableInconsistentObjects = filter droppableInconsistentMetadata newInconsistentObjects
  unless (null droppableInconsistentObjects) $
    throwError
      (err400 Unexpected "cannot continue due to new inconsistent metadata")
        { qeInternal = Just $ ExtraInternal $ J.toJSON newInconsistentObjects
        }
  return successMsg
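
-- | Compute a 'MetadataModifier' that removes the given (inconsistent) object
-- from the metadata.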
purgeMetadataObj :: MetadataObjId -> MetadataModifier
purgeMetadataObj = \case
  MOSource source -> MetadataModifier $ metaSources %~ OMap.delete source
  MOSourceObjId source exists -> AB.dispatchAnyBackend @BackendMetadata exists $ handleSourceObj source
  MORemoteSchema rsn -> dropRemoteSchemaInMetadata rsn
  MORemoteSchemaPermissions rsName role -> dropRemoteSchemaPermissionInMetadata rsName role
  MORemoteSchemaRemoteRelationship rsName typeName relName ->
    dropRemoteSchemaRemoteRelationshipInMetadata rsName typeName relName
  MOCustomTypes -> clearCustomTypesInMetadata
  MOAction action -> dropActionInMetadata action -- Nothing
  MOActionPermission action role -> dropActionPermissionInMetadata action role
  MOCronTrigger ctName -> dropCronTriggerInMetadata ctName
  MOEndpoint epName -> dropEndpointInMetadata epName
  MOInheritedRole role -> dropInheritedRoleInMetadata role
  MOHostTlsAllowlist host -> dropHostFromAllowList host
  MOQueryCollectionsQuery cName lq -> dropListedQueryFromQueryCollections cName lq
  where
    handleSourceObj :: forall b. BackendMetadata b => SourceName -> SourceMetadataObjId b -> MetadataModifier
    handleSourceObj source = \case
      SMOTable qt -> dropTableInMetadata @b source qt
      SMOFunction qf -> dropFunctionInMetadata @b source qf
      SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata @b source qf rn
      SMOTableObj qt tableObj ->
        MetadataModifier $
          tableMetadataSetter @b source qt %~ case tableObj of
            MTORel rn _ -> dropRelationshipInMetadata rn
            MTOPerm rn pt -> dropPermissionInMetadata rn pt
            MTOTrigger trn -> dropEventTriggerInMetadata trn
            MTOComputedField ccn -> dropComputedFieldInMetadata ccn
            MTORemoteRelationship rn -> dropRemoteRelationshipInMetadata rn

    dropListedQueryFromQueryCollections :: CollectionName -> ListedQuery -> MetadataModifier
    dropListedQueryFromQueryCollections cName lq = MetadataModifier $ removeAndCleanupMetadata
      where
        removeAndCleanupMetadata m =
          let newQueryCollection = filteredCollection (_metaQueryCollections m)
              -- QueryCollections = InsOrdHashMap CollectionName CreateCollection
              filteredCollection :: QueryCollections -> QueryCollections
              filteredCollection qc = OMap.filter isNonEmptyCC $ OMap.adjust collectionModifier cName qc

              collectionModifier :: CreateCollection -> CreateCollection
              collectionModifier cc@CreateCollection {..} =
                cc
                  { _ccDefinition =
                      let oldQueries = _cdQueries _ccDefinition
                       in _ccDefinition
                            { _cdQueries = filter (/= lq) oldQueries
                            }
                  }

              isNonEmptyCC :: CreateCollection -> Bool
              isNonEmptyCC = not . null . _cdQueries . _ccDefinition

              cleanupAllowList :: MetadataAllowlist -> MetadataAllowlist
              cleanupAllowList = OMap.filterWithKey (\_ _ -> OMap.member cName newQueryCollection)

              cleanupRESTEndpoints :: Endpoints -> Endpoints
              cleanupRESTEndpoints endpoints = OMap.filter (not . isFaultyQuery . _edQuery . _ceDefinition) endpoints

              isFaultyQuery :: QueryReference -> Bool
              isFaultyQuery QueryReference {..} = _qrCollectionName == cName && _qrQueryName == _lqName lq
           in m
                { _metaQueryCollections = newQueryCollection,
                  _metaAllowlist = cleanupAllowList (_metaAllowlist m),
                  _metaRestEndpoints = cleanupRESTEndpoints (_metaRestEndpoints m)
                }

runGetCatalogState ::
  (MonadMetadataStorageQueryAPI m) => GetCatalogState -> m EncJSON
runGetCatalogState _ =
  encJFromJValue <$> fetchCatalogState

runSetCatalogState ::
  (MonadMetadataStorageQueryAPI m) => SetCatalogState -> m EncJSON
runSetCatalogState SetCatalogState {..} = do
  updateCatalogState _scsType _scsState
  pure successMsg

runSetMetricsConfig ::
  (MonadIO m, CacheRWM m, MetadataM m, MonadError QErr m) =>
  MetricsConfig ->
  m EncJSON
runSetMetricsConfig mc = do
  withNewInconsistentObjsCheck $
    buildSchemaCache $
      MetadataModifier $
        metaMetricsConfig .~ mc
  pure successMsg

runRemoveMetricsConfig ::
  (MonadIO m, CacheRWM m, MetadataM m, MonadError QErr m) =>
  m EncJSON
runRemoveMetricsConfig = do
  withNewInconsistentObjsCheck $
    buildSchemaCache $
      MetadataModifier $
        metaMetricsConfig .~ emptyMetricsConfig
  pure successMsg
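
-- | The two ways testing a webhook transform can fail: the initial HTTP
-- request could not be constructed, or the transformation itself failed.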
data TestTransformError
  = RequestInitializationError HTTP.HttpException
  | RequestTransformationError HTTP.Request TransformErrorBundle

runTestWebhookTransform ::
  (QErrM m) =>
  TestWebhookTransform ->
  m EncJSON
runTestWebhookTransform (TestWebhookTransform env headers urlE payload rt _ sv) = do
  url <- case urlE of
    URL url' -> interpolateFromEnv env url'
    EnvVar var ->
      let err = throwError $ err400 NotFound "Missing Env Var"
       in maybe err (pure . T.pack) $ Env.lookupEnv env var
  headers' <- traverse (traverse (fmap TE.encodeUtf8 . interpolateFromEnv env . TE.decodeUtf8)) headers
  result <- runExceptT $ do
    initReq <- hoistEither $ first RequestInitializationError $ HTTP.mkRequestEither url

    let req = initReq & HTTP.body .~ pure (J.encode payload) & HTTP.headers .~ headers'
        reqTransform = requestFields rt
        engine = templateEngine rt
        reqTransformCtx = mkReqTransformCtx url sv engine
    hoistEither $ first (RequestTransformationError req) $ applyRequestTransform reqTransformCtx reqTransform req
  case result of
    Right transformed ->
      pure $ packTransformResult $ Right transformed
    Left (RequestTransformationError _ err) -> pure $ packTransformResult (Left err)
    -- NOTE: In the following case we have failed before producing a valid request.
    Left (RequestInitializationError err) ->
      let errorBundle =
            TransformErrorBundle $
              pure $
                J.object ["error_code" J..= J.String "Request Initialization Error", "message" J..= J.String (tshow err)]
       in pure $ encJFromJValue $ J.toJSON errorBundle

interpolateFromEnv :: MonadError QErr m => Env.Environment -> Text -> m Text
interpolateFromEnv env url =
  case AT.parseOnly parseEnvTemplate url of
    Left _ -> throwError $ err400 ParseFailed "Invalid Url Template"
    Right xs ->
      let lookup' var = maybe (Left var) (Right . T.pack) $ Env.lookupEnv env (T.unpack var)
          result = traverse (fmap indistinct . bitraverse lookup' pure) xs
          err e = throwError $ err400 NotFound $ "Missing Env Var: " <> e
       in either err (pure . fold) result
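
-- For intuition (illustrative values): with an environment where HOST is set
-- to "example.com",
--
--   interpolateFromEnv env "https://{{HOST}}/api" == pure "https://example.com/api"
--
-- while a reference to an unset variable fails with a "Missing Env Var" error.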

-- | Deserialize a JSON or X-WWW-URL-FORMENCODED body from an
-- 'HTTP.Request' as 'J.Value'.
decodeBody :: Maybe BL.ByteString -> J.Value
decodeBody Nothing = J.Null
decodeBody (Just bs) = fromMaybe J.Null $ jsonToValue bs <|> formUrlEncodedToValue bs

-- | Attempt to decode a 'ByteString' as an Aeson 'Value'
jsonToValue :: BL.ByteString -> Maybe J.Value
jsonToValue bs = J.decode bs

-- | Quote a 'ByteString' then attempt to decode it as a JSON
-- String. This is necessary for 'x-www-url-formencoded' bodies. They
-- are a list of key/value pairs encoded as a raw 'ByteString' with no
-- quoting, whereas JSON Strings must be quoted.
formUrlEncodedToValue :: BL.ByteString -> Maybe J.Value
formUrlEncodedToValue bs = J.decode ("\"" <> bs <> "\"")
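
-- For example (illustrative): formUrlEncodedToValue "a=1&b=2" == Just (J.String "a=1&b=2"),
-- since quoting the raw bytes yields the JSON document "\"a=1&b=2\"".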

parseEnvTemplate :: AT.Parser [Either T.Text T.Text]
parseEnvTemplate = AT.many1 $ pEnv <|> pLit <|> fmap Right "{"
  where
    pEnv = fmap Left $ "{{" *> AT.takeWhile1 (/= '}') <* "}}"
    pLit = fmap Right $ AT.takeWhile1 (/= '{')
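
-- For intuition (illustrative):
--
--   AT.parseOnly parseEnvTemplate "https://{{HOST}}/api"
--     == Right [Right "https://", Left "HOST", Right "/api"]
--
-- Env var references parse to 'Left', literal chunks to 'Right'.
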
indistinct :: Either a a -> a
indistinct = either id id
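
-- | Pack the result of a request transformation as JSON: on success, the
-- transformed request's URL, method, headers, and decoded body; on failure,
-- the 'TransformErrorBundle'.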
packTransformResult :: Either TransformErrorBundle HTTP.Request -> EncJSON
packTransformResult = \case
  Right req ->
    encJFromJValue $
      J.object
        [ "webhook_url" J..= (req ^. HTTP.url),
          "method" J..= (req ^. HTTP.method),
          "headers" J..= (first CI.foldedCase <$> (req ^. HTTP.headers)),
          "body" J..= decodeBody (req ^. HTTP.body)
        ]
  Left err -> encJFromJValue $ J.toJSON err