allow reusing Postgres scalars in custom types & actions (close #4125) (#4333)

* allow re-using Postgres scalars in custom types, close #4125

* add pytest tests

* update CHANGELOG.md

* add a doc pointer for reusable postgres scalars

* document the code, improve the CHANGELOG entry

As suggested by @lexi-lambda

* a bit more source code documentation, use WriterT to collect reused scalars

* Apply suggestions from code review

Co-Authored-By: Marion Schleifer <marion@hasura.io>

* improve doc for Postgres scalars in custom graphql types

* Add some more references to Note; fix Haddock syntax

Also a few very minor tweaks:
  * Use HashSet instead of [] more pervasively
  * Export execWriterT from Hasura.Prelude
  * Use pattern guards in multi-way if
  * Tweak a few names/comments

* Pull buildActions out of buildAndCollectInfo, use buildInfoMap

* Tweak wording in documentation

* incorporate changes in console code

* account for Postgres scalars in action input arguments

-> Avoid an unnecessary 'throw500' when building the action schema

* Review changes

Co-authored-by: Marion Schleifer <marion@hasura.io>
Co-authored-by: Alexis King <lexi.lambda@gmail.com>
Co-authored-by: Vamshi Surabhi <0x777@users.noreply.github.com>
Co-authored-by: Aleksandra Sikora <ola.zxcvbnm@gmail.com>
Rakesh Emmadi 2020-04-15 17:33:13 +05:30 committed by GitHub
parent dfc1f98e61
commit dc31b835e1
22 changed files with 443 additions and 131 deletions

View File

@ -10,6 +10,7 @@ The order and collapsed state of columns is now persisted across page navigation
### Bug fixes and improvements
- server: support reusing Postgres scalars in custom types (close #4125)
- cli: set_table_is_enum metadata type for squashing migrations (close #4394) (#4395)
- console: query support for actions (#4318)
- cli: query support for actions (#4318)

View File

@ -231,7 +231,7 @@ export const getActionTypes = (currentAction, allTypes) => {
const type = findType(allTypes, typename);
actionTypes[typename] = type;
if (type.fields) {
if (type && type.fields) {
type.fields.forEach(f => {
getDependentTypes(f.type);
if (f.arguments) {
@ -268,7 +268,7 @@ export const getOverlappingTypeConfirmation = (
const action = otherActions[i];
const actionTypes = getActionTypes(action, allTypes);
actionTypes.forEach(t => {
if (typeCollisionMap[t.name]) return;
if (!t || typeCollisionMap[t.name]) return;
overlappingTypenames.forEach(ot => {
if (ot === t.name) {
typeCollisionMap[ot] = true;

View File

@ -117,6 +117,10 @@ const deriveAction = (
const allHasuraTypes = clientSchema._typeMap;
const operationType = getOperationType(clientSchema, operation);
const isHasuraScalar = name => {
return isScalarType(allHasuraTypes[name]);
};
const actionArguments = [];
const newTypes = {};
@ -128,7 +132,7 @@ const deriveAction = (
newType.name = typename;
if (isScalarType(type)) {
if (!inbuiltTypes[type.name]) {
if (!inbuiltTypes[type.name] && !allHasuraTypes[type.name]) {
newType.kind = 'scalar';
newTypes[typename] = newType;
}
@ -156,7 +160,10 @@ const deriveAction = (
type: underLyingType,
wraps: fieldTypeWraps,
} = getUnderlyingType(tf.type);
if (inbuiltTypes[underLyingType.name]) {
if (
inbuiltTypes[underLyingType.name] ||
isHasuraScalar(underLyingType.name)
) {
_tf.type = wrapTypename(underLyingType.name, fieldTypeWraps);
} else {
_tf.type = wrapTypename(
@ -177,7 +184,10 @@ const deriveAction = (
name: v.variable.name.value,
};
const argTypeMetadata = getAstTypeMetadata(v.type);
if (!inbuiltTypes[argTypeMetadata.typename]) {
if (
!inbuiltTypes[argTypeMetadata.typename] &&
!isHasuraScalar(argTypeMetadata.typename)
) {
const argTypename = prefixTypename(argTypeMetadata.typename);
generatedArg.type = wrapTypename(argTypename, argTypeMetadata.stack);
const typeInSchema = allHasuraTypes[argTypeMetadata.typename];
@ -208,19 +218,10 @@ const deriveAction = (
outputTypeField => {
const fieldTypeMetadata = getUnderlyingType(outputTypeField.type);
if (isScalarType(fieldTypeMetadata.type)) {
if (inbuiltTypes[fieldTypeMetadata.type.name]) {
outputTypeFields[outputTypeField.name] = wrapTypename(
fieldTypeMetadata.type.name,
fieldTypeMetadata.wraps
);
} else {
const fieldTypename = prefixTypename(fieldTypeMetadata.type.name);
outputTypeFields[outputTypeField.name] = wrapTypename(
fieldTypename,
fieldTypeMetadata.wraps
);
handleType(fieldTypeMetadata.type, fieldTypename);
}
outputTypeFields[outputTypeField.name] = wrapTypename(
fieldTypeMetadata.type.name,
fieldTypeMetadata.wraps
);
}
}
);

View File

@ -258,6 +258,7 @@ ${enumValuesSdl.join('\n')}
};
const getTypeSdl = type => {
if (!type) return '';
switch (type.kind) {
case 'scalar':
return getScalarTypeSdl(type);

View File

@ -141,6 +141,22 @@ a scalar called ``Date``, you can define it like.
These scalars can be used as arguments of the mutation or as fields of object
types and input types.
.. admonition:: Postgres scalars
Postgres base types are implicitly made available as GraphQL scalars; there
is no need to declare them separately. For example, in the definition
.. code-block:: graphql
type User {
id: uuid!
name: String!
location: geography
}
the ``uuid`` and ``geography`` types are assumed to refer to Postgres
scalars (unless other definitions for them are provided).
Enum types
----------
@ -165,4 +181,3 @@ This means that wherever we use the type ``Color`` in our schema, we expect it
to be exactly one of RED, GREEN, or BLUE.
`See reference <https://graphql.org/learn/schema/#enumeration-types>`__

View File

@ -295,7 +295,7 @@ asyncActionsProcessor cacheRef pgPool httpManager = forever $ do
Nothing -> return ()
Just actionInfo -> do
let definition = _aiDefinition actionInfo
outputFields = _aiOutputFields actionInfo
outputFields = getActionOutputFields $ _aiOutputObject actionInfo
webhookUrl = _adHandler definition
forwardClientHeaders = _adForwardClientHeaders definition
confHeaders = _adHeaders definition

View File

@ -720,11 +720,11 @@ noFilter = annBoolExpTrue
mkGCtxMap
:: forall m. (MonadError QErr m)
=> AnnotatedObjects -> TableCache -> FunctionCache -> ActionCache -> m GCtxMap
mkGCtxMap annotatedObjects tableCache functionCache actionCache = do
=> TableCache -> FunctionCache -> ActionCache -> m GCtxMap
mkGCtxMap tableCache functionCache actionCache = do
typesMapL <- mapM (mkGCtxMapTable tableCache functionCache) $
filter (tableFltr . _tiCoreInfo) $ Map.elems tableCache
actionsSchema <- mkActionsSchema annotatedObjects actionCache
let actionsSchema = mkActionsSchema actionCache
typesMap <- combineTypes actionsSchema typesMapL
let gCtxMap = flip Map.map typesMap $
\(ty, flds, insCtxMap) -> mkGCtx ty flds insCtxMap

View File

@ -3,6 +3,7 @@ module Hasura.GraphQL.Schema.Action
) where
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Language.GraphQL.Draft.Syntax as G
import Data.Coerce (coerce)
@ -68,14 +69,14 @@ mkMutationField actionName actionInfo definitionList =
ActionSynchronous ->
ActionExecutionSyncWebhook $ SyncActionExecutionContext actionName
(_adOutputType definition)
(_aiOutputFields actionInfo)
(getActionOutputFields $ _aiOutputObject actionInfo)
definitionList
(_adHandler definition)
(_adHeaders definition)
(_adForwardClientHeaders definition)
ActionAsynchronous -> ActionExecutionAsync
description = mkDescriptionWith (PGDescription <$> (_aiComment actionInfo)) $
description = mkDescriptionWith (PGDescription <$> _aiComment actionInfo) $
"perform the action: " <>> actionName
fieldInfo =
@ -123,22 +124,21 @@ mkQueryField actionName comment definition definitionList =
idDescription = G.Description $ "id of the action: " <>> actionName
mkActionFieldsAndTypes
:: (QErrM m)
=> ActionInfo
-> AnnotatedObjectType
:: ActionInfo
-> ActionPermissionInfo
-> m ( Maybe (ActionSelectOpContext, ObjFldInfo, TypeInfo)
-> ( Maybe (ActionSelectOpContext, ObjFldInfo, TypeInfo)
-- context, field, response type info
, (ActionExecutionContext, ObjFldInfo) -- mutation field
, FieldMap
)
mkActionFieldsAndTypes actionInfo annotatedOutputType permission =
return ( mkQueryField actionName comment definition definitionList
, mkMutationField actionName actionInfo definitionList
, fieldMap
)
mkActionFieldsAndTypes actionInfo permission =
( mkQueryField actionName comment definition definitionList
, mkMutationField actionName actionInfo definitionList
, fieldMap
)
where
actionName = _aiName actionInfo
annotatedOutputType = _aiOutputObject actionInfo
definition = _aiDefinition actionInfo
roleName = _apiRole permission
comment = _aiComment actionInfo
@ -220,46 +220,38 @@ mkActionFieldsAndTypes actionInfo annotatedOutputType permission =
G.getBaseType $ unGraphQLType $ _adOutputType $ _aiDefinition actionInfo
mkActionSchemaOne
:: (QErrM m)
=> AnnotatedObjects
-> ActionInfo
-> m (Map.HashMap RoleName
( Maybe (ActionSelectOpContext, ObjFldInfo, TypeInfo)
, (ActionExecutionContext, ObjFldInfo)
, FieldMap
)
:: ActionInfo
-> Map.HashMap RoleName
( Maybe (ActionSelectOpContext, ObjFldInfo, TypeInfo)
, (ActionExecutionContext, ObjFldInfo)
, FieldMap
)
mkActionSchemaOne annotatedObjects actionInfo = do
annotatedOutputType <- onNothing
(Map.lookup (ObjectTypeName actionOutputBaseType) annotatedObjects) $
throw500 $ "missing annotated type for: " <> showNamedTy actionOutputBaseType
forM permissions $ \permission ->
mkActionFieldsAndTypes actionInfo annotatedOutputType permission
mkActionSchemaOne actionInfo =
flip Map.map permissions $ \permission ->
mkActionFieldsAndTypes actionInfo permission
where
adminPermission = ActionPermissionInfo adminRole
permissions = Map.insert adminRole adminPermission $ _aiPermissions actionInfo
actionOutputBaseType =
G.getBaseType $ unGraphQLType $ _adOutputType $ _aiDefinition actionInfo
mkActionsSchema
:: (QErrM m)
=> AnnotatedObjects
-> ActionCache
-> m (Map.HashMap RoleName (RootFields, TyAgg))
mkActionsSchema annotatedObjects =
foldM
:: ActionCache
-> Map.HashMap RoleName (RootFields, TyAgg)
mkActionsSchema =
foldl'
(\aggregate actionInfo ->
Map.foldrWithKey f aggregate <$>
mkActionSchemaOne annotatedObjects actionInfo
Map.foldrWithKey (accumulate (_aiPgScalars actionInfo)) aggregate $ mkActionSchemaOne actionInfo
)
mempty
where
-- we'll need to add uuid and timestamptz for actions
newRoleState = (mempty, addScalarToTyAgg PGJSON $
addScalarToTyAgg PGTimeStampTZ $
addScalarToTyAgg PGUUID mempty)
f roleName (queryFieldM, mutationField, fields) =
Map.alter (Just . addToState . fromMaybe newRoleState) roleName
mkNewRoleState pgScalars =
( mempty
, foldr addScalarToTyAgg mempty $
pgScalars <> Set.fromList [PGJSON, PGTimeStampTZ, PGUUID]
)
accumulate pgScalars roleName (queryFieldM, mutationField, fields) =
Map.alter (Just . addToState . fromMaybe (mkNewRoleState pgScalars)) roleName
where
addToState = case queryFieldM of
Just (fldCtx, fldDefinition, responseTypeInfo) ->

View File

@ -31,7 +31,7 @@ buildObjectTypeInfo roleName annotatedObjectType =
\(TypeRelationship name ty remoteTableInfo _) ->
if isJust (getSelectPermissionInfoM remoteTableInfo roleName) ||
roleName == adminRole
then Just (relationshipToFieldInfo name ty $ _tciName $ _tiCoreInfo $ remoteTableInfo)
then Just (relationshipToFieldInfo name ty $ _tciName $ _tiCoreInfo remoteTableInfo)
else Nothing
where
relationshipToFieldInfo name relTy remoteTableName =
@ -116,14 +116,18 @@ annotateObjectType tableCache nonObjectTypeMap objectDefinition = do
buildCustomTypesSchemaPartial
:: (QErrM m)
=> TableCache -> CustomTypes -> m (NonObjectTypeMap, AnnotatedObjects)
buildCustomTypesSchemaPartial tableCache customTypes = do
=> TableCache
-> CustomTypes
-> HashSet PGScalarType
-- ^ Postgres base types used in the custom type definitions;
-- see Note [Postgres scalars in custom types].
-> m (NonObjectTypeMap, AnnotatedObjects)
buildCustomTypesSchemaPartial tableCache customTypes pgScalars = do
let typeInfos =
map (VT.TIEnum . convertEnumDefinition) enumDefinitions <>
-- map (VT.TIObj . convertObjectDefinition) objectDefinitions <>
map (VT.TIInpObj . convertInputObjectDefinition) inputObjectDefinitions <>
map (VT.TIScalar . convertScalarDefinition) scalarDefinitions
-- <> defaultTypes
map (VT.TIScalar . convertScalarDefinition) scalarDefinitions <>
map (VT.TIScalar . VT.mkHsraScalarTyInfo) (toList pgScalars)
nonObjectTypeMap = NonObjectTypeMap $ mapFromL VT.getNamedTy typeInfos
annotatedObjectTypes <- mapFromL (_otdName . _aotDefinition) <$>

View File

@ -28,7 +28,8 @@ import Control.Monad.Fail as M (MonadFail)
import Control.Monad.Identity as M
import Control.Monad.Reader as M
import Control.Monad.State.Strict as M
import Control.Monad.Writer.Strict as M (MonadWriter (..), WriterT (..))
import Control.Monad.Writer.Strict as M (MonadWriter (..), WriterT (..),
execWriterT, runWriterT)
import Data.Align as M (Align (align, alignWith))
import Data.Align.Key as M (AlignWithKey (..))
import Data.Bool as M (bool)

View File

@ -36,6 +36,7 @@ import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
@ -77,36 +78,67 @@ persistCreateAction (CreateAction actionName actionDefinition comment) = do
VALUES ($1, $2, $3)
|] (actionName, Q.AltJ actionDefinition, comment) True
{- Note [Postgres scalars in action input arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very convenient to be able to reference Postgres scalars in action
input arguments. For example, consider the following action mutation:
extend type mutation_root {
create_user (
name: String!
created_at: timestamptz
): User
}
Here, timestamptz is a Postgres scalar, so we need to validate that the
timestamptz type is actually present in the Postgres database. To do this, the
'resolveAction' function takes the set of all Postgres scalar types as one of
its inputs and returns the set of scalars that were referenced.
-}
resolveAction
:: (QErrM m, MonadIO m)
=> (NonObjectTypeMap, AnnotatedObjects)
-> HashSet PGScalarType -- ^ List of all Postgres scalar types.
-> ActionDefinitionInput
-> m (ResolvedActionDefinition, ActionOutputFields)
resolveAction customTypes actionDefinition = do
-> m ( ResolvedActionDefinition
, AnnotatedObjectType
, HashSet PGScalarType -- ^ see Note [Postgres scalars in action input arguments].
)
resolveAction customTypes allPGScalars actionDefinition = do
let responseType = unGraphQLType $ _adOutputType actionDefinition
responseBaseType = G.getBaseType responseType
forM (_adArguments actionDefinition) $ \argument -> do
let argumentBaseType = G.getBaseType $ unGraphQLType $ _argType argument
argTypeInfo <- getNonObjectTypeInfo argumentBaseType
case argTypeInfo of
VT.TIScalar _ -> return ()
VT.TIEnum _ -> return ()
VT.TIInpObj _ -> return ()
_ -> throw400 InvalidParams $ "the argument's base type: "
<> showNamedTy argumentBaseType <>
" should be a scalar/enum/input_object"
reusedPGScalars <- execWriterT $
forM (_adArguments actionDefinition) $ \argument -> do
let argumentBaseType = G.getBaseType $ unGraphQLType $ _argType argument
maybeArgTypeInfo = getNonObjectTypeInfo argumentBaseType
maybePGScalar = find ((==) argumentBaseType . VT.mkScalarTy) allPGScalars
if | Just argTypeInfo <- maybeArgTypeInfo ->
case argTypeInfo of
VT.TIScalar _ -> pure ()
VT.TIEnum _ -> pure ()
VT.TIInpObj _ -> pure ()
_ -> throw400 InvalidParams $ "the argument's base type: "
<> showNamedTy argumentBaseType <>
" should be a scalar/enum/input_object"
-- Collect the referred Postgres scalar. See Note [Postgres scalars in action input arguments].
| Just pgScalar <- maybePGScalar -> tell $ Set.singleton pgScalar
| Nothing <- maybeArgTypeInfo ->
throw400 NotExists $ "the type: " <> showNamedTy argumentBaseType
<> " is not defined in custom types"
| otherwise -> pure ()
-- Check if the response type is an object
annFields <- _aotAnnotatedFields <$> getObjectTypeInfo responseBaseType
let outputFields = Map.fromList $ map (unObjectFieldName *** fst) $ Map.toList annFields
outputObject <- getObjectTypeInfo responseBaseType
resolvedDef <- traverse resolveWebhook actionDefinition
pure (resolvedDef, outputFields)
pure (resolvedDef, outputObject, reusedPGScalars)
where
getNonObjectTypeInfo typeName = do
getNonObjectTypeInfo typeName =
let nonObjectTypeMap = unNonObjectTypeMap $ fst $ customTypes
inputTypeInfos = nonObjectTypeMap <> mapFromL VT.getNamedTy defaultTypes
onNothing (Map.lookup typeName inputTypeInfos) $
throw400 NotExists $ "the type: " <> showNamedTy typeName <>
" is not defined in custom types"
in Map.lookup typeName inputTypeInfos
resolveWebhook (InputWebhook urlTemplate) = do
eitherRenderedTemplate <- renderURLTemplate urlTemplate
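
A minimal, self-contained sketch of the collection pattern used in the argument loop above (execWriterT plus a multi-way if with pattern guards): unknown types still fail, while referenced Postgres scalars are accumulated with 'tell'. The names ScalarName, knownCustomTypes, allPgScalars and the plain String error type are illustrative stand-ins, not the real module's types.

{-# LANGUAGE MultiWayIf #-}
module CollectScalarsSketch where

import           Control.Monad               (forM_)
import           Control.Monad.Except        (MonadError, runExcept, throwError)
import           Control.Monad.Writer.Strict (execWriterT, tell)
import           Data.Foldable               (find)
import qualified Data.HashSet                as Set

type ScalarName = String

-- Hypothetical stand-ins for the custom-type map and the Postgres catalog.
knownCustomTypes :: Set.HashSet ScalarName
knownCustomTypes = Set.fromList ["String", "Int", "UserInput"]

allPgScalars :: Set.HashSet ScalarName
allPgScalars = Set.fromList ["uuid", "timestamptz", "geography"]

-- Walk the argument base types: reject unknown ones, and collect any Postgres
-- scalars that were referenced, mirroring the shape of 'resolveAction'.
collectReusedScalars
  :: MonadError String m
  => [ScalarName] -> m (Set.HashSet ScalarName)
collectReusedScalars argTypes = execWriterT $
  forM_ argTypes $ \ty -> do
    let maybeCustom   = if Set.member ty knownCustomTypes then Just ty else Nothing
        maybePgScalar = find (== ty) allPgScalars
    if | Just _ <- maybeCustom        -> pure ()
       | Just scalar <- maybePgScalar -> tell $ Set.singleton scalar
       | otherwise                    ->
           throwError $ "the type " <> ty <> " is not defined in custom types"

-- Example usage (ordering inside the HashSet is unspecified):
exampleRun :: Either String (Set.HashSet ScalarName)
exampleRun = runExcept $ collectReusedScalars ["String", "timestamptz", "uuid"]
-- exampleRun == Right (Set.fromList ["timestamptz", "uuid"])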

View File

@ -15,16 +15,47 @@ import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.EncJSON
import Hasura.GraphQL.Validate.Types (mkScalarTy)
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.GraphQL.Schema.CustomTypes (buildCustomTypesSchemaPartial)
{- Note [Postgres scalars in custom types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very convenient to be able to reference Postgres scalars in custom type
definitions. For example, we might have a type like this:
type User {
id: uuid!
name: String!
location: geography
}
The uuid and geography types are Postgres scalars, not separately-defined
GraphQL types. To support this, we have to take a few extra steps:
1. The set of Postgres base types is not fixed; extensions like PostGIS add
new ones, and users can even define their own. Therefore, we fetch the
currently defined base types from the @pg_catalog.pg_type@ system table as part of
loading the metadata.
2. It's possible for a custom type definition to use a type that doesn't
appear elsewhere in the GraphQL schema, so we record which base types were
referenced while validating the custom type definitions and make sure to
include them in the generated schema explicitly.
-}
-- | Validate the custom types and return any reused Postgres base types (as
-- scalars).
validateCustomTypeDefinitions
:: (MonadValidate [CustomTypeValidationError] m)
=> TableCache -> CustomTypes -> m ()
validateCustomTypeDefinitions tableCache customTypes = do
=> TableCache
-> CustomTypes
-> HashSet PGScalarType -- ^ all Postgres base types
-> m (HashSet PGScalarType) -- ^ see Note [Postgres scalars in custom types]
validateCustomTypeDefinitions tableCache customTypes allPGScalars = execWriterT do
unless (null duplicateTypes) $ dispute $ pure $ DuplicateTypeNames duplicateTypes
traverse_ validateEnum enumDefinitions
traverse_ validateInputObject inputObjectDefinitions
@ -62,7 +93,9 @@ validateCustomTypeDefinitions tableCache customTypes = do
(_etdName enumDefinition) duplicateEnumValues
validateInputObject
:: (MonadValidate [CustomTypeValidationError] m)
:: ( MonadValidate [CustomTypeValidationError] m
, MonadWriter (Set.HashSet PGScalarType) m
)
=> InputObjectTypeDefinition -> m ()
validateInputObject inputObjectDefinition = do
let inputObjectTypeName = _iotdName inputObjectDefinition
@ -85,13 +118,18 @@ validateCustomTypeDefinitions tableCache customTypes = do
-- check that fields reference input types
for_ (_iotdFields inputObjectDefinition) $ \inputObjectField -> do
let fieldBaseType = G.getBaseType $ unGraphQLType $ _iofdType inputObjectField
unless (Set.member fieldBaseType inputTypes) $
refute $ pure $ InputObjectFieldTypeDoesNotExist
(_iotdName inputObjectDefinition)
(_iofdName inputObjectField) fieldBaseType
if | Set.member fieldBaseType inputTypes -> pure ()
| Just pgScalar <- lookupPGScalar fieldBaseType ->
tell $ Set.singleton pgScalar
| otherwise ->
refute $ pure $ InputObjectFieldTypeDoesNotExist
(_iotdName inputObjectDefinition)
(_iofdName inputObjectField) fieldBaseType
validateObject
:: (MonadValidate [CustomTypeValidationError] m)
:: ( MonadValidate [CustomTypeValidationError] m
, MonadWriter (Set.HashSet PGScalarType) m
)
=> ObjectTypeDefinition -> m ()
validateObject objectDefinition = do
let objectTypeName = _otdName objectDefinition
@ -122,14 +160,16 @@ validateCustomTypeDefinitions tableCache customTypes = do
-- check that the fields only reference scalars and enums
-- and not other object types
if | Set.member fieldBaseType scalarTypes -> return ()
| Set.member fieldBaseType enumTypes -> return ()
if | Set.member fieldBaseType scalarTypes -> pure ()
| Set.member fieldBaseType enumTypes -> pure ()
| Set.member fieldBaseType objectTypes ->
dispute $ pure $ ObjectFieldObjectBaseType
objectTypeName fieldName fieldBaseType
objectTypeName fieldName fieldBaseType
| Just pgScalar <- lookupPGScalar fieldBaseType ->
tell $ Set.singleton pgScalar
| otherwise ->
dispute $ pure $ ObjectFieldTypeDoesNotExist
objectTypeName fieldName fieldBaseType
objectTypeName fieldName fieldBaseType
-- collect all non list scalar types of this object
if (not (isListType fieldType) && Set.member fieldBaseType scalarTypes)
@ -160,6 +200,9 @@ validateCustomTypeDefinitions tableCache customTypes = do
objectTypeName relationshipName remoteTable columnName
return ()
lookupPGScalar baseType = -- see Note [Postgres scalars in custom types]
find ((==) baseType . mkScalarTy) allPGScalars
data CustomTypeValidationError
= DuplicateTypeNames !(Set.HashSet G.NamedType)
-- ^ type names have to be unique across all types
@ -265,11 +308,11 @@ clearCustomTypes = do
resolveCustomTypes
:: (MonadError QErr m)
=> TableCache -> CustomTypes -> m (NonObjectTypeMap, AnnotatedObjects)
resolveCustomTypes tableCache customTypes = do
either (throw400 ConstraintViolation . showErrors) pure
=<< runValidateT (validateCustomTypeDefinitions tableCache customTypes)
buildCustomTypesSchemaPartial tableCache customTypes
=> TableCache -> CustomTypes -> HashSet PGScalarType -> m (NonObjectTypeMap, AnnotatedObjects)
resolveCustomTypes tableCache customTypes allPGScalars = do
reusedPGScalars <- either (throw400 ConstraintViolation . showErrors) pure
=<< runValidateT (validateCustomTypeDefinitions tableCache customTypes allPGScalars)
buildCustomTypesSchemaPartial tableCache customTypes reusedPGScalars
where
showErrors :: [CustomTypeValidationError] -> T.Text
showErrors allErrors =

View File

@ -198,7 +198,7 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
buildAndCollectInfo = proc (catalogMetadata, invalidationKeys) -> do
let CatalogMetadata tables relationships permissions
eventTriggers remoteSchemas functions allowlistDefs
computedFields customTypes actions = catalogMetadata
computedFields catalogCustomTypes actions = catalogMetadata
-- tables
tableRawInfos <- buildTableCache -< (tables, Inc.selectD #_ikMetadata invalidationKeys)
@ -256,26 +256,22 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
& HS.fromList
-- custom types
resolvedCustomTypes <- bindA -< resolveCustomTypes tableCache customTypes
let CatalogCustomTypes customTypes pgScalars = catalogCustomTypes
maybeResolvedCustomTypes <-
(| withRecordInconsistency
(bindErrorA -< resolveCustomTypes tableCache customTypes pgScalars)
|) (MetadataObject MOCustomTypes $ toJSON customTypes)
-- actions
actionCache <- (mapFromL _amName actions >- returnA)
>-> (| Inc.keyed (\_ action -> do
let ActionMetadata name comment def actionPermissions = action
metadataObj = MetadataObject (MOAction name) $ toJSON $
CreateAction name def comment
addActionContext e = "in action " <> name <<> "; " <> e
(| withRecordInconsistency (
(| modifyErrA ( do
(resolvedDef, outFields) <- bindErrorA -< resolveAction resolvedCustomTypes def
let permissionInfos = map (ActionPermissionInfo . _apmRole) actionPermissions
permissionMap = mapFromL _apiRole permissionInfos
returnA -< ActionInfo name outFields resolvedDef permissionMap comment
)
|) addActionContext)
|) metadataObj)
|)
>-> (\actionMap -> returnA -< M.catMaybes actionMap)
actionCache <- case maybeResolvedCustomTypes of
Just resolvedCustomTypes -> buildActions -< ((resolvedCustomTypes, pgScalars), actions)
-- If the custom types themselves are inconsistent, we cant really do
-- anything with actions, so just mark them all inconsistent.
Nothing -> do
recordInconsistencies -< ( map mkActionMetadataObject actions
, "custom types are inconsistent" )
returnA -< M.empty
-- remote schemas
let remoteSchemaInvalidationKeys = Inc.selectD #_ikRemoteSchemas invalidationKeys
@ -287,7 +283,9 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
, _boFunctions = functionCache
, _boRemoteSchemas = remoteSchemaMap
, _boAllowlist = allowList
, _boCustomTypes = resolvedCustomTypes
-- If 'maybeResolvedCustomTypes' is 'Nothing', the custom types are inconsistent.
-- In that case, fall back to an empty resolved value for the custom types.
, _boCustomTypes = fromMaybe (NonObjectTypeMap mempty, mempty) maybeResolvedCustomTypes
}
mkEventTriggerMetadataObject (CatalogEventTrigger qt trn configuration) =
@ -295,6 +293,9 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
definition = object ["table" .= qt, "configuration" .= configuration]
in MetadataObject objectId definition
mkActionMetadataObject (ActionMetadata name comment defn _) =
MetadataObject (MOAction name) (toJSON $ CreateAction name defn comment)
mkRemoteSchemaMetadataObject remoteSchema =
MetadataObject (MORemoteSchema (_arsqName remoteSchema)) (toJSON remoteSchema)
@ -353,6 +354,27 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
liftTx $ delTriggerQ triggerName -- executes DROP IF EXISTS.. sql
mkAllTriggersQ triggerName tableName (M.elems tableColumns) triggerDefinition
buildActions
:: ( ArrowChoice arr, Inc.ArrowDistribute arr, Inc.ArrowCache m arr
, ArrowWriter (Seq CollectedInfo) arr, MonadIO m )
=> ( ((NonObjectTypeMap, AnnotatedObjects), HashSet PGScalarType)
, [ActionMetadata]
) `arr` HashMap ActionName ActionInfo
buildActions = buildInfoMap _amName mkActionMetadataObject buildAction
where
buildAction = proc ((resolvedCustomTypes, pgScalars), action) -> do
let ActionMetadata name comment def actionPermissions = action
addActionContext e = "in action " <> name <<> "; " <> e
(| withRecordInconsistency (
(| modifyErrA (do
(resolvedDef, outObject, reusedPgScalars) <- liftEitherA <<< bindA -<
runExceptT $ resolveAction resolvedCustomTypes pgScalars def
let permissionInfos = map (ActionPermissionInfo . _apmRole) actionPermissions
permissionMap = mapFromL _apiRole permissionInfos
returnA -< ActionInfo name outObject resolvedDef permissionMap reusedPgScalars comment)
|) addActionContext)
|) (mkActionMetadataObject action)
buildRemoteSchemas
:: ( ArrowChoice arr, Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
, Inc.ArrowCache m arr , MonadIO m, HasHttpManager m )
@ -384,7 +406,7 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
, ActionCache
) `arr` (RemoteSchemaMap, GS.GCtxMap, GS.GCtx)
buildGQLSchema = proc (tableCache, functionCache, remoteSchemas, customTypes, actionCache) -> do
baseGQLSchema <- bindA -< GS.mkGCtxMap (snd customTypes) tableCache functionCache actionCache
baseGQLSchema <- bindA -< GS.mkGCtxMap tableCache functionCache actionCache
(| foldlA' (\(remoteSchemaMap, gqlSchemas, remoteGQLSchemas)
(remoteSchemaName, (remoteSchema, metadataObject)) ->
(| withRecordInconsistency (do

View File

@ -14,10 +14,12 @@ module Hasura.RQL.Types.Action
, ResolvedActionDefinition
, ActionOutputFields
, getActionOutputFields
, ActionInfo(..)
, aiName
, aiOutputFields
, aiOutputObject
, aiDefinition
, aiPgScalars
, aiPermissions
, aiComment
, ActionPermissionInfo(..)
@ -119,14 +121,20 @@ data ActionPermissionInfo
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ActionPermissionInfo)
type ActionPermissionMap = Map.HashMap RoleName ActionPermissionInfo
type ActionOutputFields = Map.HashMap G.Name G.GType
getActionOutputFields :: AnnotatedObjectType -> ActionOutputFields
getActionOutputFields =
Map.fromList . map (unObjectFieldName *** fst) . Map.toList . _aotAnnotatedFields
data ActionInfo
= ActionInfo
{ _aiName :: !ActionName
, _aiOutputFields :: !ActionOutputFields
, _aiOutputObject :: !AnnotatedObjectType
, _aiDefinition :: !ResolvedActionDefinition
, _aiPermissions :: !ActionPermissionMap
, _aiPgScalars :: !(HashSet PGScalarType)
, _aiComment :: !(Maybe Text)
} deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) ''ActionInfo)
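
The 'getActionOutputFields' helper above is written point-free; (***) from Control.Arrow applies one function to each half of a pair, so the composition turns every (ObjectFieldName, (fieldType, annotation)) entry into a (name, fieldType) entry. A rough standalone equivalent with simplified stand-in types (the real ObjectFieldName, G.Name and G.GType differ) is sketched below.

module OutputFieldsSketch where

import           Control.Arrow       ((***))
import qualified Data.HashMap.Strict as Map

-- Simplified stand-ins for ObjectFieldName, G.GType and the per-field
-- annotation; only the shape of the transformation matters here.
newtype ObjectFieldName = ObjectFieldName { unObjectFieldName :: String }
  deriving (Eq, Show)
type GType      = String
type Annotation = String

-- Same shape as 'getActionOutputFields': keep each field's name and GraphQL
-- type, dropping the extra annotation carried alongside it.
toOutputFields
  :: Map.HashMap ObjectFieldName (GType, Annotation)
  -> Map.HashMap String GType
toOutputFields =
  Map.fromList . map (unObjectFieldName *** fst) . Map.toList

In the real module the first component of the value is the field's G.GType, so the result is exactly the ActionOutputFields map (Map.HashMap G.Name G.GType) that mkMutationField and the async-actions processor now derive on demand.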

View File

@ -12,6 +12,7 @@ module Hasura.RQL.Types.Catalog
, CatalogPermission(..)
, CatalogEventTrigger(..)
, CatalogFunction(..)
, CatalogCustomTypes(..)
) where
import Hasura.Prelude
@ -139,6 +140,25 @@ instance NFData CatalogFunction
instance Cacheable CatalogFunction
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
data CatalogCustomTypes
= CatalogCustomTypes
{ _cctCustomTypes :: !CustomTypes
, _cctPgScalars :: !(HashSet PGScalarType)
-- ^ All Postgres base types, which may be referenced in custom type definitions.
-- When we validate the custom types (see 'validateCustomTypeDefinitions'),
-- we record which base types were referenced so that we can be sure to include them
-- in the generated GraphQL schema.
--
-- These are not actually part of the Hasura metadata --- we fetch them from
-- @pg_catalog.pg_type@ --- but they're needed when validating the custom type
-- metadata, so we include them here.
--
-- See Note [Postgres scalars in custom types] for more details.
} deriving (Show, Eq, Generic)
instance NFData CatalogCustomTypes
instance Cacheable CatalogCustomTypes
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogCustomTypes)
type CatalogAction = ActionMetadata
data CatalogMetadata
@ -151,7 +171,7 @@ data CatalogMetadata
, _cmFunctions :: ![CatalogFunction]
, _cmAllowlistCollections :: ![CollectionDef]
, _cmComputedFields :: ![CatalogComputedField]
, _cmCustomTypes :: !CustomTypes
, _cmCustomTypes :: !CatalogCustomTypes
, _cmActions :: ![CatalogAction]
} deriving (Show, Eq, Generic)
instance NFData CatalogMetadata

View File

@ -8,7 +8,7 @@ select
'functions', functions.items,
'allowlist_collections', allowlist.item,
'computed_fields', computed_field.items,
'custom_types', coalesce((select custom_types from hdb_catalog.hdb_custom_types), '{}'),
'custom_types', custom_types.item,
'actions', actions.items
)
from
@ -173,6 +173,15 @@ from
where function_name = cc.function_name and function_schema = cc.function_schema
) fi on 'true'
) as computed_field,
(
select
json_build_object(
'custom_types',
coalesce((select custom_types from hdb_catalog.hdb_custom_types), '{}'),
'pg_scalars', -- See Note [Postgres scalars in custom types]
coalesce((select json_agg(typname) from pg_catalog.pg_type where typtype = 'b'), '[]')
) as item
) as custom_types,
(
select
coalesce(

View File

@ -0,0 +1,28 @@
- description: Create an action with PG scalars in input arguments
url: /v1/query
status: 200
response:
message: success
query:
type: create_action
args:
name: some_action
definition:
kind: synchronous
arguments:
- name: user_id
type: ID!
- name: location
type: geography!
output_type: User!
handler: http://127.0.0.1:5593/create-user
- description: Remove action
url: /v1/query
status: 200
response:
message: success
query:
type: drop_action
args:
name: some_action

View File

@ -0,0 +1,20 @@
description: Set custom types with Postgres scalars
url: /v1/query
status: 200
query:
type: set_custom_types
args:
objects:
- name: User
fields:
- name: user_id
type: uuid!
- name: location
type: geography
input_objects:
- name: UserInput
fields:
- name: name
type: String!
- name: id
type: uuid!

View File

@ -0,0 +1,61 @@
description: Set custom types with Postgres scalars
url: /v1/query
status: 400
response:
internal:
- definition:
input_objects:
- name: UserInput
description:
fields:
- name: name
type: String!
description:
- name: id
type: uuid!
description:
objects:
- name: User
relationships:
description:
fields:
- arguments:
name: user_id
type: uuid!
description:
- arguments:
name: location
type: geography
description:
- arguments:
name: unknown_pgtype
type: unknown_type
description:
scalars:
enums:
reason: validation for the given custom types failed because the type "unknown_type"
for field "unknown_pgtype" in object type "User" does not exist
type: custom_types
path: $.args
error: validation for the given custom types failed because the type "unknown_type"
for field "unknown_pgtype" in object type "User" does not exist
code: constraint-violation
query:
type: set_custom_types
args:
objects:
- name: User
fields:
- name: user_id
type: uuid!
- name: location
type: geography
- name: unknown_pgtype
type: unknown_type
input_objects:
- name: UserInput
fields:
- name: name
type: String!
- name: id
type: uuid!

View File

@ -0,0 +1,27 @@
type: bulk
args:
- type: run_sql
args:
sql: |
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS postgis_topology;
DO $$
BEGIN
IF PostGIS_lib_version() ~ '^3.*' THEN
CREATE EXTENSION IF NOT EXISTS postgis_raster;
END IF;
END$$;
CREATE TABLE "user"(
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name TEXT NOT NULL,
location geography
);
- type: set_custom_types
args:
objects:
- name: User
fields:
- name: user_id
type: ID!

View File

@ -0,0 +1,11 @@
type: bulk
args:
# reset custom types
- type: set_custom_types
args: {}
- type: run_sql
args:
cascade: true
sql: |
DROP TABLE "user";

View File

@ -297,3 +297,19 @@ class TestActionsAsync:
# Query the action as user-id 1
# Make request without auth using admin_secret
check_query(hge_ctx, conf_user_1, add_auth = False)
@pytest.mark.usefixtures('per_class_tests_db_state')
class TestSetCustomTypes:
@classmethod
def dir(cls):
return 'queries/actions/custom-types'
def test_reuse_pgscalars(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/reuse_pgscalars.yaml')
def test_reuse_unknown_pgscalar(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/reuse_unknown_pgscalar.yaml')
def test_create_action_pg_scalar(self, hge_ctx):
check_query_f(hge_ctx, self.dir() + '/create_action_pg_scalar.yaml')