server: codecs for TableMetadata, FunctionMetadata, and permissions

This PR expands the set of codecs for source metadata to include `TableMetadata`, `FunctionMetadata`, and various permission types. This fills out more detail in the generated OpenAPI document. See the [generated OpenAPI spec](https://gist.github.com/hallettj/783d06a926cbc854eececa4964e8aa5b) based on this PR. See also the [generated TypeScript types](https://github.com/hasura/graphql-engine-mono/files/9448102/client-typescript.tar.gz) based on that spec.

Ticket: https://hasurahq.atlassian.net/browse/MM-66

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/5664
GitOrigin-RevId: b6e1f32c669368cd6150e6f69fc36b78b748d9bb
Commit b094947239 (parent 7ec5ea86b1)
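Not part of the diff, but for orientation: an autodocodec `HasCodec` instance like the ones added below drives both JSON (de)serialization and the OpenAPI schema generation used in `Hasura.Server.MetadataOpenAPI`. A minimal sketch of that pattern, with a hypothetical `Example` record (all names here are invented for illustration):

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (HasCodec (..), object, requiredField', toJSONViaCodec, (.=))
import Autodocodec.OpenAPI (declareNamedSchemaViaCodec)
import Data.Aeson (Value)
import Data.OpenApi qualified as OpenApi
import Data.OpenApi.Declare (undeclare)
import Data.Proxy (Proxy (..))
import Data.Text (Text)

-- Hypothetical record, used only to illustrate the codec pattern.
data Example = Example {exampleName :: Text, exampleCount :: Int}

instance HasCodec Example where
  codec =
    object "Example" $
      Example
        <$> requiredField' "name" .= exampleName
        <*> requiredField' "count" .= exampleCount

-- One codec, two uses: JSON serialization...
exampleValue :: Value
exampleValue = toJSONViaCodec (Example "widget" 3)

-- ...and an OpenAPI schema, the way Hasura.Server.MetadataOpenAPI collects them.
exampleSchema :: OpenApi.NamedSchema
exampleSchema = undeclare $ declareNamedSchemaViaCodec (Proxy @Example)
```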
@@ -394,7 +394,8 @@ library dc-api
library
  import: common-all, lib-depends
  hs-source-dirs: src-lib
  exposed-modules: Control.Arrow.Extended
  exposed-modules: Autodocodec.Extended
                 , Control.Arrow.Extended
                 , Control.Arrow.Interpret
                 , Control.Arrow.Trans
                 , Control.Concurrent.Extended
server/src-lib/Autodocodec/Extended.hs (new file, 85 lines)
@@ -0,0 +1,85 @@
module Autodocodec.Extended
  ( optionalFieldOrIncludedNull,
    optionalFieldOrIncludedNull',
    optionalFieldOrIncludedNullWith,
    optionalFieldOrIncludedNullWith',
  )
where

import Autodocodec
import Hasura.Prelude

-- | An optional field that might be @null@ where a @Nothing@ value should be
-- represented as @null@ on serialization instead of omitting the field.
--
-- This differs from Autodocodec's stock 'optionalFieldOrNull' in that that
-- function omits the field during serialization if the Haskell value is
-- @Nothing@. This version includes the field with a serialized value of @null@.
optionalFieldOrIncludedNull ::
  HasCodec output =>
  -- | Key
  Text ->
  -- | Documentation
  Text ->
  ObjectCodec (Maybe output) (Maybe output)
optionalFieldOrIncludedNull key doc = optionalFieldOrIncludedNullWith key codec doc

-- | An optional field that might be @null@ where a @Nothing@ value should be
-- represented as @null@ on serialization instead of omitting the field.
--
-- This differs from Autodocodec's stock 'optionalFieldOrNull'' in that that
-- function omits the field during serialization if the Haskell value is
-- @Nothing@. This version includes the field with a serialized value of @null@.
optionalFieldOrIncludedNull' ::
  HasCodec output =>
  -- | Key
  Text ->
  ObjectCodec (Maybe output) (Maybe output)
optionalFieldOrIncludedNull' key = optionalFieldOrIncludedNullWith' key codec

-- | An optional field that might be @null@ where a @Nothing@ value should be
-- represented as @null@ on serialization instead of omitting the field.
--
-- This differs from Autodocodec's stock 'optionalFieldOrNullWith' in that that
-- function omits the field during serialization if the Haskell value is
-- @Nothing@. This version includes the field with a serialized value of @null@.
optionalFieldOrIncludedNullWith ::
  -- | Key
  Text ->
  -- | Codec for the value
  JSONCodec output ->
  -- | Documentation
  Text ->
  ObjectCodec (Maybe output) (Maybe output)
optionalFieldOrIncludedNullWith key c doc =
  orIncludedNullHelper $
    OptionalKeyCodec key (maybeCodec c) (Just doc)

-- | An optional field that might be @null@ where a @Nothing@ value should be
-- represented as @null@ on serialization instead of omitting the field.
--
-- This differs from Autodocodec's stock 'optionalFieldOrNullWith'' in that that
-- function omits the field during serialization if the Haskell value is
-- @Nothing@. This version includes the field with a serialized value of @null@.
optionalFieldOrIncludedNullWith' ::
  -- | Key
  Text ->
  -- | Codec for the value
  JSONCodec output ->
  ObjectCodec (Maybe output) (Maybe output)
optionalFieldOrIncludedNullWith' key c =
  orIncludedNullHelper $
    OptionalKeyCodec key (maybeCodec c) Nothing

orIncludedNullHelper :: ObjectCodec (Maybe (Maybe input)) (Maybe (Maybe output)) -> ObjectCodec (Maybe input) (Maybe output)
orIncludedNullHelper = dimapCodec dec enc
  where
    dec :: Maybe (Maybe input) -> Maybe input
    dec = \case
      Nothing -> Nothing
      Just Nothing -> Nothing
      Just (Just a) -> Just a
    enc :: Maybe output -> Maybe (Maybe output)
    enc = \case
      Nothing -> Just Nothing -- This is the case that differs from the stock `orNullHelper`
      Just a -> Just (Just a)
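As a usage illustration (not part of this diff): with the stock combinator a `Nothing` drops the key, while the new combinator keeps it as an explicit `null`. A minimal sketch, usable only within this codebase since it imports the new module; the `Person` type and field names are hypothetical:

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Autodocodec (HasCodec (codec), object, requiredField', toJSONViaCodec, (.=))
import Autodocodec.Extended (optionalFieldOrIncludedNull')
import Data.Text (Text)

-- Hypothetical record for illustration only.
data Person = Person {personName :: Text, personNickname :: Maybe Text}

instance HasCodec Person where
  codec =
    object "Person" $
      Person
        <$> requiredField' "name" .= personName
        -- With the stock optionalFieldOrNull', a Nothing nickname would omit
        -- the key entirely; optionalFieldOrIncludedNull' emits "nickname": null.
        <*> optionalFieldOrIncludedNull' "nickname" .= personNickname

-- toJSONViaCodec (Person "Ada" Nothing)  ==>  {"name":"Ada","nickname":null}
```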
@@ -4,14 +4,17 @@ module Data.HashMap.Strict.InsOrd.Autodocodec
  )
where

import Autodocodec (HasCodec (codec), JSONCodec, dimapCodec, listCodec)
import Autodocodec (Codec (..), HasCodec (codec), JSONCodec, ValueCodec, bimapCodec, listCodec)
import Data.HashMap.Strict.InsOrd (elems)
import Data.List.Extended (duplicates)
import Data.Text qualified as T
import Data.Text.Extended qualified as T
import Hasura.Prelude

-- | Codec for ordered hash maps where the key for each element can be inferred
-- from the element value. This codec serializes the hash map as an array sorted
-- by key.
sortedElemsCodec :: (HasCodec a, Hashable k, Ord k) => (a -> k) -> JSONCodec (InsOrdHashMap k a)
sortedElemsCodec :: (HasCodec a, Hashable k, Ord k, T.ToTxt k) => (a -> k) -> JSONCodec (InsOrdHashMap k a)
sortedElemsCodec = sortedElemsCodecWith codec

-- | Codec for ordered hash maps where the key for each element can be inferred
@@ -20,8 +23,34 @@ sortedElemsCodec = sortedElemsCodecWith codec
--
-- This version is useful if there is no 'HasCodec' instance for the type of the
-- hash map values. You supply a codec as an argument instead.
sortedElemsCodecWith :: (Hashable k, Ord k) => JSONCodec a -> (a -> k) -> JSONCodec (InsOrdHashMap k a)
sortedElemsCodecWith valueCodec keyForElem = dimapCodec dec enc $ listCodec valueCodec
sortedElemsCodecWith :: (Hashable k, Ord k, T.ToTxt k) => JSONCodec a -> (a -> k) -> JSONCodec (InsOrdHashMap k a)
sortedElemsCodecWith valueCodec keyForElem = bimapCodec dec enc $ listCodec valueCodec
  where
    dec = oMapFromL keyForElem
    dec xs =
      let dupKeys = duplicates $ map keyForElem xs
       in if null dupKeys
            then Right $ oMapFromL keyForElem xs
            else Left $ T.unpack $ errMsg <> T.commaSeparated dupKeys

    enc = sortOn keyForElem . elems

    errMsg = case codecName valueCodec of
      (Just t) -> "multiple " <> t <> " declarations exist: "
      Nothing -> "multiple declarations exist: "

    codecName :: ValueCodec input output -> Maybe Text
    codecName = \case
      NullCodec -> Nothing
      BoolCodec mname -> mname
      StringCodec mname -> mname
      NumberCodec mname _ -> mname
      ArrayOfCodec mname _ -> mname
      HashMapCodec _ -> Nothing
      MapCodec _ -> Nothing
      ValueCodec -> Nothing
      EqCodec _ _ -> Nothing
      BimapCodec _ _ c -> codecName c
      ObjectOfCodec mname _ -> mname
      EitherCodec {} -> Nothing
      CommentCodec _ c -> codecName c
      ReferenceCodec n _ -> Just n
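A short usage sketch of the behaviour described above (not part of the diff, and buildable only within this codebase since it imports the module being changed): the codec serializes an insertion-ordered map as a JSON array sorted by key, and decoding now rejects duplicate keys. The `Widget` type is hypothetical:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Autodocodec (HasCodec (codec), object, requiredField', toJSONVia, (.=))
import Data.Aeson (Value)
import Data.HashMap.Strict.InsOrd qualified as OM
import Data.HashMap.Strict.InsOrd.Autodocodec (sortedElemsCodec)
import Data.Text (Text)

-- Hypothetical element type; only the key field matters for the sketch.
data Widget = Widget {widgetName :: Text, widgetSize :: Int}

instance HasCodec Widget where
  codec =
    object "Widget" $
      Widget
        <$> requiredField' "name" .= widgetName
        <*> requiredField' "size" .= widgetSize

-- Serializes the map as a JSON array of Widget objects sorted by "name".
-- Decoding the same codec now fails with "multiple Widget declarations exist:"
-- if two array elements share a name.
widgetsValue :: OM.InsOrdHashMap Text Widget -> Value
widgetsValue = toJSONVia (sortedElemsCodec widgetName)
```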
@@ -1,5 +1,5 @@
-- | Utility functions for use defining autodocodec codecs.
module Hasura.Metadata.DTO.Utils (versionField, optionalVersionField) where
module Hasura.Metadata.DTO.Utils (codecNamePrefix, versionField, optionalVersionField) where

import Autodocodec
  ( Codec (EqCodec),
@@ -10,7 +10,10 @@ import Autodocodec
    (.=),
  )
import Data.Scientific (Scientific)
import Data.Text qualified as T
import Data.Text.Extended qualified as T
import Hasura.Prelude
import Hasura.SQL.Tag (HasTag (backendTag), reify)

-- | Defines a required object field named @version@ that must have the given
-- integer value. On serialization the field will have the given value
@@ -30,3 +33,8 @@ optionalVersionField v =
    optionalFieldWith' "version" (EqCodec n scientificCodec) .= const (Just n)
  where
    n = fromInteger v

-- | Provides a title-cased name for a database kind, inferring the appropriate
-- database kind from type context.
codecNamePrefix :: forall b. (HasTag b) => Text
codecNamePrefix = T.toTitle $ T.toTxt $ reify $ backendTag @b
@@ -95,6 +95,7 @@ import Data.Text qualified as T
import Data.Text.Extended qualified as T
import Hasura.Incremental (Cacheable)
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Metadata.DTO.Utils (codecNamePrefix)
import Hasura.Prelude
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.Allowlist
@@ -120,7 +121,7 @@ import Hasura.RQL.Types.SourceCustomization
import Hasura.RQL.Types.Table
import Hasura.SQL.AnyBackend qualified as AB
import Hasura.SQL.Backend
import Hasura.SQL.Tag (BackendTag, HasTag (backendTag), reify)
import Hasura.SQL.Tag (BackendTag, HasTag (backendTag))
import Hasura.Session
import Language.GraphQL.Draft.Syntax qualified as G

@@ -274,12 +275,50 @@ instance (Backend b) => Cacheable (TableMetadata b)
instance (Backend b) => ToJSON (TableMetadata b) where
  toJSON = genericToJSON hasuraJSON

-- TODO: Write a proper codec for 'TableMetadata'
-- TODO: Replace uses of placeholderCodecViaJSON with proper codecs
instance (Backend b) => HasCodec (TableMetadata b) where
  codec = named (codecNamePrefix @b <> "TableMetadata") placeholderCodecViaJSON
  codec =
    CommentCodec "Representation of a table in metadata, 'tables.yaml' and 'metadata.json'" $
      AC.object (codecNamePrefix @b <> "TableMetadata") $
        TableMetadata
          <$> requiredFieldWith' "table" placeholderCodecViaJSON .== _tmTable
          <*> optionalFieldWithOmittedDefault' "is_enum" False .== _tmIsEnum
          <*> optionalFieldWithOmittedDefaultWith "configuration" placeholderCodecViaJSON emptyTableConfig configDoc .== _tmConfiguration
          <*> optSortedListViaJSON "object_relationships" _rdName .== _tmObjectRelationships
          <*> optSortedListViaJSON "array_relationships" _rdName .== _tmArrayRelationships
          <*> optSortedListViaJSON "computed_fields" _cfmName .== _tmComputedFields
          <*> optSortedListViaJSON "remote_relationships" _rrName .== _tmRemoteRelationships
          <*> optSortedList "insert_permissions" _pdRole .== _tmInsertPermissions
          <*> optSortedList "select_permissions" _pdRole .== _tmSelectPermissions
          <*> optSortedList "update_permissions" _pdRole .== _tmUpdatePermissions
          <*> optSortedList "delete_permissions" _pdRole .== _tmDeletePermissions
          <*> optSortedListViaJSON "event_triggers" etcName .== _tmEventTriggers
          <*> optionalFieldOrNullWith' "apollo_federation_config" placeholderCodecViaJSON .== _tmApolloFederationConfig
    where
      optSortedListViaJSON ::
        (Eq a, FromJSON a, ToJSON a, Hashable k, Ord k, T.ToTxt k) =>
        Text ->
        (a -> k) ->
        ObjectCodec (InsOrdHashMap k a) (InsOrdHashMap k a)
      optSortedListViaJSON name keyForElem =
        AC.optionalFieldWithOmittedDefaultWith' name (sortedElemsCodecWith placeholderCodecViaJSON keyForElem) mempty

codecNamePrefix :: forall b. (HasTag b) => Text
codecNamePrefix = T.toTitle $ T.toTxt $ reify $ backendTag @b
      optSortedList ::
        (HasCodec a, Eq a, Hashable k, Ord k, T.ToTxt k) =>
        Text ->
        (a -> k) ->
        ObjectCodec (InsOrdHashMap k a) (InsOrdHashMap k a)
      optSortedList name keyForElem =
        AC.optionalFieldWithOmittedDefaultWith' name (sortedElemsCodec keyForElem) mempty

      configDoc =
        T.unlines
          [ "Configuration for the table/view",
            "",
            "https://hasura.io/docs/latest/graphql/core/api-reference/schema-metadata-api/table-view.html#table-config"
          ]

      (.==) = (AC..=)

$(makeLenses ''TableMetadata)

@@ -384,9 +423,27 @@ instance (Backend b) => FromJSON (FunctionMetadata b) where
      <*> o .:? "permissions" .!= []
      <*> o .:? "comment"

-- TODO: Write a proper codec for 'FunctionMetadata'
-- TODO: Replace uses of placeholderCodecViaJSON with proper codecs
instance (Backend b) => HasCodec (FunctionMetadata b) where
  codec = named (codecNamePrefix @b <> "FunctionMetadata") $ placeholderCodecViaJSON
  codec =
    CommentCodec
      ( T.unlines
          [ "A custom SQL function to add to the GraphQL schema with configuration.",
            "",
            "https://hasura.io/docs/latest/graphql/core/api-reference/schema-metadata-api/custom-functions.html#args-syntax"
          ]
      )
      $ AC.object (codecNamePrefix @b <> "FunctionMetadata") $
        FunctionMetadata
          <$> requiredFieldWith "function" placeholderCodecViaJSON nameDoc .== _fmFunction
          <*> optionalFieldOrNullWithOmittedDefaultWith "configuration" placeholderCodecViaJSON emptyFunctionConfig configDoc .== _fmConfiguration
          <*> optionalFieldOrNullWithOmittedDefaultWith' "permissions" (listCodec placeholderCodecViaJSON) [] .== _fmPermissions
          <*> optionalFieldOrNull' "comment" .== _fmComment
    where
      nameDoc = "Name of the SQL function"
      configDoc = "Configuration for the SQL function"

      (.==) = (AC..=)

type Tables b = InsOrdHashMap (TableName b) (TableMetadata b)

@@ -481,7 +538,7 @@ instance Backend b => HasCodec (SourceMetadata b) where
        <$> requiredField' "name" .== _smName
        <*> requiredField' "kind" .== _smKind
        <*> requiredFieldWith' "tables" (sortedElemsCodec _tmTable) .== _smTables
        <*> optionalFieldOrNullWithOmittedDefaultWith' "functions" (sortedElemsCodec _fmFunction) (OM.fromList []) .== _smFunctions
        <*> optionalFieldOrNullWithOmittedDefaultWith' "functions" (sortedElemsCodec _fmFunction) mempty .== _smFunctions
        <*> requiredField' "configuration" .== _smConfiguration
        <*> optionalFieldOrNullWith' "query_tags" placeholderCodecViaJSON .== _smQueryTags -- TODO: replace placeholder
        <*> optionalFieldOrNullWithOmittedDefault' "customization" emptySourceCustomization .== _smCustomization
@@ -28,6 +28,9 @@ module Hasura.RQL.Types.Permission
  )
where

import Autodocodec hiding (object, (.=))
import Autodocodec qualified as AC
import Autodocodec.Extended (optionalFieldOrIncludedNullWith')
import Control.Lens (makeLenses)
import Data.Aeson
import Data.Aeson.Casing (snakeCase)
@@ -35,9 +38,12 @@ import Data.Aeson.TH
import Data.HashSet qualified as Set
import Data.Hashable
import Data.Kind (Type)
import Data.List.NonEmpty qualified as NonEmpty
import Data.Text qualified as T
import Database.PG.Query qualified as Q
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Metadata.DTO.Utils (codecNamePrefix)
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Backend
@@ -154,6 +160,9 @@ instance Backend b => ToJSON (PermDefPermission b perm) where
    UpdPerm' p -> toJSON p
    DelPerm' p -> toJSON p

instance (Backend b, HasCodec (perm b), IsPerm perm) => HasCodec (PermDefPermission b perm) where
  codec = dimapCodec mkPermDefPermission unPermDefPermission codec

deriving stock instance Backend b => Show (PermDefPermission b perm)

deriving stock instance Backend b => Eq (PermDefPermission b perm)
@@ -166,6 +175,26 @@ instance Backend b => Cacheable (PermDefPermission b perm) where

-----------------------------

class IsPerm perm where
  mkPermDefPermission :: perm b -> PermDefPermission b perm
  permType :: PermType

instance IsPerm SelPerm where
  mkPermDefPermission = SelPerm'
  permType = PTSelect

instance IsPerm InsPerm where
  mkPermDefPermission = InsPerm'
  permType = PTInsert

instance IsPerm UpdPerm where
  mkPermDefPermission = UpdPerm'
  permType = PTUpdate

instance IsPerm DelPerm where
  mkPermDefPermission = DelPerm'
  permType = PTDelete

unPermDefPermission :: PermDefPermission b perm -> perm b
unPermDefPermission = \case
  SelPerm' p -> p
@@ -190,11 +219,21 @@ instance Backend b => ToAesonPairs (PermDef b perm) where
      "comment" .= comment
    ]

instance (Backend b, HasCodec (perm b), IsPerm perm) => HasCodec (PermDef b perm) where
  codec =
    AC.object (codecNamePrefix @b <> T.toTitle (permTypeToCode (permType @perm)) <> "PermDef") $
      PermDef
        <$> requiredField' "role" .== _pdRole
        <*> requiredField' "permission" .== _pdPermission
        <*> optionalFieldOrNull' "comment" .== _pdComment
    where
      (.==) = (AC..=)

data QueryRootFieldType
  = QRFTSelect
  | QRFTSelectByPk
  | QRFTSelectAggregate
  deriving stock (Show, Eq, Generic)
  deriving stock (Show, Eq, Generic, Enum, Bounded)
  deriving anyclass (Cacheable, Hashable, NFData)

instance FromJSON QueryRootFieldType where
@@ -203,12 +242,18 @@ instance FromJSON QueryRootFieldType where
instance ToJSON QueryRootFieldType where
  toJSON = genericToJSON defaultOptions {constructorTagModifier = snakeCase . drop 4}

instance HasCodec QueryRootFieldType where
  codec =
    stringConstCodec $
      NonEmpty.fromList $
        (\x -> (x, T.pack $ snakeCase $ drop 4 $ show x)) <$> [minBound ..]

data SubscriptionRootFieldType
  = SRFTSelect
  | SRFTSelectByPk
  | SRFTSelectAggregate
  | SRFTSelectStream
  deriving stock (Show, Eq, Generic)
  deriving stock (Show, Eq, Generic, Enum, Bounded)
  deriving anyclass (Cacheable, Hashable, NFData)

instance FromJSON SubscriptionRootFieldType where
@@ -217,6 +262,12 @@ instance FromJSON SubscriptionRootFieldType where
instance ToJSON SubscriptionRootFieldType where
  toJSON = genericToJSON defaultOptions {constructorTagModifier = snakeCase . drop 4}

instance HasCodec SubscriptionRootFieldType where
  codec =
    stringConstCodec $
      NonEmpty.fromList $
        (\x -> (x, T.pack $ snakeCase $ drop 4 $ show x)) <$> [minBound ..]

-- Insert permission
data InsPerm (b :: BackendType) = InsPerm
  { ipCheck :: BoolExp b,
@@ -239,6 +290,17 @@ instance Backend b => FromJSON (InsPerm b) where
instance Backend b => ToJSON (InsPerm b) where
  toJSON = genericToJSON hasuraJSON {omitNothingFields = True}

instance Backend b => HasCodec (InsPerm b) where
  codec =
    AC.object (codecNamePrefix @b <> "InsPerm") $
      InsPerm
        <$> requiredFieldWith' "check" placeholderCodecViaJSON .== ipCheck
        <*> optionalFieldWith' "set" placeholderCodecViaJSON .== ipSet
        <*> optionalFieldWith' "columns" placeholderCodecViaJSON .== ipColumns
        <*> optionalFieldWithDefault' "backend_only" False .== ipBackendOnly
    where
      (.==) = (AC..=)

type InsPermDef b = PermDef b InsPerm

data AllowedRootFields rootFieldType
@@ -255,6 +317,18 @@ instance (ToJSON rootFieldType) => ToJSON (AllowedRootFields rootFieldType) where
    ARFAllowAllRootFields -> String "allow all root fields"
    ARFAllowConfiguredRootFields configuredRootFields -> toJSON configuredRootFields

-- | Serializes set of allowed fields as a nullable array, where @null@ maps to
-- 'ARFAllowAllRootFields', and any array value maps to
-- 'ARFAllowConfiguredRootFields'.
instance (Eq rootFieldType, Hashable rootFieldType, HasCodec rootFieldType) => HasCodec (AllowedRootFields rootFieldType) where
  codec = dimapCodec dec enc $ maybeCodec $ listCodec codec
    where
      dec (Just fields) = ARFAllowConfiguredRootFields $ Set.fromList fields
      dec (Nothing) = ARFAllowAllRootFields

      enc ARFAllowAllRootFields = Nothing
      enc (ARFAllowConfiguredRootFields fields) = Just $ Set.toList fields

instance Semigroup (HashSet rootFieldType) => Semigroup (AllowedRootFields rootFieldType) where
  ARFAllowAllRootFields <> _ = ARFAllowAllRootFields
  _ <> ARFAllowAllRootFields = ARFAllowAllRootFields
@@ -336,6 +410,20 @@ instance Backend b => FromJSON (SelPerm b) where
      <*> pure allowedQueryRootFields
      <*> pure allowedSubscriptionRootFields

instance Backend b => HasCodec (SelPerm b) where
  codec =
    AC.object (codecNamePrefix @b <> "SelPerm") $
      SelPerm
        <$> requiredFieldWith' "columns" placeholderCodecViaJSON .== spColumns
        <*> requiredFieldWith' "filter" placeholderCodecViaJSON .== spFilter
        <*> optionalField' "limit" .== spLimit
        <*> optionalFieldWithOmittedDefault' "allow_aggregations" False .== spAllowAggregations
        <*> optionalFieldWithOmittedDefaultWith' "computed_fields" placeholderCodecViaJSON [] .== spComputedFields
        <*> optionalFieldWithOmittedDefault' "query_root_fields" ARFAllowAllRootFields .== spAllowedQueryRootFields
        <*> optionalFieldWithOmittedDefault' "subscription_root_fields" ARFAllowAllRootFields .== spAllowedSubscriptionRootFields
    where
      (.==) = (AC..=)

type SelPermDef b = PermDef b SelPerm

-- Delete permission
@@ -356,6 +444,15 @@ instance Backend b => FromJSON (DelPerm b) where
instance Backend b => ToJSON (DelPerm b) where
  toJSON = genericToJSON hasuraJSON {omitNothingFields = True}

instance Backend b => HasCodec (DelPerm b) where
  codec =
    AC.object (codecNamePrefix @b <> "DelPerm") $
      DelPerm
        <$> requiredFieldWith' "filter" placeholderCodecViaJSON .== dcFilter
        <*> optionalFieldWithOmittedDefault' "backend_only" False .== dcBackendOnly
    where
      (.==) = (AC..=)

type DelPermDef b = PermDef b DelPerm

-- Update constraint
@@ -387,6 +484,20 @@ instance Backend b => FromJSON (UpdPerm b) where
instance Backend b => ToJSON (UpdPerm b) where
  toJSON = genericToJSON hasuraJSON {omitNothingFields = True}

instance Backend b => HasCodec (UpdPerm b) where
  codec =
    AC.object (codecNamePrefix @b <> "UpdPerm") $
      UpdPerm
        <$> requiredFieldWith "columns" placeholderCodecViaJSON "Allowed columns" .== ucColumns
        <*> optionalFieldWith "set" placeholderCodecViaJSON "Preset columns" .== ucSet
        <*> requiredFieldWith' "filter" placeholderCodecViaJSON .== ucFilter
        -- Include @null@ in serialized output for this field because that is
        -- the way the @toOrdJSON@ serialization is written.
        <*> optionalFieldOrIncludedNullWith' "check" placeholderCodecViaJSON .== ucCheck
        <*> optionalFieldWithOmittedDefault' "backend_only" False .== ucBackendOnly
    where
      (.==) = (AC..=)

type UpdPermDef b = PermDef b UpdPerm

-- The Expression-level TemplateHaskell splices below fail unless there is a
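To illustrate the null-versus-array mapping used for `AllowedRootFields` above, here is a self-contained sketch of the same `dimapCodec`/`maybeCodec` pattern, written against autodocodec only. The `RootField` and `Allowed` types are stand-ins for this sketch, not the types from the diff:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Autodocodec
  ( HasCodec (codec),
    dimapCodec,
    listCodec,
    maybeCodec,
    stringConstCodec,
    toJSONViaCodec,
  )
import Data.List.NonEmpty qualified as NonEmpty

-- Stand-in types for the sketch only.
data RootField = Select | SelectByPk
  deriving (Show, Eq)

data Allowed = AllowAll | AllowOnly [RootField]
  deriving (Show, Eq)

instance HasCodec RootField where
  codec = stringConstCodec $ NonEmpty.fromList [(Select, "select"), (SelectByPk, "select_by_pk")]

-- JSON null decodes to AllowAll; a JSON array decodes to AllowOnly, mirroring
-- the AllowedRootFields codec in the diff.
instance HasCodec Allowed where
  codec = dimapCodec dec enc $ maybeCodec $ listCodec codec
    where
      dec = maybe AllowAll AllowOnly
      enc AllowAll = Nothing
      enc (AllowOnly fields) = Just fields

-- toJSONViaCodec AllowAll              ==>  null
-- toJSONViaCodec (AllowOnly [Select])  ==>  ["select"]
```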
@@ -5,128 +5,27 @@
-- do not incorporate it into essential workflows at this time.
module Hasura.Server.MetadataOpenAPI (metadataOpenAPI) where

import Autodocodec (HasCodec, JSONCodec)
import Autodocodec.OpenAPI (declareNamedSchemaVia, declareNamedSchemaViaCodec)
import Control.Lens ((.~), (^.))
import Data.Data (Proxy)
import Autodocodec.OpenAPI (declareNamedSchemaViaCodec)
import Data.HashMap.Strict.InsOrd qualified as HM
import Data.OpenApi
  ( HasComponents (components),
    HasName (name),
    HasSchema (schema),
    HasSchemas (schemas),
    OpenApi,
  )
import Data.OpenApi qualified as OpenApi
import Data.OpenApi.Declare (undeclare)
import Data.OpenApi (Components (..), NamedSchema (..), OpenApi (..))
import Data.OpenApi.Declare (MonadDeclare (declare), runDeclare)
import Data.Proxy (Proxy (..))
import Hasura.Backends.BigQuery.Source (BigQueryConnSourceConfig)
import Hasura.Backends.DataConnector.Adapter.Types qualified as DataConnector
import Hasura.Backends.MSSQL.Connection (MSSQLConnConfiguration)
import Hasura.Backends.MySQL.Types qualified as MySQL
import Hasura.Backends.Postgres.Connection.Settings (PostgresConnConfiguration)
import Hasura.Metadata.DTO.Metadata (MetadataDTO)
import Hasura.Metadata.DTO.MetadataV1 (MetadataV1)
import Hasura.Metadata.DTO.MetadataV2 (MetadataV2)
import Hasura.Metadata.DTO.MetadataV3 (MetadataV3)
import Hasura.Prelude
import Hasura.RQL.Types.Metadata.Common
  ( BackendSourceMetadata,
    FunctionMetadata,
    SourceMetadata,
    TableMetadata,
    backendSourceMetadataCodec,
  )
import Hasura.RQL.Types.SourceCustomization (SourceCustomization)
import Hasura.SQL.Backend (BackendType (..), PostgresKind (..))

-- | An OpenApi document includes \"schemas\" that describe the data that may be
-- produced or consumed by an API. It can also include \"paths\" which describe
-- REST endpoints, and the document can include other API metadata. This example
-- only includes schemas.
--
-- Throws an error if any schema listed in 'openApiSchemas' does not have
-- a name.
--
-- The OpenAPI specification for metadata is experimental and incomplete. Please
-- do not incorporate it into essential workflows at this time.
metadataOpenAPI :: OpenApi
metadataOpenAPI =
  (mempty :: OpenApi)
    & components . schemas .~ HM.fromList (applySchemaName <$> openApiSchemas)

-- | All metadata DTOs should be listed here. Schemas in this list must be
-- named! Some autodocodec combinators apply names for you, like 'object'.
-- Otherwise you can use the 'named' combinator to apply a name.
--
-- As far as I can tell it is necessary to explicitly list all of the data
-- types that should be included in the OpenApi document with their names. It
-- would be nice to provide only a top-level type ('Metadata' in this case), and
-- have all of the types referenced by that type included automatically; but
-- I haven't seen a way to do that.
openApiSchemas :: [OpenApi.NamedSchema]
openApiSchemas =
  [ toNamedSchema (Proxy @MetadataDTO),
    toNamedSchema (Proxy @MetadataV1),
    toNamedSchema (Proxy @MetadataV2),
    toNamedSchema (Proxy @MetadataV3),
    toNamedSchemaVia backendSourceMetadataCodec (Proxy @BackendSourceMetadata),
    toNamedSchema (Proxy @SourceCustomization),
    -- SourceMetadata
    toNamedSchema (Proxy @(SourceMetadata ('Postgres 'Vanilla))),
    toNamedSchema (Proxy @(SourceMetadata ('Postgres 'Citus))),
    toNamedSchema (Proxy @(SourceMetadata ('Postgres 'Cockroach))),
    toNamedSchema (Proxy @(SourceMetadata ('MSSQL))),
    toNamedSchema (Proxy @(SourceMetadata ('BigQuery))),
    toNamedSchema (Proxy @(SourceMetadata ('MySQL))),
    toNamedSchema (Proxy @(SourceMetadata ('DataConnector))),
    -- FunctionMetadata
    toNamedSchema (Proxy @(FunctionMetadata ('Postgres 'Vanilla))),
    toNamedSchema (Proxy @(FunctionMetadata ('Postgres 'Citus))),
    toNamedSchema (Proxy @(FunctionMetadata ('Postgres 'Cockroach))),
    toNamedSchema (Proxy @(FunctionMetadata ('MSSQL))),
    toNamedSchema (Proxy @(FunctionMetadata ('BigQuery))),
    toNamedSchema (Proxy @(FunctionMetadata ('MySQL))),
    toNamedSchema (Proxy @(FunctionMetadata ('DataConnector))),
    -- TableMetadata
    toNamedSchema (Proxy @(TableMetadata ('Postgres 'Vanilla))),
    toNamedSchema (Proxy @(TableMetadata ('Postgres 'Citus))),
    toNamedSchema (Proxy @(TableMetadata ('Postgres 'Cockroach))),
    toNamedSchema (Proxy @(TableMetadata ('MSSQL))),
    toNamedSchema (Proxy @(TableMetadata ('BigQuery))),
    toNamedSchema (Proxy @(TableMetadata ('MySQL))),
    toNamedSchema (Proxy @(TableMetadata ('DataConnector))),
    -- Postgres-specific types
    toNamedSchema (Proxy @PostgresConnConfiguration),
    -- MSSQL-specific types
    toNamedSchema (Proxy @MSSQLConnConfiguration),
    -- BigQuery-specific types
    toNamedSchema (Proxy @BigQueryConnSourceConfig),
    -- MySQL-specific types
    toNamedSchema (Proxy @MySQL.ConnSourceConfig),
    -- DataConnector-specific types
    toNamedSchema (Proxy @DataConnector.ConnSourceConfig)
  ]

-- | Introspect a given 'OpenApi.NamedSchema' to get its name, and return the
-- name with the unwrapped schema. (NamedSchema wraps a pair of an
-- 'OpenApi.Schema' and an optional name.)
--
-- Throws an exception if the named schema has no name. If this happens to you
-- then use autodocodec's 'named' combinator to apply a name to your codec.
applySchemaName :: OpenApi.NamedSchema -> (Text, OpenApi.Schema)
applySchemaName givenSchema = (schemaName, givenSchema ^. schema)
  mempty {_openApiComponents = mempty {_componentsSchemas = definitions}}
  where
    schemaName = case givenSchema ^. name of
      Just n -> n
      Nothing ->
        error $
          "a codec listed in 'openApiSchemas' does not have a name; "
            <> "use the 'named' combinator from autodocodec to apply a name "
            <> "to any codec in that list that does not already have one"

toNamedSchema :: HasCodec a => Proxy a -> OpenApi.NamedSchema
toNamedSchema proxy = undeclare $ declareNamedSchemaViaCodec proxy

toNamedSchemaVia :: JSONCodec a -> Proxy a -> OpenApi.NamedSchema
toNamedSchemaVia codec proxy = undeclare $ declareNamedSchemaVia codec proxy
    definitions = fst $
      flip runDeclare mempty $ do
        NamedSchema mName codecSchema <- declareNamedSchemaViaCodec (Proxy @MetadataDTO)
        declare $ HM.fromList [(fromMaybe "MetadataDTO" mName, codecSchema)]
        pure codecSchema
|
||||
)
|
||||
where
|
||||
|
||||
import Autodocodec (HasCodec (codec), dimapCodec)
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Types (Parser, toJSONKeyText)
|
||||
import Data.CaseInsensitive qualified as CI
|
||||
@ -62,6 +63,9 @@ newtype RoleName = RoleName {getRoleTxt :: NonEmptyText}
|
||||
Cacheable
|
||||
)
|
||||
|
||||
instance HasCodec RoleName where
|
||||
codec = dimapCodec RoleName getRoleTxt nonEmptyTextCodec
|
||||
|
||||
roleNameToTxt :: RoleName -> Text
|
||||
roleNameToTxt = unNonEmptyText . getRoleTxt
|
||||
|
||||
|
@@ -1,16 +1,27 @@
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}

module Hasura.Metadata.DTO.MetadataDTOSpec (spec) where

import Data.Aeson (ToJSON (toJSON), eitherDecode)
import Data.Aeson
  ( FromJSON (parseJSON),
    ToJSON (toJSON),
    Value,
    eitherDecode,
    eitherDecodeFileStrict',
  )
import Data.Aeson.QQ.Simple (aesonQQ)
import Data.Aeson.Types (parseEither)
import Data.Either (isLeft)
import Data.Either.Combinators (fromRight')
import Data.FileEmbed (makeRelativeToProject, strToExp)
import Hasura.Metadata.DTO.Metadata (MetadataDTO (..))
import Hasura.Metadata.DTO.MetadataV1 (MetadataV1 (..))
import Hasura.Metadata.DTO.MetadataV2 (MetadataV2 (..))
import Hasura.Metadata.DTO.MetadataV3 (MetadataV3 (..))
import Hasura.Metadata.DTO.Placeholder (PlaceholderArray (PlaceholderArray))
import Hasura.Prelude
import Hasura.RQL.Types.Metadata (Metadata, metadataToDTO)
import Test.Hspec

spec :: Spec
@@ -59,24 +70,22 @@ spec = describe "MetadataDTO" $ do
      let actual = eitherDecode input :: Either String MetadataDTO
      actual `shouldSatisfy` isLeft

  -- TODO: Currently there are discrepancies between Metadata and DTO
  -- serialization. These tests are disabled until those discrepancies are
  -- resolved.
  -- beforeAll getMetadataFixture $ do
  --   describe "v3" $ do
  --     it "deserializes and re-serializes equivalently to Metadata" $ \metadataFixture -> do
  --       let dto = parseEither parseJSON =<< metadataFixture :: Either String MetadataDTO
  --       let fromDto = toJSON <$> dto
  --       fromDto `shouldSatisfy` isRight
  --       fromDto `shouldBe` metadataFixture
  beforeAll getMetadataFixture $ do
    describe "v3" $ do
      -- TODO: There are some cases where DTO serialization emits @null@ where
      -- Metadata serialization omits the field instead. So this test doesn't
      -- quite pass yet. This is expected to be re-enabled in an upcoming PR.
      -- it "deserializes and re-serializes equivalently to Metadata" $ \(MetadataFixture {..}) -> do
      --   let dto = parseEither (parseJSON @MetadataDTO) _mfJSON
      --   let fromDto = toJSON <$> dto
      --   fromDto `shouldSatisfy` isRight
      --   (fromRight' fromDto) `shouldBeJson` _mfJSON

      -- it "converts metadata to DTO to JSON to metadata" $ \metadataFixture -> do
      --   let origMetadata = parseEither (parseJSON @Metadata) =<< metadataFixture
      --   let dto = metadataToDTO <$> origMetadata
      --   let json = toJSON <$> dto
      --   let metadata = parseEither (parseJSON @Metadata) =<< json
      --   metadata `shouldSatisfy` isRight
      --   metadata `shouldBe` origMetadata
      it "converts metadata to DTO to JSON to metadata" $ \(MetadataFixture {..}) -> do
        let dto = metadataToDTO $ _mfMetadata
        let json = toJSON dto
        let metadata = parseEither (parseJSON @Metadata) json
        metadata `shouldBe` (Right _mfMetadata)

emptyMetadataV3 :: MetadataV3
emptyMetadataV3 =
@@ -118,10 +127,14 @@ emptyMetadataV1 =
      metaV1Tables = PlaceholderArray mempty
    }

-- getMetadataFixture :: IO (Either String Value)
-- getMetadataFixture = do
--   let filePath = $(strToExp =<< makeRelativeToProject "../cli/internal/metadatautil/testdata/json/t2/metadata.json")
--   -- Round-trip fixture data through the server's old serialization so that we
--   -- will get consistent results on the next round-trip.
--   metadata <- eitherDecodeFileStrict' filePath :: IO (Either String Metadata)
--   return $ toJSON <$> metadata
data MetadataFixture = MetadataFixture {_mfMetadata :: Metadata, _mfJSON :: Value}

getMetadataFixture :: IO MetadataFixture
getMetadataFixture = do
  let filePath = $(strToExp =<< makeRelativeToProject "../cli/internal/metadatautil/testdata/json/t2/metadata.json")
  -- Instead of returning loaded JSON as-is, run it through Metadata parsing so
  -- that its format is up-to-date with the current state of the Metadata
  -- structure.
  json <- eitherDecodeFileStrict' @Value filePath
  let metadata = parseEither (parseJSON @Metadata) =<< json
  return $ fromRight' $ MetadataFixture <$> metadata <*> json