server: codecs for remaining database configuration types

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/6062
GitOrigin-RevId: f1ba9fa30267d1825ba36480103cd973616f3079
Jesse Hallett 2022-10-12 12:28:51 -04:00 committed by hasura-bot
parent ac3c054b27
commit 332faabc24
12 changed files with 352 additions and 28 deletions


@ -969,6 +969,7 @@ test-suite graphql-engine-tests
Discover
Hasura.AppSpec
Hasura.Base.Error.TestInstances
Hasura.Backends.BigQuery.SourceSpec
Hasura.Backends.DataConnector.API.V0.AggregateSpec
Hasura.Backends.DataConnector.API.V0.CapabilitiesSpec
Hasura.Backends.DataConnector.API.V0.ColumnSpec


@ -12,6 +12,7 @@ module Database.MSSQL.Pool
)
where
import Autodocodec (HasCodec (codec), dimapCodec)
import Control.Exception.Lifted
import Control.Monad.Trans.Control
import Data.Aeson
@ -24,6 +25,9 @@ import Prelude
newtype ConnectionString = ConnectionString {unConnectionString :: Text}
deriving (Show, Eq, ToJSON, FromJSON, Generic)
instance HasCodec ConnectionString where
codec = dimapCodec ConnectionString unConnectionString codec
data ConnectionOptions = ConnectionOptions
{ _coConnections :: Int,
_coStripes :: Int,
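The new ConnectionString instance above is autodocodec's standard newtype pattern: dimapCodec wraps the constructor around the existing Text codec, so the JSON shape stays a plain string. A minimal self-contained sketch of the same pattern, with a hypothetical ApiKey type that is not part of this commit:

{-# LANGUAGE OverloadedStrings #-}

import Autodocodec (HasCodec (codec), dimapCodec, eitherDecodeJSONViaCodec, toJSONViaCodec)
import Data.Text (Text)

-- Hypothetical newtype used only to illustrate the pattern; any newtype over a
-- type that already has a codec works the same way.
newtype ApiKey = ApiKey {unApiKey :: Text}
  deriving (Show, Eq)

instance HasCodec ApiKey where
  -- Reuse the Text codec: wrap with the constructor on decode, unwrap on encode.
  codec = dimapCodec ApiKey unApiKey codec

main :: IO ()
main = do
  print (eitherDecodeJSONViaCodec "\"secret\"" :: Either String ApiKey) -- Right (ApiKey {unApiKey = "secret"})
  print (toJSONViaCodec (ApiKey "secret")) -- String "secret"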


@ -18,7 +18,7 @@ module Hasura.Backends.BigQuery.Source
)
where
import Autodocodec
import Control.Concurrent.MVar
import Crypto.PubKey.RSA.Types qualified as Cry
import Data.Aeson qualified as J
@ -27,11 +27,12 @@ import Data.Aeson.KeyMap qualified as KM
import Data.Aeson.TH qualified as J
import Data.ByteString.Lazy qualified as BL
import Data.Int qualified as Int
import Data.Scientific (Scientific)
import Data.Text.Encoding qualified as TE
import Data.X509 qualified as X509
import Data.X509.Memory qualified as X509
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Utils (fromEnvCodec)
import Hasura.Prelude
data PKey = PKey
@ -52,6 +53,13 @@ deriving instance Hashable Cry.PrivateKey -- orphan
deriving instance Hashable Cry.PublicKey -- orphan
instance HasCodec PKey where
codec = bimapCodec dec originalBS codec
where
dec k = case X509.readKeyFileFromMemory $ TE.encodeUtf8 k of
[X509.PrivKeyRSA k'] -> Right $ PKey k' k
_ -> Left "unable to parse private key"
instance J.FromJSON PKey where
parseJSON = J.withText "private_key" $ \k ->
case X509.readKeyFileFromMemory $ TE.encodeUtf8 k of
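The PKey instance added above uses bimapCodec so that decoding can fail with a message while encoding recovers the original text. A self-contained sketch of the same validate-on-decode pattern, using a hypothetical NonEmptyText type that is not part of this commit:

{-# LANGUAGE ImportQualifiedPost #-}

import Autodocodec (HasCodec (codec), bimapCodec)
import Data.Text (Text)
import Data.Text qualified as T

-- Hypothetical wrapper that only admits non-empty strings.
newtype NonEmptyText = NonEmptyText {originalText :: Text}
  deriving (Show, Eq)

instance HasCodec NonEmptyText where
  -- Decoding may fail with a message; encoding simply returns the stored text.
  codec = bimapCodec dec originalText codec
    where
      dec t
        | T.null t = Left "expected a non-empty string"
        | otherwise = Right (NonEmptyText t)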
@ -84,6 +92,14 @@ data ServiceAccount = ServiceAccount
}
deriving (Eq, Show, Data, NFData, Generic, Hashable)
instance HasCodec ServiceAccount where
codec =
object "BigQueryServiceAccount" $
ServiceAccount
<$> requiredField' "client_email" .= _saClientEmail
<*> requiredField' "private_key" .= _saPrivateKey
<*> requiredField' "project_id" .= _saProjectId
$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) {J.omitNothingFields = False} ''ServiceAccount)
data ConfigurationJSON a
@ -92,6 +108,54 @@ data ConfigurationJSON a
deriving stock (Show, Eq, Generic)
deriving (NFData, Hashable)
-- This codec has straightforward encoding, but on decoding there is
-- a possibility of receiving a string that contains JSON that is recursively
-- handled by this codec. There is also the issue that decoding the
-- @FromYamlJSON@ case should be attempted last because there is a possibility
-- that the decoding for @a@ is not disjoint from the other decoding cases. This
-- presents some asymmetry that is a little tricky to capture in a codec.
instance HasCodec a => HasCodec (ConfigurationJSON a) where
codec = parseAlternative (parseAlternative mainCodec fromEnvEncodedAsNestedJSON) yamlJSONCodec
where
-- This is the only codec in this implementation that is used for
-- encoding. It must cover both the @FromEnvJSON@ and @FromYamlJSON@ cases
-- because Autodocodec does not support codecs that are partial in
-- encoding.
mainCodec :: JSONCodec (ConfigurationJSON a)
mainCodec =
dimapCodec dec enc $
eitherCodec
fromEnvCodec
( bimapCodec
-- Fail parsing at this point because @codec \@a@ should only be
-- used for parsing after trying @fromEnvEncodedAsNestedJSON@.
(const $ Left "not used for parsing")
id
$ codec @a
)
where
dec (Left text) = FromEnvJSON text
dec (Right a) = FromYamlJSON a
enc (FromEnvJSON i) = Left i
enc (FromYamlJSON j) = Right j
-- The JSON-encoded string case is used as an alternative in
-- a 'parseAlternative' because we can implement the decoding direction,
-- but not the encoding direction. (There isn't a good way to implement
-- @ConfigurationJSON a -> Text@.) Fortunately an alternative in
-- a 'parseAlternative' is only used for decoding so we don't need to
-- implement encoding logic here.
fromEnvEncodedAsNestedJSON :: ValueCodec Text (ConfigurationJSON a)
fromEnvEncodedAsNestedJSON =
bimapCodec
(eitherDecodeJSONViaCodec . BL.fromStrict . TE.encodeUtf8)
id
$ codec @Text <?> "JSON-encoded string"
yamlJSONCodec :: ValueCodec a (ConfigurationJSON a)
yamlJSONCodec = FromYamlJSON <$> codec @a
instance J.FromJSON a => J.FromJSON (ConfigurationJSON a) where
parseJSON = \case
J.Object o | Just (J.String text) <- KM.lookup "from_env" o -> pure (FromEnvJSON text)
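The non-obvious part of the HasCodec instance above is the decode-only fallback for a string whose contents are themselves JSON. A self-contained sketch of that parseAlternative and bimapCodec combination, with a hypothetical Settings type standing in for ServiceAccount:

{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec
import Data.ByteString.Lazy qualified as BL
import Data.Text (Text)
import Data.Text.Encoding qualified as TE

-- Hypothetical configuration type; not part of this commit.
data Settings = Settings {verbose :: Bool}
  deriving (Show, Eq)

instance HasCodec Settings where
  codec = object "Settings" $ Settings <$> requiredField' "verbose" .= verbose

-- Accepts {"verbose": true} as well as the string "{\"verbose\": true}".
-- The string form is decode-only: parseAlternative never uses its second
-- argument for encoding, which is why no Settings -> Text function is needed.
settingsOrNestedJSON :: JSONCodec Settings
settingsOrNestedJSON = parseAlternative (codec @Settings) nestedJSONString
  where
    nestedJSONString :: ValueCodec Text Settings
    nestedJSONString =
      bimapCodec
        (eitherDecodeJSONViaCodec . BL.fromStrict . TE.encodeUtf8)
        id
        (codec @Text <?> "JSON-encoded string")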
@ -105,7 +169,7 @@ instance J.ToJSON a => J.ToJSON (ConfigurationJSON a) where
FromEnvJSON i -> J.object ["from_env" J..= i]
FromYamlJSON j -> J.toJSON j
-- | Configuration inputs when they are a YAML array or an Env var whose value is
-- a comma-separated string
data ConfigurationInputs
= FromYamls [Text]
@ -113,6 +177,15 @@ data ConfigurationInputs
deriving stock (Show, Eq, Generic)
deriving (NFData, Hashable)
instance HasCodec ConfigurationInputs where
codec =
dimapCodec
(either FromYamls FromEnvs)
(\case FromYamls i -> Left i; FromEnvs i -> Right i)
$ disjointEitherCodec
(codec @[Text])
fromEnvCodec
instance J.ToJSON ConfigurationInputs where
toJSON = \case
FromYamls i -> J.toJSON i
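ConfigurationInputs, and several other types in this commit, follow the same sum-type recipe: disjointEitherCodec picks a branch by JSON shape and dimapCodec maps the Either onto the real constructors. A minimal sketch with a hypothetical two-constructor type:

{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec
import Data.Scientific (Scientific)
import Data.Text (Text)

-- Hypothetical two-constructor sum; not part of this commit.
data StringOrNumber
  = AsString Text
  | AsNumber Scientific
  deriving (Show, Eq)

instance HasCodec StringOrNumber where
  codec =
    dimapCodec
      (either AsString AsNumber)
      (\case AsString t -> Left t; AsNumber n -> Right n)
      -- disjointEitherCodec is the right choice because a JSON string never
      -- also decodes as a JSON number, so the two branches cannot overlap.
      $ disjointEitherCodec (codec @Text) (codec @Scientific)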
@ -125,13 +198,25 @@ instance J.FromJSON ConfigurationInputs where
_ -> fail "one of array or object must be provided"
-- | Configuration input when the YAML value as well as the Env var have
-- singular values
data ConfigurationInput
= FromYaml Text
| FromEnv Text
deriving stock (Show, Eq, Generic)
deriving (NFData, Hashable)
instance HasCodec ConfigurationInput where
codec =
dimapCodec
(either FromYaml FromEnv)
(\case FromYaml i -> Left i; FromEnv i -> Right i)
$ disjointEitherCodec fromYamls fromEnvCodec
where
fromYamls =
parseAlternative
(codec @Text)
(tshow <$> codec @Scientific)
instance J.ToJSON ConfigurationInput where
toJSON = \case
FromYaml i -> J.toJSON i
@ -159,7 +244,15 @@ $(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = True} ''BigQue
-- TODO: Write a proper codec, and use it to derive FromJSON and ToJSON
-- instances.
instance HasCodec BigQueryConnSourceConfig where
codec =
object "BigQueryConnSourceConfig" $
BigQueryConnSourceConfig
<$> requiredField' "service_account" .= _cscServiceAccount
<*> requiredField' "datasets" .= _cscDatasets
<*> requiredField' "project_id" .= _cscProjectId
<*> optionalFieldOrNull' "global_select_limit" .= _cscGlobalSelectLimit
<*> optionalFieldOrNull' "retry_base_delay" .= _cscRetryBaseDelay
<*> optionalFieldOrNull' "retry_limit" .= _cscRetryLimit
deriving instance Show BigQueryConnSourceConfig
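For orientation, the codec above accepts configuration of roughly this shape (a sketch with placeholder values; the optional retry fields are omitted, and service_account may also be given inline as the full ServiceAccount object):

{-# LANGUAGE QuasiQuotes #-}

import Data.Aeson (Value)
import Data.Aeson.QQ (aesonQQ)

-- Placeholder values only.
sampleBigQueryConfig :: Value
sampleBigQueryConfig =
  [aesonQQ|
    { "service_account": {"from_env": "BIGQUERY_SERVICE_ACCOUNT"},
      "datasets": ["hasura_test"],
      "project_id": "my-project",
      "global_select_limit": 1000
    }
  |]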


@ -23,7 +23,8 @@ module Hasura.Backends.MSSQL.Connection
)
where
import Autodocodec (HasCodec (codec), dimapCodec, disjointEitherCodec, optionalFieldOrNull', optionalFieldWithDefault', requiredField')
import Autodocodec qualified as AC
import Control.Monad.Morph (hoist)
import Control.Monad.Trans.Control
import Data.Aeson
@ -38,7 +39,7 @@ import Database.ODBC.SQLServer qualified as ODBC
import Hasura.Backends.MSSQL.SQL.Error
import Hasura.Base.Error
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Utils (fromEnvCodec)
import Hasura.Prelude
class MonadError QErr m => MonadMSSQLTx m where
@ -79,6 +80,13 @@ instance Hashable InputConnectionString
instance NFData InputConnectionString
instance HasCodec InputConnectionString where
codec =
dimapCodec
(either RawString FromEnvironment)
(\case RawString m -> Left m; FromEnvironment wEnv -> Right wEnv)
$ disjointEitherCodec codec fromEnvCodec
instance ToJSON InputConnectionString where
toJSON =
\case
@ -112,6 +120,13 @@ instance FromJSON MSSQLPoolSettings where
<$> o .:? "max_connections" .!= _mpsMaxConnections defaultMSSQLPoolSettings
<*> o .:? "idle_timeout" .!= _mpsIdleTimeout defaultMSSQLPoolSettings
instance HasCodec MSSQLPoolSettings where
codec =
AC.object "MSSQLPoolSettings" $
MSSQLPoolSettings
<$> optionalFieldWithDefault' "max_connections" (_mpsMaxConnections defaultMSSQLPoolSettings) AC..= _mpsMaxConnections
<*> optionalFieldWithDefault' "idle_timeout" (_mpsMaxConnections defaultMSSQLPoolSettings) AC..= _mpsIdleTimeout
defaultMSSQLPoolSettings :: MSSQLPoolSettings
defaultMSSQLPoolSettings =
MSSQLPoolSettings
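Mirroring the FromJSON instance above, optionalFieldWithDefault' makes each field optional on decode, falling back to the value taken from defaultMSSQLPoolSettings. A self-contained sketch of that behaviour with a hypothetical RetrySettings type:

{-# LANGUAGE OverloadedStrings #-}

import Autodocodec

-- Hypothetical settings record; not part of this commit.
data RetrySettings = RetrySettings {retries :: Int, delaySeconds :: Int}
  deriving (Show, Eq)

instance HasCodec RetrySettings where
  codec =
    object "RetrySettings" $
      RetrySettings
        <$> optionalFieldWithDefault' "retries" 3 .= retries
        <*> optionalFieldWithDefault' "delay_seconds" 10 .= delaySeconds

-- eitherDecodeJSONViaCodec "{}"               == Right (RetrySettings 3 10)
-- eitherDecodeJSONViaCodec "{\"retries\": 5}" == Right (RetrySettings 5 10)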
@ -131,6 +146,13 @@ instance Hashable MSSQLConnectionInfo
instance NFData MSSQLConnectionInfo
instance HasCodec MSSQLConnectionInfo where
codec =
AC.object "MSSQLConnectionInfo" $
MSSQLConnectionInfo
<$> requiredField' "connection_string" AC..= _mciConnectionString
<*> requiredField' "pool_settings" AC..= _mciPoolSettings
$(deriveToJSON hasuraJSON ''MSSQLConnectionInfo)
instance FromJSON MSSQLConnectionInfo where
@ -151,12 +173,14 @@ instance Hashable MSSQLConnConfiguration
instance NFData MSSQLConnConfiguration
instance HasCodec MSSQLConnConfiguration where
codec =
AC.object "MSSQLConnConfiguration" $
MSSQLConnConfiguration
<$> requiredField' "connection_info" AC..= _mccConnectionInfo
<*> optionalFieldOrNull' "read_replicas" AC..= _mccReadReplicas
$(deriveJSON hasuraJSON {omitNothingFields = True} ''MSSQLConnConfiguration)
createMSSQLPool ::
MonadIO m =>
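Taken together, the codecs above accept MSSQL source configuration of roughly this shape (placeholder values; connection_string may also be a plain string, and the optional read_replicas field is omitted):

{-# LANGUAGE QuasiQuotes #-}

import Data.Aeson (Value)
import Data.Aeson.QQ (aesonQQ)

sampleMSSQLConfig :: Value
sampleMSSQLConfig =
  [aesonQQ|
    { "connection_info":
        { "connection_string": {"from_env": "MSSQL_CONNECTION_STRING"},
          "pool_settings": {"max_connections": 50, "idle_timeout": 5}
        }
    }
  |]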


@ -5,6 +5,7 @@
-- Defines a 'Hasura.RQL.Types.Backend.Backend' type class instance for MSSQL.
module Hasura.Backends.MSSQL.Instances.Types () where
import Autodocodec (codec)
import Data.Aeson
import Data.Text.Casing (GQLNameIdentifier)
import Database.ODBC.SQLServer qualified as ODBC
@ -14,7 +15,6 @@ import Hasura.Backends.MSSQL.Types.Insert qualified as MSSQL (BackendInsert)
import Hasura.Backends.MSSQL.Types.Internal qualified as MSSQL
import Hasura.Backends.MSSQL.Types.Update qualified as MSSQL (BackendUpdate)
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.HealthCheck
@ -66,7 +66,7 @@ instance Backend 'MSSQL where
Just $
HealthCheckImplementation
{ _hciDefaultTest = defaultHealthCheckTestSql,
_hciTestCodec = codec
}
isComparableType :: ScalarType 'MSSQL -> Bool


@ -5,7 +5,8 @@
-- | Instances that're slow to compile.
module Hasura.Backends.MySQL.Types.Instances () where
import Autodocodec (HasCodec (codec), optionalFieldWithDefault', requiredField, requiredField')
import Autodocodec qualified as AC
import Control.DeepSeq
import Data.Aeson qualified as J
import Data.Aeson.Casing qualified as J
@ -20,7 +21,6 @@ import Hasura.Backends.MySQL.Types.Internal
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue
import Hasura.Incremental.Internal.Dependency
import Hasura.Prelude
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
@ -196,6 +196,13 @@ instance Semigroup Top where
(<>) x NoTop = x
(<>) (Top x) (Top y) = Top (min x y)
instance HasCodec ConnPoolSettings where
codec =
AC.object "MySQLConnPoolSettings" $
ConnPoolSettings
<$> optionalFieldWithDefault' "idle_timeout" (_cscIdleTimeout defaultConnPoolSettings) AC..= _cscIdleTimeout
<*> optionalFieldWithDefault' "max_connections" (_cscMaxConnections defaultConnPoolSettings) AC..= _cscMaxConnections
instance J.FromJSON ConnPoolSettings where
parseJSON = J.withObject "MySQL pool settings" $ \o ->
ConnPoolSettings
@ -211,12 +218,20 @@ instance J.ToJSON Expression where
instance J.FromJSON Expression where
parseJSON value = ValueExpression <$> J.parseJSON value
instance HasCodec ConnSourceConfig where
codec =
AC.object "MySQLConnSourceConfig" $
ConnSourceConfig
<$> requiredField "host" hostDoc AC..= _cscHost
<*> requiredField' "port" AC..= _cscPort
<*> requiredField' "user" AC..= _cscUser
<*> requiredField' "password" AC..= _cscPassword
<*> requiredField' "database" AC..= _cscDatabase
<*> requiredField' "pool_settings" AC..= _cscPoolSettings
where
hostDoc = "Works with `127.0.0.1` but not with `localhost`: https://mariadb.com/kb/en/troubleshooting-connection-issues/#localhost-and"
$(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = False} ''ConnSourceConfig)
instance J.ToJSON (Pool Connection) where
toJSON = const (J.String "_REDACTED_")
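A value of the shape the new ConnSourceConfig codec accepts, with placeholder values (note the hostDoc advice above to use 127.0.0.1 rather than localhost):

{-# LANGUAGE QuasiQuotes #-}

import Data.Aeson (Value)
import Data.Aeson.QQ (aesonQQ)

sampleMySQLConfig :: Value
sampleMySQLConfig =
  [aesonQQ|
    { "host": "127.0.0.1",
      "port": 3306,
      "user": "root",
      "password": "secret",
      "database": "app",
      "pool_settings": {"idle_timeout": 60, "max_connections": 10}
    }
  |]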


@ -48,7 +48,6 @@ import Data.Time.Clock.Compat ()
import Database.PG.Query qualified as PG
import Hasura.Base.Instances ()
import Hasura.Incremental (Cacheable (..))
import Hasura.Prelude
import Hasura.RQL.Types.Common (UrlConf (..))
import Hasura.SQL.Types (ExtensionsSchema (..))
@ -328,7 +327,7 @@ instance HasCodec PostgresSourceConnInfo where
CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgsourceconnectioninfo" $
AC.object "PostgresSourceConnInfo" $
PostgresSourceConnInfo
<$> requiredFieldWith "database_url" placeholderCodecViaJSON databaseUrlDoc .== _psciDatabaseUrl <$> requiredField "database_url" databaseUrlDoc .== _psciDatabaseUrl
<*> optionalFieldOrNull "pool_settings" poolSettingsDoc .== _psciPoolSettings <*> optionalFieldOrNull "pool_settings" poolSettingsDoc .== _psciPoolSettings
<*> optionalFieldWithOmittedDefault "use_prepared_statements" False usePreparedStatementsDoc .== _psciUsePreparedStatements <*> optionalFieldWithOmittedDefault "use_prepared_statements" False usePreparedStatementsDoc .== _psciUsePreparedStatements
<*> optionalFieldWithOmittedDefault "isolation_level" PG.ReadCommitted isolationLevelDoc .== _psciIsolationLevel <*> optionalFieldWithOmittedDefault "isolation_level" PG.ReadCommitted isolationLevelDoc .== _psciIsolationLevel


@ -9,7 +9,7 @@ module Hasura.Backends.Postgres.Instances.Types
)
where
import Autodocodec (HasCodec (codec))
import Data.Aeson (FromJSON)
import Data.Aeson qualified as J
import Data.Kind (Type)
@ -25,7 +25,6 @@ import Hasura.Backends.Postgres.Types.Function qualified as Postgres
import Hasura.Backends.Postgres.Types.Insert qualified as Postgres (BackendInsert)
import Hasura.Backends.Postgres.Types.Update qualified as Postgres
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp.AggregationPredicates qualified as Agg
import Hasura.RQL.Types.Backend
@ -116,7 +115,7 @@ instance
Just $
HealthCheckImplementation
{ _hciDefaultTest = defaultHealthCheckTestSql,
_hciTestCodec = codec
}
isComparableType = Postgres.isComparableType


@ -1,10 +1,19 @@
-- | Utility functions for use defining autodocodec codecs.
module Hasura.Metadata.DTO.Utils
( codecNamePrefix,
fromEnvCodec,
versionField,
optionalVersionField,
)
where
import Autodocodec
( Codec (EqCodec),
JSONCodec,
ObjectCodec,
object,
optionalFieldWith',
requiredField',
requiredFieldWith',
scientificCodec,
(.=),
@ -38,3 +47,13 @@ optionalVersionField v =
-- database kind from type context.
codecNamePrefix :: forall b. (HasTag b) => Text
codecNamePrefix = T.toTitle $ T.toTxt $ reify $ backendTag @b
-- | Represents a text field wrapped in an object with a single property
-- named @from_env@.
--
-- Objects of this form appear in many places in the Metadata API. If we
-- reproduced this codec in each use case the OpenAPI document would have many
-- identical object definitions. Using a shared codec allows a single shared
-- reference.
fromEnvCodec :: JSONCodec Text
fromEnvCodec = object "FromEnv" $ requiredField' "from_env"
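Any type whose JSON form is an object with a single from_env property can reuse this codec directly, and every such use points at the same shared FromEnv definition in the generated OpenAPI document. A sketch with a hypothetical EnvRef newtype, assuming this module is importable:

import Autodocodec (HasCodec (codec), dimapCodec)
import Data.Text (Text)
import Hasura.Metadata.DTO.Utils (fromEnvCodec)

-- Hypothetical wrapper for an environment-variable reference.
newtype EnvRef = EnvRef {envVarName :: Text}
  deriving (Show, Eq)

instance HasCodec EnvRef where
  -- Encodes as {"from_env": "<name>"} and decodes the same shape.
  codec = dimapCodec EnvRef envVarName fromEnvCodec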


@ -43,7 +43,8 @@ module Hasura.RQL.Types.Common
)
where
import Autodocodec (HasCodec (codec), bimapCodec, dimapCodec, disjointEitherCodec, optionalFieldOrNull', requiredField')
import Autodocodec qualified as AC
import Data.Aeson
import Data.Aeson qualified as J
import Data.Aeson.Casing
@ -63,6 +64,7 @@ import Hasura.Base.ToErrorValue
import Hasura.EncJSON
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Utils (fromEnvCodec)
import Hasura.Prelude
import Hasura.RQL.DDL.Headers ()
import Language.GraphQL.Draft.Syntax qualified as G
@ -284,6 +286,15 @@ instance Cacheable InputWebhook
instance Hashable InputWebhook
instance HasCodec InputWebhook where
codec = dimapCodec InputWebhook unInputWebhook urlTemplateCodec
where
urlTemplateCodec =
bimapCodec
(mapLeft ("Parsing URL template failed: " ++) . parseURLTemplate)
printURLTemplate
codec
instance ToJSON InputWebhook where
toJSON = String . printURLTemplate . unInputWebhook
@ -336,6 +347,16 @@ instance Cacheable PGConnectionParams
instance Hashable PGConnectionParams
instance HasCodec PGConnectionParams where
codec =
AC.object "PGConnectionParams" $
PGConnectionParams
<$> requiredField' "host" AC..= _pgcpHost
<*> requiredField' "username" AC..= _pgcpUsername
<*> optionalFieldOrNull' "password" AC..= _pgcpPassword
<*> requiredField' "port" AC..= _pgcpPort
<*> requiredField' "database" AC..= _pgcpDatabase
$(deriveToJSON hasuraJSON {omitNothingFields = True} ''PGConnectionParams)
instance FromJSON PGConnectionParams where
@ -362,6 +383,22 @@ instance Cacheable UrlConf
instance Hashable UrlConf
instance HasCodec UrlConf where
codec =
dimapCodec dec enc $
disjointEitherCodec valCodec $ disjointEitherCodec fromEnvCodec fromParamsCodec
where
valCodec = codec
fromParamsCodec = AC.object "UrlConfFromParams" $ requiredField' "connection_parameters"
dec (Left w) = UrlValue w
dec (Right (Left wEnv)) = UrlFromEnv wEnv
dec (Right (Right wParams)) = UrlFromParams wParams
enc (UrlValue w) = Left w
enc (UrlFromEnv wEnv) = Right $ Left wEnv
enc (UrlFromParams wParams) = Right $ Right wParams
instance ToJSON UrlConf where
toJSON (UrlValue w) = toJSON w
toJSON (UrlFromEnv wEnv) = object ["from_env" .= wEnv]


@ -27,6 +27,12 @@ newtype HealthCheckTestSql = HealthCheckTestSql
}
deriving (Eq, Generic, Show, Cacheable, Hashable, NFData)
instance HasCodec HealthCheckTestSql where
codec =
AC.object "HealthCheckTestSql" $
HealthCheckTestSql
<$> optionalFieldWithDefault' "sql" defaultTestSql AC..= _hctSql
instance ToJSON HealthCheckTestSql where
toJSON = genericToJSON hasuraJSON {omitNothingFields = True}


@ -0,0 +1,127 @@
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Hasura.Backends.BigQuery.SourceSpec (spec) where
import Autodocodec (HasCodec (codec), eitherDecodeJSONViaCodec, object, requiredField', toJSONViaCodec, (.=))
import Data.Aeson (FromJSON, ToJSON (toJSON), Value, eitherDecode, encode)
import Data.Aeson qualified as J
import Data.Aeson.Casing qualified as J
import Data.Aeson.QQ (aesonQQ)
import Data.Aeson.TH qualified as J
import Data.ByteString.Lazy (ByteString)
import Data.Text.Lazy.Encoding (decodeUtf8)
import Hasura.Backends.BigQuery.Source (ConfigurationInput (FromEnv), ConfigurationJSON (..))
import Hasura.Prelude
import Test.Hspec
-- ConfigurationJSON is used with the real ServiceAccount type - but that type
-- has a private key property that is obnoxious to get a mock value for.
data MockServiceAccount = MockServiceAccount
{ _msaClientEmail :: Text,
_msaPrivateKey :: Text,
_msaProjectId :: Text
}
deriving (Eq, Show)
instance HasCodec MockServiceAccount where
codec =
object "MockServiceAccount" $
MockServiceAccount
<$> requiredField' "client_email" .= _msaClientEmail
<*> requiredField' "private_key" .= _msaPrivateKey
<*> requiredField' "project_id" .= _msaProjectId
$(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = False} ''MockServiceAccount)
spec :: Spec
spec = do
describe "BigQuery" do
describe "HasCodec (ConfigurationJSON a)" do
it "should accept text from a @from_env@ property" do
let input = encode $ [aesonQQ|{"from_env": "config text"}|]
input `shouldDecodeTo` FromEnvJSON @(ConfigurationJSON MockServiceAccount) "config text"
it "should accept data parsed via underlying codec" do
let clientEmail = "client@test.net"
let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc."
let projectId = "2"
let input =
encode $
[aesonQQ|
{ client_email: #{clientEmail},
private_key: #{privateKey},
project_id: #{projectId}
}
|]
input
`shouldDecodeTo` FromYamlJSON
( MockServiceAccount clientEmail privateKey projectId
)
it "should accept a string containing a serialized object with a @from_env@ property" do
let input = "{\"from_env\":\"config text\"}"
input `shouldDecodeTo` FromEnvJSON @(ConfigurationJSON MockServiceAccount) "config text"
it "should accept a string containing a serialized value parsed via underlying codec" do
let clientEmail = "client@test.net"
let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc."
let projectId = "2"
let input =
decodeUtf8 $
encode $
[aesonQQ|
{ client_email: #{clientEmail},
private_key: #{privateKey},
project_id: #{projectId}
}
|]
let stringInput = encode input
stringInput
`shouldDecodeTo` FromYamlJSON
( MockServiceAccount clientEmail privateKey projectId
)
it "should encode to an object with a @from_env@ property" do
let output = FromEnv "config text"
output `shouldEncodeTo` [aesonQQ|{from_env: "config text"}|]
it "should encode via the underlying codec" do
let clientEmail = "client@test.net"
let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc."
let projectId = "2"
let output = FromYamlJSON $ MockServiceAccount clientEmail privateKey projectId
output
`shouldEncodeTo` [aesonQQ|
{ client_email: #{clientEmail},
private_key: #{privateKey},
project_id: #{projectId}
}
|]
-- | Assert that the given bytestring decodes to the expected value when
-- decoding via Autodocodec, and that the decoded value matches decoding via a
-- FromJSON instance.
shouldDecodeTo ::
forall a.
(Eq a, FromJSON a, HasCodec a, Show a) =>
ByteString ->
a ->
Expectation
input `shouldDecodeTo` expected = do
decoded `shouldBe` (Right expected)
decoded `shouldBe` decodedViaFromJSON
where
decoded = eitherDecodeJSONViaCodec @a input
decodedViaFromJSON = eitherDecode @a input
-- | Assert that the given value encodes to the expected JSON value when
-- encoding via Autodocodec, and that the encoded value matches encoding via
-- a ToJSON instance.
shouldEncodeTo :: forall a. (HasCodec a, ToJSON a) => a -> Value -> Expectation
output `shouldEncodeTo` expected = do
encoded `shouldBe` expected
encoded `shouldBe` encodedViaToJSON
where
encoded = toJSONViaCodec output
encodedViaToJSON = toJSON output
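The new spec is picked up through the Discover module registered in graphql-engine-tests above; for an ad-hoc run outside the full suite, a minimal driver would look like this (a sketch, assuming hspec is available):

import Hasura.Backends.BigQuery.SourceSpec (spec)
import Test.Hspec (hspec)

main :: IO ()
main = hspec spec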