Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-15 01:12:56 +03:00)

server: codecs for remaining database configuration types

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/6062
GitOrigin-RevId: f1ba9fa30267d1825ba36480103cd973616f3079

This commit is contained in:
parent ac3c054b27
commit 332faabc24
@@ -969,6 +969,7 @@ test-suite graphql-engine-tests
    Discover
    Hasura.AppSpec
    Hasura.Base.Error.TestInstances
    Hasura.Backends.BigQuery.SourceSpec
    Hasura.Backends.DataConnector.API.V0.AggregateSpec
    Hasura.Backends.DataConnector.API.V0.CapabilitiesSpec
    Hasura.Backends.DataConnector.API.V0.ColumnSpec
@@ -12,6 +12,7 @@ module Database.MSSQL.Pool
  )
where

import Autodocodec (HasCodec (codec), dimapCodec)
import Control.Exception.Lifted
import Control.Monad.Trans.Control
import Data.Aeson
@@ -24,6 +25,9 @@ import Prelude

newtype ConnectionString = ConnectionString {unConnectionString :: Text}
  deriving (Show, Eq, ToJSON, FromJSON, Generic)

instance HasCodec ConnectionString where
  codec = dimapCodec ConnectionString unConnectionString codec

data ConnectionOptions = ConnectionOptions
  { _coConnections :: Int,
    _coStripes :: Int,
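Not part of the diff: a minimal sketch of exercising the ConnectionString codec above through Autodocodec's eitherDecodeJSONViaCodec/toJSONViaCodec helpers. The connection-string value is made up, and the example assumes ConnectionString (..) is exported from Database.MSSQL.Pool.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (eitherDecodeJSONViaCodec, toJSONViaCodec)
import Database.MSSQL.Pool (ConnectionString (..))

main :: IO ()
main = do
  -- A JSON string decodes straight into the newtype, thanks to dimapCodec.
  print (eitherDecodeJSONViaCodec @ConnectionString "\"DRIVER={ODBC Driver 18 for SQL Server};SERVER=example\"")
  -- Encoding unwraps the newtype again and emits a plain JSON string.
  print (toJSONViaCodec (ConnectionString "DRIVER={ODBC Driver 18 for SQL Server};SERVER=example"))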
@@ -18,7 +18,7 @@ module Hasura.Backends.BigQuery.Source
  )
where

import Autodocodec (HasCodec, codec, named)
import Autodocodec
import Control.Concurrent.MVar
import Crypto.PubKey.RSA.Types qualified as Cry
import Data.Aeson qualified as J
@@ -27,11 +27,12 @@ import Data.Aeson.KeyMap qualified as KM
import Data.Aeson.TH qualified as J
import Data.ByteString.Lazy qualified as BL
import Data.Int qualified as Int
import Data.Scientific (Scientific)
import Data.Text.Encoding qualified as TE
import Data.X509 qualified as X509
import Data.X509.Memory qualified as X509
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Metadata.DTO.Utils (fromEnvCodec)
import Hasura.Prelude

data PKey = PKey
@@ -52,6 +53,13 @@ deriving instance Hashable Cry.PrivateKey -- orphan

deriving instance Hashable Cry.PublicKey -- orphan

instance HasCodec PKey where
  codec = bimapCodec dec originalBS codec
    where
      dec k = case X509.readKeyFileFromMemory $ TE.encodeUtf8 k of
        [X509.PrivKeyRSA k'] -> Right $ PKey k' k
        _ -> Left "unable to parse private key"

instance J.FromJSON PKey where
  parseJSON = J.withText "private_key" $ \k ->
    case X509.readKeyFileFromMemory $ TE.encodeUtf8 k of
@@ -84,6 +92,14 @@ data ServiceAccount = ServiceAccount
  }
  deriving (Eq, Show, Data, NFData, Generic, Hashable)

instance HasCodec ServiceAccount where
  codec =
    object "BigQueryServiceAccount" $
      ServiceAccount
        <$> requiredField' "client_email" .= _saClientEmail
        <*> requiredField' "private_key" .= _saPrivateKey
        <*> requiredField' "project_id" .= _saProjectId

$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) {J.omitNothingFields = False} ''ServiceAccount)
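The PKey codec above pairs bimapCodec with a validating decode function and a plain projection for encoding. A self-contained sketch of the same pattern on a hypothetical NonEmptyText newtype (not from this commit), so the X509 machinery is not needed:

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (HasCodec (codec), bimapCodec, eitherDecodeJSONViaCodec)
import Data.Text (Text)
import qualified Data.Text as T

-- Hypothetical newtype, used only to demonstrate the bimapCodec pattern.
newtype NonEmptyText = NonEmptyText {unNonEmptyText :: Text}
  deriving (Eq, Show)

instance HasCodec NonEmptyText where
  -- Decoding validates; encoding only unwraps, mirroring
  -- @codec = bimapCodec dec originalBS codec@ in the PKey instance.
  codec = bimapCodec dec unNonEmptyText codec
    where
      dec t
        | T.null t = Left "expected a non-empty string"
        | otherwise = Right (NonEmptyText t)

main :: IO ()
main = do
  print (eitherDecodeJSONViaCodec @NonEmptyText "\"hello\"") -- succeeds
  print (eitherDecodeJSONViaCodec @NonEmptyText "\"\"") -- fails with the message above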
data ConfigurationJSON a
@@ -92,6 +108,54 @@ data ConfigurationJSON a
  deriving stock (Show, Eq, Generic)
  deriving (NFData, Hashable)

-- This codec has straightforward encoding, but on decoding there is
-- a possibility of receiving a string that contains JSON that is recursively
-- handled by this codec. There is also the issue that decoding the
-- @FromYamlJSON@ case should be attempted last because there is a possibility
-- that the decoding for @a@ is not disjoint from the other decoding cases. This
-- presents some asymmetry that is a little tricky to capture in a codec.
instance HasCodec a => HasCodec (ConfigurationJSON a) where
  codec = parseAlternative (parseAlternative mainCodec fromEnvEncodedAsNestedJSON) yamlJSONCodec
    where
      -- This is the only codec in this implementation that is used for
      -- encoding. It must cover both the @FromEnvJSON@ and @FromYamlJSON@ cases
      -- because Autodocodec does not support codecs that are partial in
      -- encoding.
      mainCodec :: JSONCodec (ConfigurationJSON a)
      mainCodec =
        dimapCodec dec enc $
          eitherCodec
            fromEnvCodec
            ( bimapCodec
                -- Fail parsing at this point because @codec \@a@ should only be
                -- used for parsing after trying @fromEnvEncodedAsNestedJSON@.
                (const $ Left "not used for parsing")
                id
                $ codec @a
            )
        where
          dec (Left text) = FromEnvJSON text
          dec (Right a) = FromYamlJSON a

          enc (FromEnvJSON i) = Left i
          enc (FromYamlJSON j) = Right j

      -- The JSON-encoded string case is used as an alternative in
      -- a 'parseAlternative' because we can implement the decoding direction,
      -- but not the encoding direction. (There isn't a good way to implement
      -- @ConfigurationJSON a -> Text@.) Fortunately an alternative in
      -- a 'parseAlternative' is only used for decoding so we don't need to
      -- implement encoding logic here.
      fromEnvEncodedAsNestedJSON :: ValueCodec Text (ConfigurationJSON a)
      fromEnvEncodedAsNestedJSON =
        bimapCodec
          (eitherDecodeJSONViaCodec . BL.fromStrict . TE.encodeUtf8)
          id
          $ codec @Text <?> "JSON-encoded string"

      yamlJSONCodec :: ValueCodec a (ConfigurationJSON a)
      yamlJSONCodec = FromYamlJSON <$> codec @a
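Not part of the commit: a small sketch of the three decode paths the comments above describe, instantiated at Text for brevity (an object with @from_env@, a JSON-encoded string handled recursively, and a fallback to the payload codec):

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (eitherDecodeJSONViaCodec)
import Data.Text (Text)
import Hasura.Backends.BigQuery.Source (ConfigurationJSON (..))

main :: IO ()
main = do
  -- 1. An object with a "from_env" key becomes FromEnvJSON.
  print (eitherDecodeJSONViaCodec @(ConfigurationJSON Text) "{\"from_env\": \"SERVICE_ACCOUNT\"}")
  -- 2. A JSON string whose contents are themselves JSON is decoded
  --    recursively, so this also becomes FromEnvJSON.
  print (eitherDecodeJSONViaCodec @(ConfigurationJSON Text) "\"{\\\"from_env\\\": \\\"SERVICE_ACCOUNT\\\"}\"")
  -- 3. Anything else falls through to the codec for the payload type,
  --    producing FromYamlJSON.
  print (eitherDecodeJSONViaCodec @(ConfigurationJSON Text) "\"plain value\"")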
instance J.FromJSON a => J.FromJSON (ConfigurationJSON a) where
  parseJSON = \case
    J.Object o | Just (J.String text) <- KM.lookup "from_env" o -> pure (FromEnvJSON text)
@@ -105,7 +169,7 @@ instance J.ToJSON a => J.ToJSON (ConfigurationJSON a) where
    FromEnvJSON i -> J.object ["from_env" J..= i]
    FromYamlJSON j -> J.toJSON j

-- | Configuration inputs when they are a YAML array or an Env var whos value is
-- | Configuration inputs when they are a YAML array or an Env var whose value is
-- a comma-separated string
data ConfigurationInputs
  = FromYamls [Text]
@@ -113,6 +177,15 @@ data ConfigurationInputs
  deriving stock (Show, Eq, Generic)
  deriving (NFData, Hashable)

instance HasCodec ConfigurationInputs where
  codec =
    dimapCodec
      (either FromYamls FromEnvs)
      (\case FromYamls i -> Left i; FromEnvs i -> Right i)
      $ disjointEitherCodec
        (codec @[Text])
        fromEnvCodec
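A brief sketch (not from the diff) of the two shapes ConfigurationInputs accepts; disjointEitherCodec is safe here because a JSON array and a @from_env@ object cannot be confused. The example assumes the constructors are exported from the module.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (eitherDecodeJSONViaCodec)
import Hasura.Backends.BigQuery.Source (ConfigurationInputs (..))

main :: IO ()
main = do
  -- A JSON/YAML array of dataset names decodes to FromYamls.
  print (eitherDecodeJSONViaCodec @ConfigurationInputs "[\"dataset_one\", \"dataset_two\"]")
  -- An object with a "from_env" key decodes to FromEnvs.
  print (eitherDecodeJSONViaCodec @ConfigurationInputs "{\"from_env\": \"BIGQUERY_DATASETS\"}")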
instance J.ToJSON ConfigurationInputs where
  toJSON = \case
    FromYamls i -> J.toJSON i
@@ -125,13 +198,25 @@ instance J.FromJSON ConfigurationInputs where
    _ -> fail "one of array or object must be provided"

-- | Configuration input when the YAML value as well as the Env var have
-- singlular values
-- singular values
data ConfigurationInput
  = FromYaml Text
  | FromEnv Text
  deriving stock (Show, Eq, Generic)
  deriving (NFData, Hashable)

instance HasCodec ConfigurationInput where
  codec =
    dimapCodec
      (either FromYaml FromEnv)
      (\case FromYaml i -> Left i; FromEnv i -> Right i)
      $ disjointEitherCodec fromYamls fromEnvCodec
    where
      fromYamls =
        parseAlternative
          (codec @Text)
          (tshow <$> codec @Scientific)

instance J.ToJSON ConfigurationInput where
  toJSON = \case
    FromYaml i -> J.toJSON i
@@ -159,7 +244,15 @@ $(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = True} ''BigQue
-- TODO: Write a proper codec, and use it to derive FromJSON and ToJSON
-- instances.
instance HasCodec BigQueryConnSourceConfig where
  codec = named "BigQueryConnSourceConfig" $ placeholderCodecViaJSON
  codec =
    object "BigQueryConnSourceConfig" $
      BigQueryConnSourceConfig
        <$> requiredField' "service_account" .= _cscServiceAccount
        <*> requiredField' "datasets" .= _cscDatasets
        <*> requiredField' "project_id" .= _cscProjectId
        <*> optionalFieldOrNull' "global_select_limit" .= _cscGlobalSelectLimit
        <*> optionalFieldOrNull' "retry_base_delay" .= _cscRetryBaseDelay
        <*> optionalFieldOrNull' "retry_limit" .= _cscRetryLimit

deriving instance Show BigQueryConnSourceConfig
@@ -23,7 +23,8 @@ module Hasura.Backends.MSSQL.Connection
  )
where

import Autodocodec (HasCodec (codec), named)
import Autodocodec (HasCodec (codec), dimapCodec, disjointEitherCodec, optionalFieldOrNull', optionalFieldWithDefault', requiredField')
import Autodocodec qualified as AC
import Control.Monad.Morph (hoist)
import Control.Monad.Trans.Control
import Data.Aeson
@@ -38,7 +39,7 @@ import Database.ODBC.SQLServer qualified as ODBC
import Hasura.Backends.MSSQL.SQL.Error
import Hasura.Base.Error
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Metadata.DTO.Utils (fromEnvCodec)
import Hasura.Prelude

class MonadError QErr m => MonadMSSQLTx m where
@@ -79,6 +80,13 @@ instance Hashable InputConnectionString

instance NFData InputConnectionString

instance HasCodec InputConnectionString where
  codec =
    dimapCodec
      (either RawString FromEnvironment)
      (\case RawString m -> Left m; FromEnvironment wEnv -> Right wEnv)
      $ disjointEitherCodec codec fromEnvCodec

instance ToJSON InputConnectionString where
  toJSON =
    \case
@@ -112,6 +120,13 @@ instance FromJSON MSSQLPoolSettings where
      <$> o .:? "max_connections" .!= _mpsMaxConnections defaultMSSQLPoolSettings
      <*> o .:? "idle_timeout" .!= _mpsIdleTimeout defaultMSSQLPoolSettings

instance HasCodec MSSQLPoolSettings where
  codec =
    AC.object "MSSQLPoolSettings" $
      MSSQLPoolSettings
        <$> optionalFieldWithDefault' "max_connections" (_mpsMaxConnections defaultMSSQLPoolSettings) AC..= _mpsMaxConnections
        <*> optionalFieldWithDefault' "idle_timeout" (_mpsIdleTimeout defaultMSSQLPoolSettings) AC..= _mpsIdleTimeout
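Illustration only (not part of the commit): with optionalFieldWithDefault', fields omitted from the metadata fall back to defaultMSSQLPoolSettings, matching the FromJSON instance above. The sketch assumes the record and its fields are exported from Hasura.Backends.MSSQL.Connection.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (eitherDecodeJSONViaCodec)
import Hasura.Backends.MSSQL.Connection (MSSQLPoolSettings (..))

main :: IO ()
main =
  -- "idle_timeout" is omitted here, so the default from
  -- defaultMSSQLPoolSettings is filled in by the codec.
  case eitherDecodeJSONViaCodec @MSSQLPoolSettings "{\"max_connections\": 75}" of
    Left err -> putStrLn ("decode failed: " <> err)
    Right settings ->
      putStrLn $
        "max_connections=" <> show (_mpsMaxConnections settings)
          <> ", idle_timeout=" <> show (_mpsIdleTimeout settings)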
defaultMSSQLPoolSettings :: MSSQLPoolSettings
defaultMSSQLPoolSettings =
  MSSQLPoolSettings
@@ -131,6 +146,13 @@ instance Hashable MSSQLConnectionInfo

instance NFData MSSQLConnectionInfo

instance HasCodec MSSQLConnectionInfo where
  codec =
    AC.object "MSSQLConnectionInfo" $
      MSSQLConnectionInfo
        <$> requiredField' "connection_string" AC..= _mciConnectionString
        <*> requiredField' "pool_settings" AC..= _mciPoolSettings

$(deriveToJSON hasuraJSON ''MSSQLConnectionInfo)

instance FromJSON MSSQLConnectionInfo where
@@ -151,12 +173,14 @@ instance Hashable MSSQLConnConfiguration

instance NFData MSSQLConnConfiguration

$(deriveJSON hasuraJSON {omitNothingFields = True} ''MSSQLConnConfiguration)

-- TODO: Write a proper codec, and use it to derive FromJSON and ToJSON
-- instances.
instance HasCodec MSSQLConnConfiguration where
  codec = named "MSSQLConnConfiguration" $ placeholderCodecViaJSON
  codec =
    AC.object "MSSQLConnConfiguration" $
      MSSQLConnConfiguration
        <$> requiredField' "connection_info" AC..= _mccConnectionInfo
        <*> optionalFieldOrNull' "read_replicas" AC..= _mccReadReplicas

$(deriveJSON hasuraJSON {omitNothingFields = True} ''MSSQLConnConfiguration)

createMSSQLPool ::
  MonadIO m =>
@@ -5,6 +5,7 @@
-- Defines a 'Hasura.RQL.Types.Backend.Backend' type class instance for MSSQL.
module Hasura.Backends.MSSQL.Instances.Types () where

import Autodocodec (codec)
import Data.Aeson
import Data.Text.Casing (GQLNameIdentifier)
import Database.ODBC.SQLServer qualified as ODBC
@@ -14,7 +15,6 @@ import Hasura.Backends.MSSQL.Types.Insert qualified as MSSQL (BackendInsert)
import Hasura.Backends.MSSQL.Types.Internal qualified as MSSQL
import Hasura.Backends.MSSQL.Types.Update qualified as MSSQL (BackendUpdate)
import Hasura.Base.Error
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Prelude
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.HealthCheck
@@ -66,7 +66,7 @@ instance Backend 'MSSQL where
    Just $
      HealthCheckImplementation
        { _hciDefaultTest = defaultHealthCheckTestSql,
          _hciTestCodec = placeholderCodecViaJSON
          _hciTestCodec = codec
        }

  isComparableType :: ScalarType 'MSSQL -> Bool
@@ -5,7 +5,8 @@
-- | Instances that're slow to compile.
module Hasura.Backends.MySQL.Types.Instances () where

import Autodocodec (HasCodec (codec), named)
import Autodocodec (HasCodec (codec), optionalFieldWithDefault', requiredField, requiredField')
import Autodocodec qualified as AC
import Control.DeepSeq
import Data.Aeson qualified as J
import Data.Aeson.Casing qualified as J
@@ -20,7 +21,6 @@ import Hasura.Backends.MySQL.Types.Internal
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue
import Hasura.Incremental.Internal.Dependency
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Prelude
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
@@ -196,6 +196,13 @@ instance Semigroup Top where
  (<>) x NoTop = x
  (<>) (Top x) (Top y) = Top (min x y)

instance HasCodec ConnPoolSettings where
  codec =
    AC.object "MySQLConnPoolSettings" $
      ConnPoolSettings
        <$> optionalFieldWithDefault' "idle_timeout" (_cscIdleTimeout defaultConnPoolSettings) AC..= _cscIdleTimeout
        <*> optionalFieldWithDefault' "max_connections" (_cscMaxConnections defaultConnPoolSettings) AC..= _cscMaxConnections

instance J.FromJSON ConnPoolSettings where
  parseJSON = J.withObject "MySQL pool settings" $ \o ->
    ConnPoolSettings
@@ -211,12 +218,20 @@ instance J.ToJSON Expression where
instance J.FromJSON Expression where
  parseJSON value = ValueExpression <$> J.parseJSON value

$(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = False} ''ConnSourceConfig)

-- TODO: Write a proper codec, and use it to derive FromJSON and ToJSON
-- instances.
instance HasCodec ConnSourceConfig where
  codec = named "MySQLConnConfiguration" $ placeholderCodecViaJSON
  codec =
    AC.object "MySQLConnSourceConfig" $
      ConnSourceConfig
        <$> requiredField "host" hostDoc AC..= _cscHost
        <*> requiredField' "port" AC..= _cscPort
        <*> requiredField' "user" AC..= _cscUser
        <*> requiredField' "password" AC..= _cscPassword
        <*> requiredField' "database" AC..= _cscDatabase
        <*> requiredField' "pool_settings" AC..= _cscPoolSettings
    where
      hostDoc = "Works with `127.0.0.1` but not with `localhost`: https://mariadb.com/kb/en/troubleshooting-connection-issues/#localhost-and"

$(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = False} ''ConnSourceConfig)

instance J.ToJSON (Pool Connection) where
  toJSON = const (J.String "_REDACTED_")
@@ -48,7 +48,6 @@ import Data.Time.Clock.Compat ()
import Database.PG.Query qualified as PG
import Hasura.Base.Instances ()
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Prelude
import Hasura.RQL.Types.Common (UrlConf (..))
import Hasura.SQL.Types (ExtensionsSchema (..))
@@ -328,7 +327,7 @@ instance HasCodec PostgresSourceConnInfo where
    CommentCodec "https://hasura.io/docs/latest/graphql/core/api-reference/syntax-defs.html#pgsourceconnectioninfo" $
      AC.object "PostgresSourceConnInfo" $
        PostgresSourceConnInfo
          <$> requiredFieldWith "database_url" placeholderCodecViaJSON databaseUrlDoc .== _psciDatabaseUrl
          <$> requiredField "database_url" databaseUrlDoc .== _psciDatabaseUrl
          <*> optionalFieldOrNull "pool_settings" poolSettingsDoc .== _psciPoolSettings
          <*> optionalFieldWithOmittedDefault "use_prepared_statements" False usePreparedStatementsDoc .== _psciUsePreparedStatements
          <*> optionalFieldWithOmittedDefault "isolation_level" PG.ReadCommitted isolationLevelDoc .== _psciIsolationLevel
@@ -9,7 +9,7 @@ module Hasura.Backends.Postgres.Instances.Types
  )
where

import Autodocodec (HasCodec)
import Autodocodec (HasCodec (codec))
import Data.Aeson (FromJSON)
import Data.Aeson qualified as J
import Data.Kind (Type)
@@ -25,7 +25,6 @@ import Hasura.Backends.Postgres.Types.Function qualified as Postgres
import Hasura.Backends.Postgres.Types.Insert qualified as Postgres (BackendInsert)
import Hasura.Backends.Postgres.Types.Update qualified as Postgres
import Hasura.Base.Error
import Hasura.Metadata.DTO.Placeholder (placeholderCodecViaJSON)
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp.AggregationPredicates qualified as Agg
import Hasura.RQL.Types.Backend
@@ -116,7 +115,7 @@ instance
    Just $
      HealthCheckImplementation
        { _hciDefaultTest = defaultHealthCheckTestSql,
          _hciTestCodec = placeholderCodecViaJSON
          _hciTestCodec = codec
        }

  isComparableType = Postgres.isComparableType
@@ -1,10 +1,19 @@
-- | Utility functions for use defining autodocodec codecs.
module Hasura.Metadata.DTO.Utils (codecNamePrefix, versionField, optionalVersionField) where
module Hasura.Metadata.DTO.Utils
  ( codecNamePrefix,
    fromEnvCodec,
    versionField,
    optionalVersionField,
  )
where

import Autodocodec
  ( Codec (EqCodec),
    JSONCodec,
    ObjectCodec,
    object,
    optionalFieldWith',
    requiredField',
    requiredFieldWith',
    scientificCodec,
    (.=),
@@ -38,3 +47,13 @@ optionalVersionField v =
-- database kind from type context.
codecNamePrefix :: forall b. (HasTag b) => Text
codecNamePrefix = T.toTitle $ T.toTxt $ reify $ backendTag @b

-- | Represents a text field wrapped in an object with a single property
-- named @from_env@.
--
-- Objects of this form appear in many places in the Metadata API. If we
-- reproduced this codec in each use case the OpenAPI document would have many
-- identical object definitions. Using a shared codec allows a single shared
-- reference.
fromEnvCodec :: JSONCodec Text
fromEnvCodec = object "FromEnv" $ requiredField' "from_env"
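Illustration only, assuming autodocodec's toJSONVia/parseJSONVia helpers: fromEnvCodec encodes a variable name as @{"from_env": ...}@ and decodes exactly that shape back to Text.

{-# LANGUAGE OverloadedStrings #-}

import Autodocodec (parseJSONVia, toJSONVia)
import Data.Aeson (object, (.=))
import Data.Aeson.Types (parseEither)
import Data.Text (Text)
import Hasura.Metadata.DTO.Utils (fromEnvCodec)

main :: IO ()
main = do
  -- Encoding wraps the variable name: {"from_env":"PG_DATABASE_URL"}
  print (toJSONVia fromEnvCodec ("PG_DATABASE_URL" :: Text))
  -- Decoding unwraps it again; any other shape is rejected.
  print (parseEither (parseJSONVia fromEnvCodec) (object ["from_env" .= ("PG_DATABASE_URL" :: Text)]))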
@@ -43,7 +43,8 @@ module Hasura.RQL.Types.Common
  )
where

import Autodocodec (HasCodec (codec), dimapCodec)
import Autodocodec (HasCodec (codec), bimapCodec, dimapCodec, disjointEitherCodec, optionalFieldOrNull', requiredField')
import Autodocodec qualified as AC
import Data.Aeson
import Data.Aeson qualified as J
import Data.Aeson.Casing
@@ -63,6 +64,7 @@ import Hasura.Base.ToErrorValue
import Hasura.EncJSON
import Hasura.GraphQL.Schema.Options qualified as Options
import Hasura.Incremental (Cacheable (..))
import Hasura.Metadata.DTO.Utils (fromEnvCodec)
import Hasura.Prelude
import Hasura.RQL.DDL.Headers ()
import Language.GraphQL.Draft.Syntax qualified as G
@@ -284,6 +286,15 @@ instance Cacheable InputWebhook

instance Hashable InputWebhook

instance HasCodec InputWebhook where
  codec = dimapCodec InputWebhook unInputWebhook urlTemplateCodec
    where
      urlTemplateCodec =
        bimapCodec
          (mapLeft ("Parsing URL template failed: " ++) . parseURLTemplate)
          printURLTemplate
          codec

instance ToJSON InputWebhook where
  toJSON = String . printURLTemplate . unInputWebhook

@@ -336,6 +347,16 @@ instance Cacheable PGConnectionParams

instance Hashable PGConnectionParams

instance HasCodec PGConnectionParams where
  codec =
    AC.object "PGConnectionParams" $
      PGConnectionParams
        <$> requiredField' "host" AC..= _pgcpHost
        <*> requiredField' "username" AC..= _pgcpUsername
        <*> optionalFieldOrNull' "password" AC..= _pgcpPassword
        <*> requiredField' "port" AC..= _pgcpPort
        <*> requiredField' "database" AC..= _pgcpDatabase

$(deriveToJSON hasuraJSON {omitNothingFields = True} ''PGConnectionParams)

instance FromJSON PGConnectionParams where
@@ -362,6 +383,22 @@ instance Cacheable UrlConf

instance Hashable UrlConf

instance HasCodec UrlConf where
  codec =
    dimapCodec dec enc $
      disjointEitherCodec valCodec $ disjointEitherCodec fromEnvCodec fromParamsCodec
    where
      valCodec = codec
      fromParamsCodec = AC.object "UrlConfFromParams" $ requiredField' "connection_parameters"

      dec (Left w) = UrlValue w
      dec (Right (Left wEnv)) = UrlFromEnv wEnv
      dec (Right (Right wParams)) = UrlFromParams wParams

      enc (UrlValue w) = Left w
      enc (UrlFromEnv wEnv) = Right $ Left wEnv
      enc (UrlFromParams wParams) = Right $ Right wParams
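A short sketch (not from the diff) of how the nested disjointEitherCodec above distinguishes the UrlConf shapes when decoding; the URL value is made up:

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}

import Autodocodec (eitherDecodeJSONViaCodec)
import qualified Data.Text as T
import Hasura.RQL.Types.Common (UrlConf (..))

main :: IO ()
main = do
  -- An object with "from_env" selects the middle branch (UrlFromEnv).
  case eitherDecodeJSONViaCodec @UrlConf "{\"from_env\": \"PG_DATABASE_URL\"}" of
    Right (UrlFromEnv var) -> putStrLn ("from_env: " <> T.unpack var)
    Right _ -> putStrLn "unexpected constructor"
    Left err -> putStrLn ("decode failed: " <> err)
  -- A plain string is parsed as a URL template (UrlValue); an object with
  -- "connection_parameters" would select UrlFromParams instead.
  case eitherDecodeJSONViaCodec @UrlConf "\"postgres://user:password@host:5432/db\"" of
    Right (UrlValue _) -> putStrLn "parsed a URL template"
    Right _ -> putStrLn "unexpected constructor"
    Left err -> putStrLn ("decode failed: " <> err)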
instance ToJSON UrlConf where
  toJSON (UrlValue w) = toJSON w
  toJSON (UrlFromEnv wEnv) = object ["from_env" .= wEnv]
@@ -27,6 +27,12 @@ newtype HealthCheckTestSql = HealthCheckTestSql
  }
  deriving (Eq, Generic, Show, Cacheable, Hashable, NFData)

instance HasCodec HealthCheckTestSql where
  codec =
    AC.object "HealthCheckTestSql" $
      HealthCheckTestSql
        <$> optionalFieldWithDefault' "sql" defaultTestSql AC..= _hctSql

instance ToJSON HealthCheckTestSql where
  toJSON = genericToJSON hasuraJSON {omitNothingFields = True}
server/src-test/Hasura/Backends/BigQuery/SourceSpec.hs (new file, 127 lines)
@@ -0,0 +1,127 @@
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}

module Hasura.Backends.BigQuery.SourceSpec (spec) where

import Autodocodec (HasCodec (codec), eitherDecodeJSONViaCodec, object, requiredField', toJSONViaCodec, (.=))
import Data.Aeson (FromJSON, ToJSON (toJSON), Value, eitherDecode, encode)
import Data.Aeson qualified as J
import Data.Aeson.Casing qualified as J
import Data.Aeson.QQ (aesonQQ)
import Data.Aeson.TH qualified as J
import Data.ByteString.Lazy (ByteString)
import Data.Text.Lazy.Encoding (decodeUtf8)
import Hasura.Backends.BigQuery.Source (ConfigurationInput (FromEnv), ConfigurationJSON (..))
import Hasura.Prelude
import Test.Hspec

-- ConfigurationJSON is used with the real ServiceAccount type - but that type
-- has a private key property that is obnoxious to get a mock value for.
data MockServiceAccount = MockServiceAccount
  { _msaClientEmail :: Text,
    _msaPrivateKey :: Text,
    _msaProjectId :: Text
  }
  deriving (Eq, Show)

instance HasCodec MockServiceAccount where
  codec =
    object "MockServiceAccount" $
      MockServiceAccount
        <$> requiredField' "client_email" .= _msaClientEmail
        <*> requiredField' "private_key" .= _msaPrivateKey
        <*> requiredField' "project_id" .= _msaProjectId

$(J.deriveJSON (J.aesonDrop 4 J.snakeCase) {J.omitNothingFields = False} ''MockServiceAccount)

spec :: Spec
spec = do
  describe "BigQuery" do
    describe "HasCodec (ConfigurationJSON a)" do
      it "should accept text from a @from_env@ property" do
        let input = encode $ [aesonQQ|{"from_env": "config text"}|]
        input `shouldDecodeTo` FromEnvJSON @(ConfigurationJSON MockServiceAccount) "config text"

      it "should accept data parsed via underlying codec" do
        let clientEmail = "client@test.net"
        let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc."
        let projectId = "2"
        let input =
              encode $
                [aesonQQ|
                  { client_email: #{clientEmail},
                    private_key: #{privateKey},
                    project_id: #{projectId}
                  }
                |]
        input
          `shouldDecodeTo` FromYamlJSON
            ( MockServiceAccount clientEmail privateKey projectId
            )

      it "should accept a string containing a serialized object with a @from_env@ property" do
        let input = "{\"from_env\":\"config text\"}"
        input `shouldDecodeTo` FromEnvJSON @(ConfigurationJSON MockServiceAccount) "config text"

      it "should accept a string containing a serialized value parsed via underlying codec" do
        let clientEmail = "client@test.net"
        let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc."
        let projectId = "2"
        let input =
              decodeUtf8 $
                encode $
                  [aesonQQ|
                    { client_email: #{clientEmail},
                      private_key: #{privateKey},
                      project_id: #{projectId}
                    }
                  |]
        let stringInput = encode input
        stringInput
          `shouldDecodeTo` FromYamlJSON
            ( MockServiceAccount clientEmail privateKey projectId
            )

      it "should encode to an object with a @from_env@ property" do
        let output = FromEnv "config text"
        output `shouldEncodeTo` [aesonQQ|{from_env: "config text"}|]

      it "should encode via the underlying codec" do
        let clientEmail = "client@test.net"
        let privateKey = "-----BEGIN RSA PRIVATE KEY----- etc."
        let projectId = "2"
        let output = FromYamlJSON $ MockServiceAccount clientEmail privateKey projectId
        output
          `shouldEncodeTo` [aesonQQ|
            { client_email: #{clientEmail},
              private_key: #{privateKey},
              project_id: #{projectId}
            }
          |]

-- | Assert that the given bytestring decodes to the expected value when
-- decoding via Autodocodec, and that the decoded value matches decoding via a
-- FromJSON instance.
shouldDecodeTo ::
  forall a.
  (Eq a, FromJSON a, HasCodec a, Show a) =>
  ByteString ->
  a ->
  Expectation
input `shouldDecodeTo` expected = do
  decoded `shouldBe` (Right expected)
  decoded `shouldBe` decodedViaFromJSON
  where
    decoded = eitherDecodeJSONViaCodec @a input
    decodedViaFromJSON = eitherDecode @a input

-- | Assert that the given value encodes to the expected JSON value when
-- encoding via Autodocodec, and that the encoded value matches encoding via
-- a ToJSON instance.
shouldEncodeTo :: forall a. (HasCodec a, ToJSON a) => a -> Value -> Expectation
output `shouldEncodeTo` expected = do
  encoded `shouldBe` expected
  encoded `shouldBe` encodedViaToJSON
  where
    encoded = toJSONViaCodec output
    encodedViaToJSON = toJSON output