server: don't memoize backend scalar type reps through WithScalarType (#136)

Co-authored-by: Auke Booij <auke@hasura.io>
GITHUB_PR_NUMBER: 6281
GITHUB_PR_URL: https://github.com/hasura/graphql-engine/pull/6281
GitOrigin-RevId: b7ab3352af21175f0065f1bc2304a1232f6a5580
This commit is contained in:
hasura-bot 2020-12-03 17:51:27 +05:30
parent 20913c254d
commit 115f2cb621
17 changed files with 126 additions and 121 deletions

View File

@ -308,6 +308,7 @@ library
, Hasura.Backends.Postgres.Execute.Mutation
, Hasura.Backends.Postgres.Execute.RemoteJoin
, Hasura.Backends.Postgres.Translate.BoolExp
, Hasura.Backends.Postgres.Translate.Column
, Hasura.Backends.Postgres.Translate.Delete
, Hasura.Backends.Postgres.Translate.Insert
, Hasura.Backends.Postgres.Translate.Mutation

View File

@ -41,7 +41,6 @@ module Hasura.Backends.Postgres.SQL.Types
, isGraphQLCompliantTableName
, PGScalarType(..)
, WithScalarType(..)
, textToPGScalarType
, pgTypeOid
@ -481,12 +480,6 @@ geoTypes = [PGGeometry, PGGeography]
isGeoType :: PGScalarType -> Bool
isGeoType = (`elem` geoTypes)
data WithScalarType a
= WithScalarType
{ pstType :: !PGScalarType
, pstValue :: !a
} deriving (Show, Eq, Functor, Foldable, Traversable)
data PGTypeKind
= PGKindBase
| PGKindComposite

View File

@ -12,8 +12,6 @@ module Hasura.Backends.Postgres.SQL.Value
, binEncoder
, txtEncoder
, toBinaryValue
, toTxtValue
, toPrepParam
) where
@ -128,17 +126,19 @@ withConstructorFn ty v
scientificToInteger :: (Integral i, Bounded i) => Scientific -> AT.Parser i
scientificToInteger num = case toBoundedInteger num of
Just parsed -> pure parsed
Nothing -> fail $ "The value " ++ show num ++ " lies outside the "
++ "bounds or is not an integer. Maybe it is a "
++ "float, or is there integer overflow?"
scientificToInteger num =
toBoundedInteger num
`onNothing`
fail ("The value " ++ show num ++ " lies outside the "
++ "bounds or is not an integer. Maybe it is a "
++ "float, or is there integer overflow?")
scientificToFloat :: (RealFloat f) => Scientific -> AT.Parser f
scientificToFloat num = case toBoundedRealFloat num of
Right parsed -> pure parsed
Left _ -> fail $ "The value " ++ show num ++ " lies outside the "
++ "bounds. Is it overflowing the float bounds?"
scientificToFloat num =
toBoundedRealFloat num
`onLeft` \ _ ->
fail ("The value " ++ show num ++ " lies outside the "
++ "bounds. Is it overflowing the float bounds?")
parsePGValue :: PGScalarType -> Value -> AT.Parser PGScalarValue
@ -279,10 +279,3 @@ toPrepParam :: Int -> PGScalarType -> S.SQLExp
toPrepParam i ty =
-- See Note [Type casting prepared params] above
S.withTyAnn ty . withConstructorFn ty $ S.SEPrep i
toBinaryValue :: WithScalarType PGScalarValue -> Q.PrepArg
toBinaryValue = binEncoder . pstValue
toTxtValue :: WithScalarType PGScalarValue -> S.SQLExp
toTxtValue (WithScalarType ty val) =
S.withTyAnn ty . withConstructorFn ty $ txtEncoder val

View File

@ -0,0 +1,16 @@
module Hasura.Backends.Postgres.Translate.Column
( toTxtValue
) where
import Hasura.Prelude
import Hasura.Backends.Postgres.SQL.DML
import Hasura.Backends.Postgres.SQL.Value
import Hasura.RQL.Types
toTxtValue :: ColumnValue 'Postgres -> SQLExp
toTxtValue ColumnValue{..} =
withTyAnn ty . withConstructorFn ty $ txtEncoder cvValue
where
ty = unsafePGColumnToBackend cvType

View File

@ -42,8 +42,9 @@ import qualified Hasura.RQL.IR.Select as RS
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value (PGScalarValue (..), toTxtValue)
import Hasura.Backends.Postgres.SQL.Value (PGScalarValue (..))
import Hasura.Backends.Postgres.Translate.Select (asSingleRowJsonResp)
import Hasura.Backends.Postgres.Translate.Column (toTxtValue)
import Hasura.EncJSON
import Hasura.GraphQL.Execute.Prepare
import Hasura.GraphQL.Parser
@ -159,7 +160,7 @@ resolveActionExecution env logger userInfo annAction execContext = do
(webhookRes, respHeaders) <- flip runReaderT logger $ callWebhook env manager outputType outputFields reqHeaders confHeaders
forwardClientHeaders resolvedWebhook handlerPayload timeout
let webhookResponseExpression = RS.AEInput $ UVLiteral $
toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
toTxtValue $ ColumnValue (ColumnScalar PGJSONB) $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
selectAstUnresolved = processOutputSelectionSet webhookResponseExpression
outputType definitionList annFields stringifyNum
(astResolved, _expectedVariables) <- flip runStateT Set.empty $ RS.traverseAnnSimpleSelect prepareWithoutPlan selectAstUnresolved
@ -276,8 +277,8 @@ resolveAsyncActionQuery userInfo annAction =
actionIdColumnEq = BoolFld $ AVCol actionIdColumnInfo [AEQ True actionId]
sessionVarsColumnInfo = ColumnInfo (unsafePGCol "session_variables") $$(G.litName "session_variables")
0 (ColumnScalar PGJSONB) False Nothing
sessionVarValue = flip UVParameter Nothing $ ColumnValue (ColumnScalar PGJSONB) $
WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON $ _uiSession userInfo
sessionVarValue = UVParameter Nothing $ ColumnValue (ColumnScalar PGJSONB) $
PGValJSONB $ Q.JSONB $ J.toJSON $ _uiSession userInfo
sessionVarsColumnEq = BoolFld $ AVCol sessionVarsColumnInfo [AEQ True sessionVarValue]
-- For non-admin roles, accessing an async action's response should be allowed only for the user

View File

@ -54,6 +54,7 @@ import Hasura.Backends.Postgres.Connection
import Hasura.Backends.Postgres.SQL.Error
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.Column (toTxtValue)
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Action
import Hasura.GraphQL.Execute.Query
@ -248,17 +249,17 @@ type ValidatedSyntheticVariables = ValidatedVariables []
validateVariables
:: (Traversable f, MonadError QErr m, MonadIO m)
=> PGExecCtx
-> f (WithScalarType PGScalarValue)
-> f (ColumnValue 'Postgres)
-> m (ValidatedVariables f)
validateVariables pgExecCtx variableValues = do
let valSel = mkValidationSel $ toList variableValues
Q.Discard () <- runQueryTx_ $ liftTx $
Q.rawQE dataExnErrHandler (Q.fromBuilder $ toSQL valSel) [] False
pure . ValidatedVariables $ fmap (txtEncodedPGVal . pstValue) variableValues
pure . ValidatedVariables $ fmap (txtEncodedPGVal . cvValue) variableValues
where
mkExtrs = map (flip S.Extractor Nothing . toTxtValue)
mkExtr = flip S.Extractor Nothing . toTxtValue
mkValidationSel vars =
S.mkSelect { S.selExtr = mkExtrs vars }
S.mkSelect { S.selExtr = map mkExtr vars }
runQueryTx_ tx = do
res <- liftIO $ runExceptT (runQueryTx pgExecCtx tx)
liftEither res
@ -289,7 +290,7 @@ resolveMultiplexedValue
:: (MonadState QueryParametersInfo m)
=> UnpreparedValue 'Postgres -> m S.SQLExp
resolveMultiplexedValue = \case
UVParameter colVal varM -> do
UVParameter varM colVal -> do
varJsonPath <- case fmap PS.getName varM of
Just varName -> do
modifying qpiReusableVariableValues $ Map.insert varName colVal
@ -298,7 +299,7 @@ resolveMultiplexedValue = \case
syntheticVarIndex <- use (qpiSyntheticVariableValues . to length)
modifying qpiSyntheticVariableValues (|> colVal)
pure ["synthetic", T.pack $ show syntheticVarIndex]
pure $ fromResVars (CollectableTypeScalar $ pstType $ cvValue colVal) varJsonPath
pure $ fromResVars (CollectableTypeScalar $ unsafePGColumnToBackend $ cvType colVal) varJsonPath
UVSessionVar ty sessVar -> do
modifying qpiReferencedSessionVariables (Set.insert sessVar)
pure $ fromResVars ty ["session", sessionVariableToText sessVar]
@ -374,8 +375,8 @@ buildLiveQueryPlan pgExecCtx userInfo unpreparedAST = do
-- We need to ensure that the values provided for variables are correct according to Postgres.
-- Without this check an invalid value for a variable for one instance of the subscription will
-- take down the entire multiplexed query.
validatedQueryVars <- validateVariables pgExecCtx $ fmap cvValue _qpiReusableVariableValues
validatedSyntheticVars <- validateVariables pgExecCtx $ map cvValue $ toList _qpiSyntheticVariableValues
validatedQueryVars <- validateVariables pgExecCtx _qpiReusableVariableValues
validatedSyntheticVars <- validateVariables pgExecCtx $ toList _qpiSyntheticVariableValues
let -- TODO validatedQueryVars validatedSyntheticVars
cohortVariables = mkCohortVariables _qpiReferencedSessionVariables

View File

@ -27,8 +27,8 @@ import Data.Text.Extended
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.Column
import Hasura.GraphQL.Parser.Column
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.DML.Internal (currentSession)
@ -75,10 +75,10 @@ initPlanningSt =
prepareWithPlan :: (MonadState PlanningSt m) => UnpreparedValue 'Postgres -> m S.SQLExp
prepareWithPlan = \case
UVParameter ColumnValue{ cvValue = colVal } varInfoM -> do
UVParameter varInfoM ColumnValue{..} -> do
argNum <- maybe getNextArgNum (getVarArgNum . getName) varInfoM
addPrepArg argNum (toBinaryValue colVal, pstValue colVal)
return $ toPrepParam argNum (pstType colVal)
addPrepArg argNum (binEncoder cvValue, cvValue)
return $ toPrepParam argNum (unsafePGColumnToBackend cvType)
UVSessionVar ty sessVar -> do
sessVarVal <- retrieveAndFlagSessionVariableValue insertSessionVariable sessVar currentSessionExp
@ -95,7 +95,7 @@ prepareWithPlan = \case
prepareWithoutPlan :: (MonadState (Set.HashSet SessionVariable) m) => UnpreparedValue 'Postgres -> m S.SQLExp
prepareWithoutPlan = \case
UVParameter pgValue _ -> pure $ toTxtValue $ cvValue pgValue
UVParameter _ cv -> pure $ toTxtValue cv
UVLiteral sqlExp -> pure sqlExp
UVSession -> pure currentSession
UVSessionVar ty sessVar -> do

View File

@ -82,7 +82,7 @@ actionQueryToRootFieldPlan prepped = \case
-- let varName = G.unName var
-- colVal <- onNothing (Map.lookup var annVars) $
-- throw500 $ "missing variable in annVars : " <> varName
-- let prepVal = (toBinaryValue colVal, pstValue colVal)
-- let prepVal = (binEncoder colVal, pstValue colVal)
-- return $ IntMap.insert prepNo prepVal accum

View File

@ -26,6 +26,7 @@ import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.RQL.IR.Select as DS
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.Column (toTxtValue)
import Hasura.EncJSON
import Hasura.GraphQL.Context
import Hasura.GraphQL.Parser
@ -59,7 +60,7 @@ resolveUnpreparedValue
:: (MonadError QErr m)
=> UserInfo -> UnpreparedValue 'Postgres -> m S.SQLExp
resolveUnpreparedValue userInfo = \case
UVParameter pgValue _ -> pure $ toTxtValue $ cvValue pgValue
UVParameter _ cv -> pure $ toTxtValue cv
UVLiteral sqlExp -> pure sqlExp
UVSession -> pure $ sessionInfoJsonExp $ _uiSession userInfo
UVSessionVar ty sessionVariable -> do

View File

@ -1,8 +1,7 @@
{-# LANGUAGE StrictData #-}
module Hasura.GraphQL.Parser.Column
( ColumnValue(..)
, mkScalarTypeName
( mkScalarTypeName
, UnpreparedValue(..)
@ -49,9 +48,9 @@ openOpaque (Opaque (Just _) value) = markNotReusable $> value
data UnpreparedValue (b :: BackendType)
-- | A SQL value that can be parameterized over.
= UVParameter
(ColumnValue b)
(Maybe VariableInfo)
-- ^ The GraphQL variable this value came from, if any.
(ColumnValue b)
-- | A literal SQL expression that /cannot/ be parameterized over.
| UVLiteral (SQLExpression b)
-- | The entire session variables JSON object.
@ -59,16 +58,11 @@ data UnpreparedValue (b :: BackendType)
-- | A single session variable.
| UVSessionVar (SessionVarType b) SessionVariable
data ColumnValue (b :: BackendType) = ColumnValue
{ cvType :: ColumnType b
, cvValue :: ColumnValueType b
}
-- FIXME exporting this method means doing away with the opaqueness of the
-- 'Opaque' data type, since the constructors of 'UnpreparedValue' are exported
-- globally.
mkParameter :: Opaque (ColumnValue b) -> UnpreparedValue b
mkParameter (Opaque variable value) = UVParameter value variable
mkParameter (Opaque variable value) = UVParameter variable value
-- -------------------------------------------------------------------------------------------------

View File

@ -11,7 +11,7 @@ import qualified Hasura.RQL.IR.Select as IR
import Language.GraphQL.Draft.Syntax (Nullability, Name)
import Hasura.GraphQL.Parser ( InputFieldsParser, Kind (..), Parser
, UnpreparedValue (..), Opaque, ColumnValue
, UnpreparedValue (..), Opaque
, Definition, EnumValueInfo, FieldParser)
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.Common
@ -51,7 +51,7 @@ class Backend b => BackendSchema (b :: BackendType) where
:: (MonadError QErr m)
=> ColumnType b
-> Value
-> m (ColumnValueType b)
-> m (ScalarValue b)
-- TODO: THIS IS A TEMPORARY FIX
-- while offset is exposed in the schema as a GraphQL Int, which
-- is a bounded Int32, previous versions of the code used to also

View File

@ -32,7 +32,6 @@ import Hasura.GraphQL.Parser (Definition, InputFieldsP
Opaque, Parser, UnpreparedValue (..),
Variable (..), mkParameter)
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Column (ColumnValue)
import Hasura.GraphQL.Parser.Internal.Parser (Parser (..), peelVariable, typeCheck,
typeMismatch, valueToJSON)
import Hasura.GraphQL.Schema.Backend (BackendSchema, ComparisonExp)
@ -49,8 +48,8 @@ columnParser columnType (G.Nullability isNullable) =
-- TODO(PDV): It might be worth memoizing this function even though it isn't
-- recursive simply for performance reasons, since it's likely to be hammered
-- during schema generation. Need to profile to see whether or not it's a win.
opaque . fmap (P.ColumnValue columnType) <$> case columnType of
ColumnScalar scalarType -> withScalarType scalarType <$> case scalarType of
opaque . fmap (ColumnValue columnType) <$> case columnType of
ColumnScalar scalarType -> possiblyNullable scalarType <$> case scalarType of
PGInteger -> pure (PGValInteger <$> P.int)
PGBoolean -> pure (PGValBoolean <$> P.boolean)
PGFloat -> pure (PGValDouble <$> P.float)
@ -76,7 +75,7 @@ columnParser columnType (G.Nullability isNullable) =
case nonEmpty (Map.toList enumValues) of
Just enumValuesList -> do
name <- qualifiedObjectToName tableName <&> (<> $$(G.litName "_enum"))
pure $ withScalarType PGText $ P.enum name Nothing (mkEnumValue <$> enumValuesList)
pure $ possiblyNullable PGText $ P.enum name Nothing (mkEnumValue <$> enumValuesList)
Nothing -> throw400 ValidationFailed "empty enum values"
where
-- Sadly, this combinator is not sound in general, so we can't export it
@ -102,7 +101,6 @@ columnParser columnType (G.Nullability isNullable) =
P.mkOpaque (Just vInfo) <$> pParser parser (absurd <$> vValue)
value -> P.mkOpaque Nothing <$> pParser parser value
}
withScalarType scalarType = fmap (WithScalarType scalarType) . possiblyNullable scalarType
possiblyNullable scalarType
| isNullable = fmap (fromMaybe $ PGNull scalarType) . P.nullable
| otherwise = id
@ -302,9 +300,9 @@ comparisonExps = P.memoize 'comparisonExps \columnType -> do
]
]
where
mkListLiteral :: ColumnType 'Postgres -> [P.ColumnValue 'Postgres] -> UnpreparedValue 'Postgres
mkListLiteral :: ColumnType 'Postgres -> [ColumnValue 'Postgres] -> UnpreparedValue 'Postgres
mkListLiteral columnType columnValues = P.UVLiteral $ SETyAnn
(SEArray $ txtEncoder . pstValue . P.cvValue <$> columnValues)
(SEArray $ txtEncoder . cvValue <$> columnValues)
(mkTypeAnn $ CollectableTypeArray $ unsafePGColumnToBackend columnType)
castExp :: ColumnType 'Postgres -> m (Maybe (Parser 'Input n (CastExp 'Postgres (UnpreparedValue 'Postgres))))

View File

@ -704,7 +704,7 @@ tableConnectionArgs pkeyColumns table selectPermissions = do
pgColumnValue <- iResultToMaybe (executeJSONPath columnJsonPath cursorValue)
`onNothing` throwInvalidCursor
pgValue <- liftQErr $ parseScalarValue columnType pgColumnValue
let unresolvedValue = flip UVParameter Nothing $ P.ColumnValue columnType pgValue
let unresolvedValue = UVParameter Nothing $ ColumnValue columnType pgValue
pure $ IR.ConnectionSplit splitKind unresolvedValue $
IR.OrderByItemG Nothing (IR.AOCColumn pgColumnInfo) Nothing
Just orderBys ->
@ -714,7 +714,7 @@ tableConnectionArgs pkeyColumns table selectPermissions = do
orderByItemValue <- iResultToMaybe (executeJSONPath (getPathFromOrderBy annObCol) cursorValue)
`onNothing` throwInvalidCursor
pgValue <- liftQErr $ parseScalarValue columnType orderByItemValue
let unresolvedValue = flip UVParameter Nothing $ P.ColumnValue columnType pgValue
let unresolvedValue = UVParameter Nothing $ ColumnValue columnType pgValue
pure $ IR.ConnectionSplit splitKind unresolvedValue $
IR.OrderByItemG orderType (() <$ annObCol) nullsOrder
where
@ -1307,5 +1307,5 @@ nodeField = do
<<> " in node id: " <> t
pgColumnType = pgiType columnInfo
pgValue <- modifyErr modifyErrFn $ parseScalarValue pgColumnType columnValue
let unpreparedValue = flip UVParameter Nothing $ P.ColumnValue pgColumnType pgValue
let unpreparedValue = UVParameter Nothing $ ColumnValue pgColumnType pgValue
pure $ IR.BoolFld $ IR.AVCol columnInfo [IR.AEQ True unpreparedValue]

View File

@ -17,8 +17,8 @@ import Data.Aeson.Types
import Data.Text.Extended
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.Backends.Postgres.Translate.Column
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.Server.Utils
@ -161,17 +161,18 @@ valueParser
valueParser pgType = \case
-- When it is a special variable
String t
| isSessionVariable t -> return $ mkTypedSessionVar pgType $ mkSessionVariable t
| isReqUserId t -> return $ mkTypedSessionVar pgType userIdHeader
| isSessionVariable t -> return $ mkTypedSessionVar pgType $ mkSessionVariable t
| isReqUserId t -> return $ mkTypedSessionVar pgType userIdHeader
-- Typical value as Aeson's value
val -> case pgType of
CollectableTypeScalar columnType -> PSESQLExp . toTxtValue <$> parsePGScalarValue columnType val
CollectableTypeScalar cvType ->
PSESQLExp . toTxtValue . ColumnValue cvType <$> parsePGScalarValue cvType val
CollectableTypeArray ofType -> do
vals <- runAesonParser parseJSON val
WithScalarType scalarType scalarValues <- parsePGScalarValues ofType vals
scalarValues <- parsePGScalarValues ofType vals
return . PSESQLExp $ S.SETyAnn
(S.SEArray $ map (toTxtValue . WithScalarType scalarType) scalarValues)
(S.mkTypeAnn $ CollectableTypeArray scalarType)
(S.SEArray $ map (toTxtValue . ColumnValue ofType) scalarValues)
(S.mkTypeAnn $ CollectableTypeArray (unsafePGColumnToBackend ofType))
mkTypedSessionVar :: CollectableType (ColumnType 'Postgres) -> SessionVariable -> PartialSQLExp 'Postgres
mkTypedSessionVar columnType =

View File

@ -21,6 +21,7 @@ import Hasura.Backends.Postgres.SQL.Error
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.Backends.Postgres.Translate.Column
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.Session
@ -146,8 +147,8 @@ binRHSBuilder :: (QErrM m) => ColumnType 'Postgres -> Value -> DMLP1T m S.SQLExp
binRHSBuilder colType val = do
preparedArgs <- get
scalarValue <- parsePGScalarValue colType val
put (preparedArgs DS.|> toBinaryValue scalarValue)
return $ toPrepParam (DS.length preparedArgs + 1) (pstType scalarValue)
put (preparedArgs DS.|> binEncoder scalarValue)
return $ toPrepParam (DS.length preparedArgs + 1) (unsafePGColumnToBackend colType)
fetchRelTabInfo
:: (QErrM m, CacheRM m)
@ -259,10 +260,10 @@ convBoolExp cim spi be sessVarBldr prepValBldr = do
CollectableTypeArray ofTy -> do
-- for arrays, we don't use the prepared builder
vals <- runAesonParser parseJSON val
WithScalarType scalarType scalarValues <- parsePGScalarValues ofTy vals
scalarValues <- parsePGScalarValues ofTy vals
return $ S.SETyAnn
(S.SEArray $ map (toTxtValue . WithScalarType scalarType) scalarValues)
(S.mkTypeAnn $ CollectableTypeArray scalarType)
(S.SEArray $ map (toTxtValue . ColumnValue ofTy) scalarValues)
(S.mkTypeAnn $ CollectableTypeArray (unsafePGColumnToBackend ofTy))
dmlTxErrorHandler :: Q.PGTxErr -> QErr
dmlTxErrorHandler = mkTxErrorHandler $ \case

View File

@ -14,6 +14,8 @@ module Hasura.RQL.Types.Column
, unsafePGColumnToBackend
, parseTxtEncodedPGValue
, ColumnValue(..)
, ColumnInfo(..)
, RawColumnInfo(..)
, PrimaryKeyColumns
@ -77,9 +79,10 @@ instance Backend b => FromJSON (EnumReference b) where
instance Backend b => ToJSON (EnumReference b) where
toJSON = genericToJSON $ aesonPrefix snakeCase
-- | The type we use for columns, which are currently always “scalars” (though see the note about
-- 'PGType'). Unlike 'PGScalarType', which represents a type that /Postgres/ knows about, this type
-- characterizes distinctions we make but Postgres doesn't.
-- | The type we use for columns, which are currently always “scalars” (though
-- see the note about 'CollectableType'). Unlike 'ScalarType', which represents
-- a type that a backend knows about, this type characterizes distinctions we
-- make but the backend doesn't.
data ColumnType (b :: BackendType)
-- | Ordinary Postgres columns.
= ColumnScalar !(ScalarType b)
@ -103,6 +106,11 @@ instance Backend b => ToTxt (ColumnType b) where
ColumnScalar scalar -> toTxt scalar
ColumnEnumReference (EnumReference tableName _) -> toTxt tableName
data ColumnValue (b :: BackendType) = ColumnValue
{ cvType :: ColumnType b
, cvValue :: ScalarValue b
}
isScalarColumnWhere :: (ScalarType b -> Bool) -> ColumnType b -> Bool
isScalarColumnWhere f = \case
ColumnScalar scalar -> f scalar
@ -111,19 +119,18 @@ isScalarColumnWhere f = \case
-- | Gets the representation type associated with a 'ColumnType'. Avoid using this if possible.
-- Prefer 'parsePGScalarValue', 'parsePGScalarValues', or
-- 'Hasura.RQL.Types.BoolExp.mkTypedSessionVar'.
unsafePGColumnToBackend :: ColumnType 'Postgres -> PGScalarType
unsafePGColumnToBackend :: ColumnType 'Postgres -> ScalarType 'Postgres
unsafePGColumnToBackend = \case
ColumnScalar scalarType -> scalarType
ColumnEnumReference _ -> PGText
-- | Note: Unconditionally accepts null values and returns 'PGNull'.
parsePGScalarValue
:: forall m. (MonadError QErr m) => ColumnType 'Postgres -> Value -> m (WithScalarType PGScalarValue)
:: forall m. (MonadError QErr m) => ColumnType 'Postgres -> Value -> m PGScalarValue
parsePGScalarValue columnType value = case columnType of
ColumnScalar scalarType ->
WithScalarType scalarType <$> runAesonParser (parsePGValue scalarType) value
ColumnScalar scalarType -> runAesonParser (parsePGValue scalarType) value
ColumnEnumReference (EnumReference tableName enumValues) ->
WithScalarType PGText <$> (maybe (pure $ PGNull PGText) parseEnumValue =<< decodeValue value)
maybe (pure $ PGNull PGText) parseEnumValue =<< decodeValue value
where
parseEnumValue :: G.Name -> m PGScalarValue
parseEnumValue enumValueName = do
@ -135,20 +142,18 @@ parsePGScalarValue columnType value = case columnType of
parsePGScalarValues
:: (MonadError QErr m)
=> ColumnType 'Postgres -> [Value] -> m (WithScalarType [PGScalarValue])
parsePGScalarValues columnType values = do
scalarValues <- indexedMapM (fmap pstValue . parsePGScalarValue columnType) values
pure $ WithScalarType (unsafePGColumnToBackend columnType) scalarValues
=> ColumnType 'Postgres -> [Value] -> m [PGScalarValue]
parsePGScalarValues columnType values =
indexedMapM (parsePGScalarValue columnType) values
parseTxtEncodedPGValue
:: (MonadError QErr m)
=> ColumnType 'Postgres -> TxtEncodedPGVal -> m (WithScalarType PGScalarValue)
=> ColumnType 'Postgres -> TxtEncodedPGVal -> m PGScalarValue
parseTxtEncodedPGValue colTy val =
parsePGScalarValue colTy $ case val of
TENull -> Null
TELit t -> String t
-- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of
-- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the
-- 'pcirReferences' field and other table data to eventually resolve the type to a 'PGColumnType'.

View File

@ -143,40 +143,40 @@ class
, ToTxt (Column b)
, Typeable b
) => Backend (b :: BackendType) where
type Identifier b :: Type
type Alias b :: Type
type TableName b :: Type
type FunctionName b :: Type
type ConstraintName b :: Type
type BasicOrderType b :: Type
type NullsOrderType b :: Type
type CountType b :: Type
type Column b :: Type
type ColumnValueType b :: Type
type ScalarType b :: Type
type SQLExpression b :: Type
type SQLOperator b :: Type
type XAILIKE b :: Type
type XANILIKE b :: Type
type Identifier b :: Type
type Alias b :: Type
type TableName b :: Type
type FunctionName b :: Type
type ConstraintName b :: Type
type BasicOrderType b :: Type
type NullsOrderType b :: Type
type CountType b :: Type
type Column b :: Type
type ScalarValue b :: Type
type ScalarType b :: Type
type SQLExpression b :: Type
type SQLOperator b :: Type
type XAILIKE b :: Type
type XANILIKE b :: Type
isComparableType :: ScalarType b -> Bool
isNumType :: ScalarType b -> Bool
instance Backend 'Postgres where
type Identifier 'Postgres = PG.Identifier
type Alias 'Postgres = PG.Alias
type TableName 'Postgres = PG.QualifiedTable
type FunctionName 'Postgres = PG.QualifiedFunction
type ConstraintName 'Postgres = PG.ConstraintName
type BasicOrderType 'Postgres = PG.OrderType
type NullsOrderType 'Postgres = PG.NullsOrder
type CountType 'Postgres = PG.CountType
type Column 'Postgres = PG.PGCol
type ColumnValueType 'Postgres = PG.WithScalarType PG.PGScalarValue
type ScalarType 'Postgres = PG.PGScalarType
type SQLExpression 'Postgres = PG.SQLExp
type SQLOperator 'Postgres = PG.SQLOp
type XAILIKE 'Postgres = ()
type XANILIKE 'Postgres = ()
type Identifier 'Postgres = PG.Identifier
type Alias 'Postgres = PG.Alias
type TableName 'Postgres = PG.QualifiedTable
type FunctionName 'Postgres = PG.QualifiedFunction
type ConstraintName 'Postgres = PG.ConstraintName
type BasicOrderType 'Postgres = PG.OrderType
type NullsOrderType 'Postgres = PG.NullsOrder
type CountType 'Postgres = PG.CountType
type Column 'Postgres = PG.PGCol
type ScalarValue 'Postgres = PG.PGScalarValue
type ScalarType 'Postgres = PG.PGScalarType
type SQLExpression 'Postgres = PG.SQLExp
type SQLOperator 'Postgres = PG.SQLOp
type XAILIKE 'Postgres = ()
type XANILIKE 'Postgres = ()
isComparableType = PG.isComparableType
isNumType = PG.isNumType