server: all remaining IR changes (#75)

Co-authored-by: Antoine Leblanc <antoine@hasura.io>
GITHUB_PR_NUMBER: 6233
GITHUB_PR_URL: https://github.com/hasura/graphql-engine/pull/6233

Co-authored-by: Antoine Leblanc <antoine@hasura.io>
Co-authored-by: Auke Booij <auke@hasura.io>
GitOrigin-RevId: 268cdad529ad5d9bebeb5b881fda5413ea9d7715
hasura-bot 2020-11-25 15:18:58 +01:00
parent 29925eb08d
commit 64743cb189
40 changed files with 487 additions and 447 deletions

View File

@ -313,6 +313,7 @@ library
, Hasura.Backends.Postgres.Translate.Mutation
, Hasura.Backends.Postgres.Translate.Returning
, Hasura.Backends.Postgres.Translate.Select
, Hasura.Backends.Postgres.Translate.Types
, Hasura.Backends.Postgres.Translate.Update
, Hasura.Backends.Postgres.SQL.DML
, Hasura.Backends.Postgres.SQL.Error

View File

@ -45,7 +45,7 @@ import Hasura.RQL.DML.Internal
import Hasura.RQL.IR.RemoteJoin
import Hasura.RQL.IR.Returning
import Hasura.RQL.IR.Select
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (Alias)
import Hasura.Server.Version (HasVersion)
import Hasura.Session
@ -127,7 +127,7 @@ parseGraphQLName txt = onNothing (G.mkName txt) (throw400 RemoteSchemaError $ er
-- | Generate the alias for a remote field.
pathToAlias :: (MonadError QErr m) => FieldPath -> Counter -> m Alias
pathToAlias path counter = do
pathToAlias path counter =
parseGraphQLName $ T.intercalate "_" (map getFieldNameTxt $ unFieldPath path)
<> "__" <> (T.pack . show . unCounter) counter
@ -656,8 +656,8 @@ substituteVariables values = traverse go
where
go = \case
G.VVariable variableName ->
onNothing (Map.lookup variableName values) $
Failure ["Value for variable " <> variableName <<> " not provided"]
Map.lookup variableName values
`onNothing` Failure ["Value for variable " <> variableName <<> " not provided"]
G.VList listValue ->
fmap G.VList (traverse go listValue)
G.VObject objectValue ->
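
This hunk (like the later remote-schema and pgColumnToVariable changes) rewrites prefix onNothing calls into the infix backtick style, putting the Maybe first and the fallback second. A minimal self-contained sketch of the pattern, with a local stand-in for the Hasura.Prelude combinator onNothing, and Either standing in for the Validation-style Failure used above:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.HashMap.Strict as Map
import           Data.Text           (Text)

-- stand-in for the combinator exported by Hasura.Prelude
onNothing :: Applicative f => Maybe a -> f a -> f a
onNothing m act = maybe act pure m

-- Left plays the role of Failure above; Right is success
lookupVariable :: Map.HashMap Text Int -> Text -> Either [Text] Int
lookupVariable values name =
  -- prefix form (removed):  onNothing (Map.lookup name values) (Left [...])
  -- infix form (added): the Maybe comes first, the fallback second
  Map.lookup name values
    `onNothing` Left ["Value for variable " <> name <> " not provided"]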

View File

@ -20,7 +20,7 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.Types
type OpRhsParser m v =
PGType PGColumnType -> Value -> m v
PGType (ColumnType 'Postgres) -> Value -> m v
-- | Represents a reference to a Postgres column, possibly cast an arbitrary
-- number of times. Used within 'parseOperationsExpression' for bookkeeping.
@ -115,13 +115,13 @@ parseOperationsExpression rhsParser fim columnInfo =
"$contains" -> guardType [PGJSONB] >> AContains <$> parseOne
"_contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
"$contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
"_has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
"$has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
"_has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (ColumnScalar PGText)
"$has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (ColumnScalar PGText)
"_has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
"$has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
"_has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
"$has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
"_has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (ColumnScalar PGText)
"$has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (ColumnScalar PGText)
"_has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (ColumnScalar PGText)
"$has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (ColumnScalar PGText)
-- geometry types
"_st_contains" -> parseGeometryOp ASTContains
@ -196,7 +196,7 @@ parseOperationsExpression rhsParser fim columnInfo =
parsedCastOperations <-
forM (M.toList castOperations) $ \(targetTypeName, castedComparisons) -> do
let targetType = textToPGScalarType targetTypeName
castedColumn = ColumnReferenceCast column (PGColumnScalar targetType)
castedColumn = ColumnReferenceCast column (ColumnScalar targetType)
checkValidCast targetType
parsedCastedComparisons <- withPathK targetTypeName $
parseOperations castedColumn castedComparisons
@ -204,8 +204,8 @@ parseOperationsExpression rhsParser fim columnInfo =
return . ACast $ M.fromList parsedCastOperations
checkValidCast targetType = case (colTy, targetType) of
(PGColumnScalar PGGeometry, PGGeography) -> return ()
(PGColumnScalar PGGeography, PGGeometry) -> return ()
(ColumnScalar PGGeometry, PGGeography) -> return ()
(ColumnScalar PGGeography, PGGeometry) -> return ()
_ -> throw400 UnexpectedPayload $
"cannot cast column of type " <> colTy <<> " to type " <>> targetType
@ -215,16 +215,16 @@ parseOperationsExpression rhsParser fim columnInfo =
guardType geoTypes >> f <$> parseOneNoSess colTy val
parseSTDWithinObj = case colTy of
PGColumnScalar PGGeometry -> do
ColumnScalar PGGeometry -> do
DWithinGeomOp distVal fromVal <- parseVal
dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
dist <- withPathK "distance" $ parseOneNoSess (ColumnScalar PGFloat) distVal
from <- withPathK "from" $ parseOneNoSess colTy fromVal
return $ ASTDWithinGeom $ DWithinGeomOp dist from
PGColumnScalar PGGeography -> do
ColumnScalar PGGeography -> do
DWithinGeogOp distVal fromVal sphVal <- parseVal
dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
dist <- withPathK "distance" $ parseOneNoSess (ColumnScalar PGFloat) distVal
from <- withPathK "from" $ parseOneNoSess colTy fromVal
useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess (PGColumnScalar PGBoolean) sphVal
useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess (ColumnScalar PGBoolean) sphVal
return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid
_ -> throwError $ buildMsg colTy [PGGeometry, PGGeography]
@ -258,14 +258,14 @@ parseOperationsExpression rhsParser fim columnInfo =
-- This convoluted expression instead of col = val
-- to handle the case of col : null
equalsBoolExpBuilder :: SQLExp 'Postgres -> SQLExp 'Postgres -> S.BoolExp
equalsBoolExpBuilder :: SQLExpression 'Postgres -> SQLExpression 'Postgres -> S.BoolExp
equalsBoolExpBuilder qualColExp rhsExp =
S.BEBin S.OrOp (S.BECompare S.SEQ qualColExp rhsExp)
(S.BEBin S.AndOp
(S.BENull qualColExp)
(S.BENull rhsExp))
notEqualsBoolExpBuilder :: SQLExp 'Postgres -> SQLExp 'Postgres -> S.BoolExp
notEqualsBoolExpBuilder :: SQLExpression 'Postgres -> SQLExpression 'Postgres -> S.BoolExp
notEqualsBoolExpBuilder qualColExp rhsExp =
S.BEBin S.OrOp (S.BECompare S.SNE qualColExp rhsExp)
(S.BEBin S.AndOp
@ -302,7 +302,7 @@ annColExp
annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do
colInfo <- askFieldInfo colInfoMap fieldName
case colInfo of
FIColumn (ColumnInfo _ _ _ (PGColumnScalar PGJSON) _ _) ->
FIColumn (ColumnInfo _ _ _ (ColumnScalar PGJSON) _ _) ->
throwError (err400 UnexpectedPayload "JSON column can not be part of where clause")
FIColumn pgi ->
AVCol pgi <$> parseOperationsExpression rhsParser colInfoMap pgi colVal
@ -377,13 +377,13 @@ foldBoolExp f = \case
BoolFld ce -> f ce
mkFieldCompExp
:: S.Qual -> FieldName -> OpExpG 'Postgres (SQLExp 'Postgres) -> S.BoolExp
:: S.Qual -> FieldName -> OpExpG 'Postgres (SQLExpression 'Postgres) -> S.BoolExp
mkFieldCompExp qual lhsField = mkCompExp (mkQField lhsField)
where
mkQCol = S.SEQIdentifier . S.QIdentifier qual . toIdentifier
mkQField = S.SEQIdentifier . S.QIdentifier qual . Identifier . getFieldNameTxt
mkCompExp :: SQLExp 'Postgres -> OpExpG 'Postgres (SQLExp 'Postgres) -> S.BoolExp
mkCompExp :: SQLExpression 'Postgres -> OpExpG 'Postgres (SQLExpression 'Postgres) -> S.BoolExp
mkCompExp lhs = \case
ACast casts -> mkCastsExp casts
AEQ False val -> equalsBoolExpBuilder lhs val
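
Most of the mechanical edits in this file follow the rename from PGColumnType, PGColumnScalar and PGColumnEnumReference to the backend-indexed ColumnType with constructors ColumnScalar and ColumnEnumReference. A self-contained sketch of the shape these patterns imply; the type family and EnumReference below are simplified stand-ins, not the real Hasura definitions:

{-# LANGUAGE DataKinds, LambdaCase, TypeFamilies #-}

data BackendType = Postgres | MySQL

-- stand-in for the ScalarType associated type of the Backend class
type family ScalarType (b :: BackendType)
type instance ScalarType 'Postgres = PGScalarType

data PGScalarType = PGText | PGJSONB | PGGeometry | PGGeography
  deriving (Show, Eq)

-- stand-in for the real EnumReference (which also carries the enum values)
newtype EnumReference (b :: BackendType) = EnumReference String

-- the backend-indexed column type this diff switches to:
--   PGColumnScalar PGText      becomes  ColumnScalar PGText :: ColumnType 'Postgres
--   PGColumnEnumReference ref  becomes  ColumnEnumReference ref
data ColumnType (b :: BackendType)
  = ColumnScalar !(ScalarType b)
  | ColumnEnumReference !(EnumReference b)

-- the kind of scalar-only dispatch rewritten above (cf. checkValidCast)
isGeoColumn :: ColumnType 'Postgres -> Bool
isGeoColumn = \case
  ColumnScalar PGGeometry  -> True
  ColumnScalar PGGeography -> True
  _                        -> False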

View File

@ -54,4 +54,4 @@ mkSelectExpFromColumnValues qt allCols = \case
txtEncodedToSQLExp colTy = \case
TENull -> S.SENull
TELit textValue ->
S.withTyAnn (unsafePGColumnToRepresentation colTy) $ S.SELit textValue
S.withTyAnn (unsafePGColumnToBackend colTy) $ S.SELit textValue

View File

@ -22,7 +22,7 @@ import Hasura.Backends.Postgres.Translate.Select
import Hasura.RQL.DML.Internal
import Hasura.RQL.IR.Returning
import Hasura.RQL.IR.Select
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (Identifier)
-- | The postgres common table expression (CTE) for mutation queries.

View File

@ -25,12 +25,13 @@ import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.SQL.Rewrite
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.Backends.Postgres.Translate.Types
import Hasura.EncJSON
import Hasura.GraphQL.Schema.Common
import Hasura.RQL.DML.Internal
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.IR.Select
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (Identifier)
import Hasura.SQL.Types
@ -229,7 +230,7 @@ mkArrayRelationAlias parentFieldName similarFieldsMap fieldName =
HM.lookupDefault [fieldName] fieldName similarFieldsMap
fromTableRowArgs
:: Identifier -> FunctionArgsExpTableRow S.SQLExp -> S.FunctionArgs
:: Identifier -> FunctionArgsExpTableRow 'Postgres S.SQLExp -> S.FunctionArgs
fromTableRowArgs pfx = toFunctionArgs . fmap toSQLExp
where
toFunctionArgs (FunctionArgsExp positional named) =
@ -826,7 +827,7 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields = do
fromScalarComputedField :: ComputedFieldScalarSelect 'Postgres S.SQLExp -> m S.SQLExp
fromScalarComputedField computedFieldScalar = do
strfyNum <- ask
pure $ toJSONableExp strfyNum (PGColumnScalar ty) False $ withColumnOp colOpM $
pure $ toJSONableExp strfyNum (ColumnScalar ty) False $ withColumnOp colOpM $
S.SEFunction $ S.FunctionExp fn (fromTableRowArgs sourcePrefix args) Nothing
where
ComputedFieldScalarSelect fn args ty colOpM = computedFieldScalar

View File

@ -0,0 +1,135 @@
{-# LANGUAGE UndecidableInstances #-}
module Hasura.Backends.Postgres.Translate.Types where
import Hasura.Prelude
import qualified Data.HashMap.Strict as HM
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.Backends.Postgres.SQL.Types as PG
import Hasura.RQL.IR.Select
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
data SourcePrefixes
= SourcePrefixes
{ _pfThis :: !PG.Identifier -- ^ Current source prefix
, _pfBase :: !PG.Identifier
-- ^ Base table source row identifier to generate
-- the table's column identifiers for computed field
-- function input parameters
} deriving (Show, Eq, Generic)
instance Hashable SourcePrefixes
data SelectSource (b :: BackendType)
= SelectSource
{ _ssPrefix :: !PG.Identifier
, _ssFrom :: !PG.FromItem
, _ssDistinct :: !(Maybe PG.DistinctExpr)
, _ssWhere :: !PG.BoolExp
, _ssOrderBy :: !(Maybe PG.OrderByExp)
, _ssLimit :: !(Maybe Int)
, _ssOffset :: !(Maybe (SQLExpression b))
} deriving (Generic)
instance Hashable (SelectSource 'Postgres)
deriving instance Show (SelectSource 'Postgres)
deriving instance Eq (SelectSource 'Postgres)
data SelectNode (b :: BackendType)
= SelectNode
{ _snExtractors :: !(HM.HashMap (Alias b) (SQLExpression b))
, _snJoinTree :: !(JoinTree b)
}
instance Semigroup (SelectNode 'Postgres) where
SelectNode lExtrs lJoinTree <> SelectNode rExtrs rJoinTree =
SelectNode (lExtrs <> rExtrs) (lJoinTree <> rJoinTree)
data ObjectSelectSource
= ObjectSelectSource
{ _ossPrefix :: !PG.Identifier
, _ossFrom :: !PG.FromItem
, _ossWhere :: !PG.BoolExp
} deriving (Show, Eq, Generic)
instance Hashable ObjectSelectSource
objectSelectSourceToSelectSource :: ObjectSelectSource -> (SelectSource backend)
objectSelectSourceToSelectSource ObjectSelectSource{..} =
SelectSource _ossPrefix _ossFrom Nothing _ossWhere Nothing Nothing Nothing
data ObjectRelationSource (b :: BackendType)
= ObjectRelationSource
{ _orsRelationshipName :: !RelName
, _orsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _orsSelectSource :: !ObjectSelectSource
} deriving (Generic)
instance Hashable (ObjectRelationSource 'Postgres)
deriving instance Eq (Column b) => Eq (ObjectRelationSource b)
data ArrayRelationSource (b :: BackendType)
= ArrayRelationSource
{ _arsAlias :: !(Alias b)
, _arsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _arsSelectSource :: !(SelectSource b)
} deriving (Generic)
instance Hashable (ArrayRelationSource 'Postgres)
deriving instance Eq (ArrayRelationSource 'Postgres)
data ArraySelectNode (b :: BackendType)
= ArraySelectNode
{ _asnTopExtractors :: ![PG.Extractor]
, _asnSelectNode :: !(SelectNode b)
}
instance Semigroup (ArraySelectNode 'Postgres) where
ArraySelectNode lTopExtrs lSelNode <> ArraySelectNode rTopExtrs rSelNode =
ArraySelectNode (lTopExtrs <> rTopExtrs) (lSelNode <> rSelNode)
data ComputedFieldTableSetSource (b :: BackendType)
= ComputedFieldTableSetSource
{ _cftssFieldName :: !FieldName
, _cftssSelectType :: !JsonAggSelect
, _cftssSelectSource :: !(SelectSource b)
} deriving (Generic)
instance Hashable (ComputedFieldTableSetSource 'Postgres)
deriving instance Show (ComputedFieldTableSetSource 'Postgres)
deriving instance Eq (ComputedFieldTableSetSource 'Postgres)
data ArrayConnectionSource (b :: BackendType)
= ArrayConnectionSource
{ _acsAlias :: !(Alias b)
, _acsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _acsSplitFilter :: !(Maybe PG.BoolExp)
, _acsSlice :: !(Maybe ConnectionSlice)
, _acsSource :: !(SelectSource b)
} deriving (Generic)
deriving instance Eq (ArrayConnectionSource 'Postgres)
instance Hashable (ArrayConnectionSource 'Postgres)
data JoinTree (b :: BackendType)
= JoinTree
{ _jtObjectRelations :: !(HM.HashMap (ObjectRelationSource b) (SelectNode b))
, _jtArrayRelations :: !(HM.HashMap (ArrayRelationSource b) (ArraySelectNode b))
, _jtArrayConnections :: !(HM.HashMap (ArrayConnectionSource b) (ArraySelectNode b))
, _jtComputedFieldTableSets :: !(HM.HashMap (ComputedFieldTableSetSource b) (SelectNode b))
}
instance Semigroup (JoinTree 'Postgres) where
JoinTree lObjs lArrs lArrConns lCfts <> JoinTree rObjs rArrs rArrConns rCfts =
JoinTree (HM.unionWith (<>) lObjs rObjs)
(HM.unionWith (<>) lArrs rArrs)
(HM.unionWith (<>) lArrConns rArrConns)
(HM.unionWith (<>) lCfts rCfts)
instance Monoid (JoinTree 'Postgres) where
mempty = JoinTree mempty mempty mempty mempty
data PermissionLimitSubQuery
= PLSQRequired !Int -- ^ Permission limit
| PLSQNotRequired
deriving (Show, Eq)
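
The Semigroup and Monoid instances above merge join trees map-by-map with HM.unionWith (<>), so extractors and relation sub-trees collected for independent fields can be combined without losing entries that land on the same key. A small self-contained sketch of that merge behaviour on a one-map analogue of JoinTree:

import qualified Data.HashMap.Strict as HM

-- a one-map analogue of JoinTree: relation name -> extracted column names
newtype MiniJoinTree = MiniJoinTree (HM.HashMap String [String])
  deriving (Show)

-- same shape as the JoinTree instance above: colliding keys are merged
-- with (<>) rather than overwritten
instance Semigroup MiniJoinTree where
  MiniJoinTree l <> MiniJoinTree r = MiniJoinTree (HM.unionWith (<>) l r)

instance Monoid MiniJoinTree where
  mempty = MiniJoinTree mempty

-- two fields selecting through the same "author" relation end up sharing
-- one join, with their extractors appended
example :: MiniJoinTree
example =
  MiniJoinTree (HM.singleton "author" ["id"])
    <> MiniJoinTree (HM.singleton "author" ["name"])
    <> MiniJoinTree (HM.singleton "articles" ["title"])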

View File

@ -12,8 +12,8 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.Backends.Postgres.Translate.Insert
import Hasura.Backends.Postgres.Translate.Returning
import Hasura.RQL.Instances ()
import Hasura.RQL.IR.Update
import Hasura.RQL.Instances ()
import Hasura.RQL.Types
@ -51,5 +51,5 @@ expandOperator infos (column, op) = S.SetExpItem $ (column,) $ case op of
asArray a = S.SETyAnn (S.SEArray a) S.textArrTypeAnn
asNum e = S.SETyAnn e $
case find (\info -> pgiColumn info == column) infos <&> pgiType of
Just (PGColumnScalar s) -> S.mkTypeAnn $ PGTypeScalar s
_ -> S.numericTypeAnn
Just (ColumnScalar s) -> S.mkTypeAnn $ PGTypeScalar s
_ -> S.numericTypeAnn

View File

@ -205,7 +205,7 @@ resolveActionMutationAsync
-> [HTTP.Header]
-> SessionVariables
-> m (tx EncJSON)
resolveActionMutationAsync annAction reqHeaders sessionVariables = do
resolveActionMutationAsync annAction reqHeaders sessionVariables =
pure $ liftTx do
actionId <- runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler [Q.sql|
INSERT INTO
@ -268,15 +268,15 @@ resolveAsyncActionQuery userInfo annAction =
-- TODO (from master):- Avoid using ColumnInfo
mkAnnFldFromPGCol column' columnType =
flip RS.mkAnnColumnField Nothing $
ColumnInfo (unsafePGCol column') (G.unsafeMkName column') 0 (PGColumnScalar columnType) True Nothing
ColumnInfo (unsafePGCol column') (G.unsafeMkName column') 0 (ColumnScalar columnType) True Nothing
tableBoolExpression =
let actionIdColumnInfo = ColumnInfo (unsafePGCol "id") $$(G.litName "id")
0 (PGColumnScalar PGUUID) False Nothing
0 (ColumnScalar PGUUID) False Nothing
actionIdColumnEq = BoolFld $ AVCol actionIdColumnInfo [AEQ True actionId]
sessionVarsColumnInfo = ColumnInfo (unsafePGCol "session_variables") $$(G.litName "session_variables")
0 (PGColumnScalar PGJSONB) False Nothing
sessionVarValue = flip UVParameter Nothing $ PGColumnValue (PGColumnScalar PGJSONB) $
0 (ColumnScalar PGJSONB) False Nothing
sessionVarValue = flip UVParameter Nothing $ PGColumnValue (ColumnScalar PGJSONB) $
WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON $ _uiSession userInfo
sessionVarsColumnEq = BoolFld $ AVCol sessionVarsColumnInfo [AEQ True sessionVarValue]
@ -519,12 +519,12 @@ mkJsonAggSelect =
bool RS.JASSingleObject RS.JASMultipleRows . isListType
processOutputSelectionSet
:: RS.ArgumentExp v
:: RS.ArgumentExp 'Postgres v
-> GraphQLType
-> [(Column backend, ScalarType backend)]
-> RS.AnnFieldsG backend v
-> [(Column 'Postgres, ScalarType 'Postgres)]
-> RS.AnnFieldsG 'Postgres v
-> Bool
-> RS.AnnSimpleSelG backend v
-> RS.AnnSimpleSelG 'Postgres v
processOutputSelectionSet tableRowInput actionOutputType definitionList annotatedFields =
RS.AnnSelectG annotatedFields selectFrom RS.noTablePermissions RS.noSelectArgs
where

View File

@ -245,9 +245,9 @@ insertArrRel env resCols remoteJoinCtx planVars stringifyNum arrRelIns =
-- | insert object relations and additional columns from parent
validateInsert
:: (MonadError QErr m)
=> [PGCol] -- ^ inserting columns
-> [RelInfo] -- ^ object relation inserts
-> [PGCol] -- ^ additional fields from parent
=> [PGCol] -- ^ inserting columns
-> [RelInfo 'Postgres] -- ^ object relation inserts
-> [PGCol] -- ^ additional fields from parent
-> m ()
validateInsert insCols objRels addCols = do
-- validate insertCols

View File

@ -28,21 +28,21 @@ import Hasura.Prelude
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.Extended as J
import qualified Data.ByteString as B
import qualified Data.HashSet as Set
import qualified Data.Aeson.TH as J
import qualified Data.ByteString as B
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Data.UUID.V4 as UUID
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Database.PG.Query.PTI as PTI
import qualified Language.GraphQL.Draft.Syntax as G
import qualified PostgreSQL.Binary.Encoding as PE
import Control.Lens
import Data.UUID (UUID)
import Data.Semigroup.Generic
import Data.UUID (UUID)
import qualified Hasura.Backends.Postgres.Execute.RemoteJoin as RR
import qualified Hasura.Backends.Postgres.SQL.DML as S
@ -337,7 +337,7 @@ data ReusableLiveQueryPlan
{ _rlqpParameterizedPlan :: !ParameterizedLiveQueryPlan
, _rlqpRequiredSessionVariables :: !(Set.HashSet SessionVariable)
, _rlqpSyntheticVariableValues :: !ValidatedSyntheticVariables
, _rlqpQueryVariableTypes :: HashMap G.Name PGColumnType
, _rlqpQueryVariableTypes :: HashMap G.Name (ColumnType 'Postgres)
} deriving (Show)
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ReusableLiveQueryPlan)

View File

@ -33,6 +33,7 @@ import Hasura.GraphQL.Parser.Internal.Parser
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.Types.Column hiding (EnumValue (..), EnumValueInfo (..))
import Hasura.RQL.Types.Error
import Hasura.SQL.Backend
import Hasura.SQL.Types
import Hasura.Session (SessionVariable)
@ -62,7 +63,7 @@ data UnpreparedValue
| UVSessionVar (PGType PGScalarType) SessionVariable
data PGColumnValue = PGColumnValue
{ pcvType :: PGColumnType
{ pcvType :: ColumnType 'Postgres
, pcvValue :: WithScalarType PGScalarValue
}
@ -73,7 +74,7 @@ mkParameter (Opaque variable value) = UVParameter value variable
column
:: (MonadSchema n m, MonadError QErr m)
=> PGColumnType
=> ColumnType 'Postgres
-> Nullability
-> m (Parser 'Both n (Opaque PGColumnValue))
column columnType (Nullability isNullable) =
@ -81,7 +82,7 @@ column columnType (Nullability isNullable) =
-- recursive simply for performance reasons, since it's likely to be hammered
-- during schema generation. Need to profile to see whether or not it's a win.
opaque . fmap (PGColumnValue columnType) <$> case columnType of
PGColumnScalar scalarType -> withScalarType scalarType <$> case scalarType of
ColumnScalar scalarType -> withScalarType scalarType <$> case scalarType of
PGInteger -> pure (PGValInteger <$> int)
PGBoolean -> pure (PGValBoolean <$> boolean)
PGFloat -> pure (PGValDouble <$> float)
@ -104,7 +105,7 @@ column columnType (Nullability isNullable) =
valueToJSON (toGraphQLType schemaType) >=>
either (parseErrorWith ParseFailed . qeError) pure . runAesonParser (parsePGValue scalarType)
}
PGColumnEnumReference (EnumReference tableName enumValues) ->
ColumnEnumReference (EnumReference tableName enumValues) ->
case nonEmpty (M.toList enumValues) of
Just enumValuesList -> do
name <- qualifiedObjectToName tableName <&> (<> $$(litName "_enum"))

View File

@ -115,9 +115,9 @@ actionAsyncQuery actionInfo = runMaybeT do
actionId <- lift actionIdParser
actionOutputParser <- lift $ actionOutputFields outputObject
createdAtFieldParser <-
lift $ P.column (PGColumnScalar PGTimeStampTZ) (G.Nullability False)
lift $ P.column (ColumnScalar PGTimeStampTZ) (G.Nullability False)
errorsFieldParser <-
lift $ P.column (PGColumnScalar PGJSON) (G.Nullability True)
lift $ P.column (ColumnScalar PGJSON) (G.Nullability True)
let fieldName = unActionName actionName
description = G.Description <$> comment
@ -161,7 +161,7 @@ actionIdParser
:: (MonadSchema n m, MonadError QErr m)
=> m (Parser 'Both n UnpreparedValue)
actionIdParser =
fmap P.mkParameter <$> P.column (PGColumnScalar PGUUID) (G.Nullability False)
fmap P.mkParameter <$> P.column (ColumnScalar PGUUID) (G.Nullability False)
actionOutputFields
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m, Has QueryContext r)
@ -185,7 +185,7 @@ actionOutputFields outputObject = do
fieldName = unObjectFieldName name
-- FIXME? (from master)
pgColumnInfo = ColumnInfo (unsafePGCol $ G.unName fieldName)
fieldName 0 (PGColumnScalar PGJSON) (G.isNullable gType) Nothing
fieldName 0 (ColumnScalar PGJSON) (G.isNullable gType) Nothing
fieldParser = case objectFieldType of
AOFTScalar def -> customScalarParser def
AOFTEnum def -> customEnumParser def
@ -222,7 +222,7 @@ mkDefinitionList ObjectTypeDefinition{..} =
(unsafePGCol . G.unName . unObjectFieldName $ _ofdName,) $
case Map.lookup _ofdName fieldReferences of
Nothing -> fieldTypeToScalarType $ snd _ofdType
Just columnInfo -> unsafePGColumnToRepresentation $ pgiType columnInfo
Just columnInfo -> unsafePGColumnToBackend $ pgiType columnInfo
where
fieldReferences =
Map.unions $ map _trFieldMapping $ maybe [] toList _otdRelationships

View File

@ -83,7 +83,7 @@ boolExp table selectPermissions = memoizeOn 'boolExp table $ do
comparisonExps
:: forall m n. (MonadSchema n m, MonadError QErr m)
=> PGColumnType -> m (Parser 'Input n [ComparisonExp 'Postgres])
=> ColumnType 'Postgres -> m (Parser 'Input n [ComparisonExp 'Postgres])
comparisonExps = P.memoize 'comparisonExps \columnType -> do
geogInputParser <- geographyWithinDistanceInput
geomInputParser <- geometryWithinDistanceInput
@ -91,8 +91,8 @@ comparisonExps = P.memoize 'comparisonExps \columnType -> do
ingInputParser <- intersectsNbandGeomInput
-- see Note [Columns in comparison expression are never nullable]
columnParser <- P.column columnType (G.Nullability False)
nullableTextParser <- P.column (PGColumnScalar PGText) (G.Nullability True)
textParser <- P.column (PGColumnScalar PGText) (G.Nullability False)
nullableTextParser <- P.column (ColumnScalar PGText) (G.Nullability True)
textParser <- P.column (ColumnScalar PGText) (G.Nullability False)
maybeCastParser <- castExp columnType
let name = P.getName columnParser <> $$(G.litName "_comparison_exp")
desc = G.Description $ "Boolean expression to compare columns of type "
@ -164,10 +164,10 @@ comparisonExps = P.memoize 'comparisonExps \columnType -> do
(AHasKey . mkParameter <$> nullableTextParser)
, P.fieldOptional $$(G.litName "_has_keys_any")
(Just "do any of these strings exist as top-level keys in the column")
(AHasKeysAny . mkListLiteral (PGColumnScalar PGText) <$> textListParser)
(AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser)
, P.fieldOptional $$(G.litName "_has_keys_all")
(Just "do all of these strings exist as top-level keys in the column")
(AHasKeysAll . mkListLiteral (PGColumnScalar PGText) <$> textListParser)
(AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser)
]
-- Ops for Geography type
, guard (isScalarColumnWhere (== PGGeography) columnType) *>
@ -207,22 +207,22 @@ comparisonExps = P.memoize 'comparisonExps \columnType -> do
]
]
where
mkListLiteral :: PGColumnType -> [P.PGColumnValue] -> UnpreparedValue
mkListLiteral :: ColumnType 'Postgres -> [P.PGColumnValue] -> UnpreparedValue
mkListLiteral columnType columnValues = P.UVLiteral $ SETyAnn
(SEArray $ txtEncoder . pstValue . P.pcvValue <$> columnValues)
(mkTypeAnn $ PGTypeArray $ unsafePGColumnToRepresentation columnType)
(mkTypeAnn $ PGTypeArray $ unsafePGColumnToBackend columnType)
castExp :: PGColumnType -> m (Maybe (Parser 'Input n (CastExp 'Postgres UnpreparedValue)))
castExp :: ColumnType 'Postgres -> m (Maybe (Parser 'Input n (CastExp 'Postgres UnpreparedValue)))
castExp sourceType = do
let maybeScalars = case sourceType of
PGColumnScalar PGGeography -> Just (PGGeography, PGGeometry)
PGColumnScalar PGGeometry -> Just (PGGeometry, PGGeography)
_ -> Nothing
ColumnScalar PGGeography -> Just (PGGeography, PGGeometry)
ColumnScalar PGGeometry -> Just (PGGeometry, PGGeography)
_ -> Nothing
forM maybeScalars $ \(sourceScalar, targetScalar) -> do
sourceName <- P.mkScalarTypeName sourceScalar <&> (<> $$(G.litName "_cast_exp"))
targetName <- P.mkScalarTypeName targetScalar
targetOpExps <- comparisonExps $ PGColumnScalar targetScalar
targetOpExps <- comparisonExps $ ColumnScalar targetScalar
let field = P.fieldOptional targetName Nothing $ (targetScalar, ) <$> targetOpExps
pure $ P.object sourceName Nothing $ M.fromList . maybeToList <$> field
@ -230,15 +230,15 @@ geographyWithinDistanceInput
:: forall m n. (MonadSchema n m, MonadError QErr m)
=> m (Parser 'Input n (DWithinGeogOp UnpreparedValue))
geographyWithinDistanceInput = do
geographyParser <- P.column (PGColumnScalar PGGeography) (G.Nullability False)
geographyParser <- P.column (ColumnScalar PGGeography) (G.Nullability False)
-- FIXME
-- It doesn't make sense for this value to be nullable; it is only for
-- backwards compatibility; if an explicit Null value is given, it will be
-- forwarded to the underlying SQL function, which in turn treats a null value
-- as an error. We can fix this by rejecting explicit null values and marking
-- this field non-nullable in a future release.
booleanParser <- P.column (PGColumnScalar PGBoolean) (G.Nullability True)
floatParser <- P.column (PGColumnScalar PGFloat) (G.Nullability False)
booleanParser <- P.column (ColumnScalar PGBoolean) (G.Nullability True)
floatParser <- P.column (ColumnScalar PGFloat) (G.Nullability False)
pure $ P.object $$(G.litName "st_d_within_geography_input") Nothing $
DWithinGeogOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
<*> (mkParameter <$> P.field $$(G.litName "from") Nothing geographyParser)
@ -248,8 +248,8 @@ geometryWithinDistanceInput
:: forall m n. (MonadSchema n m, MonadError QErr m)
=> m (Parser 'Input n (DWithinGeomOp UnpreparedValue))
geometryWithinDistanceInput = do
geometryParser <- P.column (PGColumnScalar PGGeometry) (G.Nullability False)
floatParser <- P.column (PGColumnScalar PGFloat) (G.Nullability False)
geometryParser <- P.column (ColumnScalar PGGeometry) (G.Nullability False)
floatParser <- P.column (ColumnScalar PGFloat) (G.Nullability False)
pure $ P.object $$(G.litName "st_d_within_input") Nothing $
DWithinGeomOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
<*> (mkParameter <$> P.field $$(G.litName "from") Nothing geometryParser)
@ -258,8 +258,8 @@ intersectsNbandGeomInput
:: forall m n. (MonadSchema n m, MonadError QErr m)
=> m (Parser 'Input n (STIntersectsNbandGeommin UnpreparedValue))
intersectsNbandGeomInput = do
geometryParser <- P.column (PGColumnScalar PGGeometry) (G.Nullability False)
integerParser <- P.column (PGColumnScalar PGInteger) (G.Nullability False)
geometryParser <- P.column (ColumnScalar PGGeometry) (G.Nullability False)
integerParser <- P.column (ColumnScalar PGInteger) (G.Nullability False)
pure $ P.object $$(G.litName "st_intersects_nband_geom_input") Nothing $
STIntersectsNbandGeommin <$> (mkParameter <$> P.field $$(G.litName "nband") Nothing integerParser)
<*> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
@ -268,8 +268,8 @@ intersectsGeomNbandInput
:: forall m n. (MonadSchema n m, MonadError QErr m)
=> m (Parser 'Input n (STIntersectsGeomminNband UnpreparedValue))
intersectsGeomNbandInput = do
geometryParser <- P.column (PGColumnScalar PGGeometry) (G.Nullability False)
integerParser <- P.column (PGColumnScalar PGInteger) (G.Nullability False)
geometryParser <- P.column (ColumnScalar PGGeometry) (G.Nullability False)
integerParser <- P.column (ColumnScalar PGInteger) (G.Nullability False)
pure $ P.object $$(G.litName "st_intersects_geom_nband_input") Nothing $ STIntersectsGeomminNband
<$> ( mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
<*> (fmap mkParameter <$> P.fieldOptional $$(G.litName "nband") Nothing integerParser)

View File

@ -399,9 +399,9 @@ updateOperators table updatePermissions = do
pure $ presetColumns <> flattenedExps
where
columnParser columnInfo = fmap P.mkParameter <$> P.column (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
nonNullableTextParser _ = fmap P.mkParameter <$> P.column (PGColumnScalar PGText) (G.Nullability False)
nullableTextParser _ = fmap P.mkParameter <$> P.column (PGColumnScalar PGText) (G.Nullability True)
nonNullableIntParser _ = fmap P.mkParameter <$> P.column (PGColumnScalar PGInteger) (G.Nullability False)
nonNullableTextParser _ = fmap P.mkParameter <$> P.column (ColumnScalar PGText) (G.Nullability False)
nullableTextParser _ = fmap P.mkParameter <$> P.column (ColumnScalar PGText) (G.Nullability True)
nonNullableIntParser _ = fmap P.mkParameter <$> P.column (ColumnScalar PGInteger) (G.Nullability False)
updateOperator
:: G.Name

View File

@ -766,7 +766,7 @@ tableConnectionArgs pkeyColumns table selectPermissions = do
IR.AOCObjectRelation _ _ obCol -> getOrderByColumnType obCol
IR.AOCArrayAggregation _ _ aggOb ->
case aggOb of
IR.AAOCount -> PGColumnScalar PGInteger
IR.AAOCount -> ColumnScalar PGInteger
IR.AAOOp _ colInfo -> pgiType colInfo
-- | Aggregation fields
@ -917,7 +917,7 @@ fieldSelection table maybePkeyColumns fieldInfo selectPermissions =
-- | Field parsers for a table relationship
relationshipField
:: (MonadSchema n m, MonadTableInfo r m, MonadRole r m, Has QueryContext r)
=> RelInfo -> m (Maybe [FieldParser n (AnnotatedField 'Postgres)])
=> RelInfo 'Postgres -> m (Maybe [FieldParser n (AnnotatedField 'Postgres)])
relationshipField relationshipInfo = runMaybeT do
let otherTable = riRTable relationshipInfo
colMapping = riMapping relationshipInfo
@ -973,14 +973,14 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
guard $ _cfiName `Set.member` spiScalarComputedFields selectPermissions
let fieldArgsParser = do
args <- functionArgsParser
colOp <- jsonPathArg $ PGColumnScalar scalarReturnType
colOp <- jsonPathArg $ ColumnScalar scalarReturnType
pure $ IR.AFComputedField $ IR.CFSScalar $ IR.ComputedFieldScalarSelect
{ IR._cfssFunction = _cffName _cfiFunction
, IR._cfssType = scalarReturnType
, IR._cfssColumnOp = colOp
, IR._cfssArguments = args
}
dummyParser <- lift $ P.column (PGColumnScalar scalarReturnType) (G.Nullability True)
dummyParser <- lift $ P.column (ColumnScalar scalarReturnType) (G.Nullability True)
pure $ P.selection fieldName (Just fieldDescription) fieldArgsParser dummyParser
CFRSetofTable tableName -> do
remotePerms <- MaybeT $ tableSelectPermissions tableName
@ -1002,7 +1002,7 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
in mkDescriptionWith (_cffDescription _cfiFunction) defaultDescription
computedFieldFunctionArgs
:: ComputedFieldFunction -> m (InputFieldsParser n (IR.FunctionArgsExpTableRow UnpreparedValue))
:: ComputedFieldFunction -> m (InputFieldsParser n (IR.FunctionArgsExpTableRow 'Postgres UnpreparedValue))
computedFieldFunctionArgs ComputedFieldFunction{..} =
functionArgs _cffName (IAUserProvided <$> _cffInputArgs) <&> fmap addTableAndSessionArgument
where
@ -1031,12 +1031,8 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
remoteSchemasFieldDefns <- asks $ qcRemoteFields . getter
let remoteSchemaName = _rfiRemoteSchemaName remoteFieldInfo
fieldDefns <-
case Map.lookup remoteSchemaName remoteSchemasFieldDefns of
Nothing ->
throw500 $ "unexpected: remote schema "
<> remoteSchemaName
<<> " not found"
Just fieldDefns -> pure fieldDefns
Map.lookup remoteSchemaName remoteSchemasFieldDefns
`onNothing` throw500 ("unexpected: remote schema " <> remoteSchemaName <<> " not found")
fieldName <- textToName $ remoteRelationshipNameToText $ _rfiName remoteFieldInfo
remoteFieldsArgumentsParser <-
@ -1069,7 +1065,7 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
customSQLFunctionArgs
:: (MonadSchema n m, MonadTableInfo r m)
=> FunctionInfo
-> m (InputFieldsParser n (IR.FunctionArgsExpTableRow UnpreparedValue))
-> m (InputFieldsParser n (IR.FunctionArgsExpTableRow 'Postgres UnpreparedValue))
customSQLFunctionArgs FunctionInfo{..} = functionArgs fiName fiInputArgs
-- | Parses the arguments to the underlying sql function of a computed field or
@ -1087,7 +1083,7 @@ functionArgs
:: forall m n r. (MonadSchema n m, MonadTableInfo r m)
=> QualifiedFunction
-> Seq.Seq FunctionInputArgument
-> m (InputFieldsParser n (IR.FunctionArgsExpTableRow UnpreparedValue))
-> m (InputFieldsParser n (IR.FunctionArgsExpTableRow 'Postgres UnpreparedValue))
functionArgs functionName (toList -> inputArgs) = do
-- First, we iterate through the original sql arguments in order, to find the
-- corresponding graphql names. At the same time, we create the input field
@ -1137,16 +1133,16 @@ functionArgs functionName (toList -> inputArgs) = do
pure $ P.field fieldName (Just fieldDesc) objectParser
where
sessionPlaceholder :: IR.ArgumentExp UnpreparedValue
sessionPlaceholder :: IR.ArgumentExp 'Postgres UnpreparedValue
sessionPlaceholder = IR.AEInput P.UVSession
splitArguments
:: Int
-> FunctionInputArgument
-> (Int, ( [Text] -- graphql names, in order
, [(Text, IR.ArgumentExp UnpreparedValue)] -- session argument
, [m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp UnpreparedValue)))] -- optional argument
, [m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp UnpreparedValue)))] -- mandatory argument
, [(Text, IR.ArgumentExp 'Postgres UnpreparedValue)] -- session argument
, [m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp 'Postgres UnpreparedValue)))] -- optional argument
, [m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp 'Postgres UnpreparedValue)))] -- mandatory argument
)
)
splitArguments positionalIndex (IASessionVariables name) =
@ -1160,9 +1156,9 @@ functionArgs functionName (toList -> inputArgs) = do
then (newIndex, ([argName], [], [parseArgument arg argName], []))
else (newIndex, ([argName], [], [], [parseArgument arg argName]))
parseArgument :: FunctionArg -> Text -> m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp UnpreparedValue)))
parseArgument :: FunctionArg -> Text -> m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp 'Postgres UnpreparedValue)))
parseArgument arg name = do
columnParser <- P.column (PGColumnScalar $ _qptName $ faType arg) (G.Nullability True)
columnParser <- P.column (ColumnScalar $ _qptName $ faType arg) (G.Nullability True)
fieldName <- textToName name
-- While some arguments are "mandatory" (i.e. they don't have a default
@ -1179,9 +1175,9 @@ functionArgs functionName (toList -> inputArgs) = do
pure $ argParser `mapField` ((name,) . IR.AEInput . mkParameter)
namedArgument
:: HashMap Text (IR.ArgumentExp UnpreparedValue)
:: HashMap Text (IR.ArgumentExp 'Postgres UnpreparedValue)
-> (Text, InputArgument FunctionArg)
-> n (Maybe (Text, IR.ArgumentExp UnpreparedValue))
-> n (Maybe (Text, IR.ArgumentExp 'Postgres UnpreparedValue))
namedArgument dictionary (name, inputArgument) = case inputArgument of
IASessionVariables _ -> pure $ Just (name, sessionPlaceholder)
IAUserProvided arg -> case Map.lookup name dictionary of
@ -1193,7 +1189,7 @@ functionArgs functionName (toList -> inputArgs) = do
-- | The "path" argument for json column fields
jsonPathArg :: MonadParse n => PGColumnType -> InputFieldsParser n (Maybe (IR.ColumnOp 'Postgres))
jsonPathArg :: MonadParse n => ColumnType 'Postgres -> InputFieldsParser n (Maybe (IR.ColumnOp 'Postgres))
jsonPathArg columnType
| isScalarColumnWhere isJSONType columnType =
P.fieldOptional fieldName description P.string `P.bindFields` fmap join . traverse toColExp

View File

@ -132,7 +132,7 @@ addEventTriggerToCatalog
:: QualifiedTable
-> EventTriggerConf
-> Q.TxE QErr ()
addEventTriggerToCatalog qt etc = do
addEventTriggerToCatalog qt etc =
Q.unitQE defaultTxErrorHandler
[Q.sql|
INSERT into hdb_catalog.event_triggers
@ -154,7 +154,7 @@ delEventTriggerFromCatalog trn = do
archiveEvents trn
archiveEvents :: TriggerName -> Q.TxE QErr ()
archiveEvents trn = do
archiveEvents trn =
Q.unitQE defaultTxErrorHandler [Q.sql|
UPDATE hdb_catalog.event_log
SET archived = 't'

View File

@ -129,7 +129,7 @@ saveMetadata (Metadata tables functions
indexedForM_ collections $ \c -> liftTx $ Collection.addCollectionToCatalog c systemDefined
-- allow list
withPathK "allowlist" $ do
withPathK "allowlist" $
indexedForM_ allowlist $ \(Collection.CollectionReq name) ->
liftTx $ Collection.addCollectionToAllowlistCatalog name
@ -143,7 +143,7 @@ saveMetadata (Metadata tables functions
-- cron triggers
withPathK "cron_triggers" $
indexedForM_ cronTriggers $ \ct -> liftTx $ do
indexedForM_ cronTriggers $ \ct -> liftTx $
addCronTriggerToCatalog ct
-- actions

View File

@ -87,7 +87,7 @@ objRelP2Setup
=> QualifiedTable
-> HashSet ForeignKey
-> RelDef ObjRelUsing
-> m (RelInfo, [SchemaDependency])
-> m (RelInfo 'Postgres, [SchemaDependency])
objRelP2Setup qt foreignKeys (RelDef rn ru _) = case ru of
RUManual rm -> do
let refqt = rmTable rm
@ -115,7 +115,7 @@ arrRelP2Setup
=> HashMap QualifiedTable (HashSet ForeignKey)
-> QualifiedTable
-> ArrRelDef
-> m (RelInfo, [SchemaDependency])
-> m (RelInfo 'Postgres, [SchemaDependency])
arrRelP2Setup foreignKeys qt (RelDef rn ru _) = case ru of
RUManual rm -> do
let refqt = rmTable rm
@ -144,7 +144,7 @@ purgeRelDep d = throw500 $ "unexpected dependency of relationship : "
validateRelP1
:: (UserInfoM m, QErrM m, TableCoreInfoRM m)
=> QualifiedTable -> RelName -> m RelInfo
=> QualifiedTable -> RelName -> m (RelInfo 'Postgres)
validateRelP1 qt rn = do
tabInfo <- askTableCoreInfo qt
askRelType (_tciFieldInfoMap tabInfo) rn ""

View File

@ -13,7 +13,7 @@ import qualified Data.HashMap.Strict as Map
renameRelP2
:: (QErrM m, MonadTx m, CacheRM m)
=> QualifiedTable -> RelName -> RelInfo -> m ()
=> QualifiedTable -> RelName -> (RelInfo 'Postgres) -> m ()
renameRelP2 qt newRN relInfo = withNewInconsistentObjsCheck $ do
tabInfo <- askTableCoreInfo qt
-- check for conflicts in fieldInfoMap

View File

@ -130,11 +130,11 @@ validateRemoteRelationship remoteRelationship remoteSchemaMap pgColumns = do
let baseTy = G.getBaseType (G._fldType field)
in
case (lookupType schemaDoc baseTy) of
Just (G.TypeDefinitionScalar _) -> True
Just (G.TypeDefinitionScalar _) -> True
Just (G.TypeDefinitionInterface _) -> True
Just (G.TypeDefinitionUnion _) -> True
Just (G.TypeDefinitionEnum _) -> True
_ -> False
Just (G.TypeDefinitionUnion _) -> True
Just (G.TypeDefinitionEnum _) -> True
_ -> False
buildRelationshipTypeInfo pgColumnsVariablesMap schemaDoc (objTyInfo,(_,typeMap)) fieldCall = do
objFldDefinition <- lookupField (fcName fieldCall) objTyInfo
let providedArguments = getRemoteArguments $ fcArguments fieldCall
@ -287,7 +287,7 @@ renameNamedType rename =
pgColumnToVariable :: MonadError ValidationError m => PGCol -> m G.Name
pgColumnToVariable pgCol =
let pgColText = getPGColTxt pgCol
in onNothing (G.mkName pgColText) (throwError $ InvalidGraphQLName pgColText)
in G.mkName pgColText `onNothing` throwError (InvalidGraphQLName pgColText)
-- | Lookup the field in the schema.
lookupField
@ -440,5 +440,5 @@ columnInfoToNamedType
-> m G.Name
columnInfoToNamedType pci =
case pgiType pci of
PGColumnScalar scalarType -> getPGScalarTypeName scalarType
_ -> throwError UnsupportedEnum
ColumnScalar scalarType -> getPGScalarTypeName scalarType
_ -> throwError UnsupportedEnum

View File

@ -452,11 +452,11 @@ withMetadataCheck cascade action = do
SOFunction qf -> Just qf
_ -> Nothing
forM_ (droppedFuncs \\ purgedFuncs) $ \qf -> do
forM_ (droppedFuncs \\ purgedFuncs) $ \qf ->
liftTx $ delFunctionFromCatalog qf
-- Process altered functions
forM_ alteredFuncs $ \(qf, newTy) -> do
forM_ alteredFuncs $ \(qf, newTy) ->
when (newTy == FTVOLATILE) $
throw400 NotSupported $
"type of function " <> qf <<> " is altered to \"VOLATILE\" which is not supported now"

View File

@ -114,7 +114,7 @@ mkRelationshipMetadataObject (CatalogRelation qt rn rt rDef cmnt) =
buildRelationship
:: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
=> (HashMap QualifiedTable (HashSet ForeignKey), CatalogRelation) `arr` Maybe RelInfo
=> (HashMap QualifiedTable (HashSet ForeignKey), CatalogRelation) `arr` Maybe (RelInfo 'Postgres)
buildRelationship = proc (foreignKeys, relationship) -> do
let CatalogRelation tableName rn rt rDef _ = relationship
metadataObject = mkRelationshipMetadataObject relationship

View File

@ -49,11 +49,11 @@ import Hasura.Server.Utils (makeReasonMessage)
resolveEnumReferences
:: HashMap QualifiedTable (PrimaryKey PGCol, EnumValues)
-> HashSet ForeignKey
-> HashMap PGCol (NonEmpty EnumReference)
-> HashMap PGCol (NonEmpty (EnumReference 'Postgres))
resolveEnumReferences enumTables =
M.fromListWith (<>) . map (fmap (:|[])) . mapMaybe resolveEnumReference . toList
where
resolveEnumReference :: ForeignKey -> Maybe (PGCol, EnumReference)
resolveEnumReference :: ForeignKey -> Maybe (PGCol, EnumReference 'Postgres)
resolveEnumReference foreignKey = do
[(localColumn, foreignColumn)] <- pure $ M.toList (_fkColumnMapping foreignKey)
(primaryKey, enumValues) <- M.lookup (_fkForeignTable foreignKey) enumTables

View File

@ -481,7 +481,7 @@ buildTableCache = Inc.cache proc (catalogTables, reloadMetadataInvalidationKey)
-- known enum tables.
processColumnInfo
:: (QErrM n)
=> Map.HashMap PGCol (NonEmpty EnumReference)
=> Map.HashMap PGCol (NonEmpty (EnumReference 'Postgres))
-> QualifiedTable -- ^ the table this column belongs to
-> (RawColumnInfo 'Postgres, G.Name)
-> n (ColumnInfo 'Postgres)
@ -500,9 +500,9 @@ buildTableCache = Inc.cache proc (catalogTables, reloadMetadataInvalidationKey)
resolveColumnType =
case Map.lookup pgCol tableEnumReferences of
-- no references? not an enum
Nothing -> pure $ PGColumnScalar (prciType rawInfo)
Nothing -> pure $ ColumnScalar (prciType rawInfo)
-- one reference? is an enum
Just (enumReference:|[]) -> pure $ PGColumnEnumReference enumReference
Just (enumReference:|[]) -> pure $ ColumnEnumReference enumReference
-- multiple referenced enums? the schema is strange, so let's reject it
Just enumReferences -> throw400 ConstraintViolation
$ "column " <> prciName rawInfo <<> " in table " <> tableName

View File

@ -68,7 +68,7 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) =
validateCountQWith
:: (UserInfoM m, QErrM m, CacheRM m)
=> SessVarBldr 'Postgres m
-> (PGColumnType -> Value -> m S.SQLExp)
-> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> CountQuery
-> m CountQueryP1
validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do

View File

@ -77,15 +77,13 @@ askPermInfo
-> TableInfo 'Postgres
-> m c
askPermInfo pa tableInfo = do
roleName <- askCurRole
roleName <- askCurRole
mPermInfo <- askPermInfo' pa tableInfo
case mPermInfo of
Just c -> return c
Nothing -> throw400 PermissionDenied $ mconcat
[ pt <> " on " <>> _tciName (_tiCoreInfo tableInfo)
, " for role " <>> roleName
, " is not allowed. "
]
onNothing mPermInfo $ throw400 PermissionDenied $ mconcat
[ pt <> " on " <>> _tciName (_tiCoreInfo tableInfo)
, " for role " <>> roleName
, " is not allowed. "
]
where
pt = permTypeToCode $ permAccToType pa
@ -143,7 +141,7 @@ checkPermOnCol pt allowedCols pgCol = do
, permTypeToCode pt <> " column " <>> pgCol
]
binRHSBuilder :: (QErrM m) => PGColumnType -> Value -> DMLP1T m S.SQLExp
binRHSBuilder :: (QErrM m) => ColumnType 'Postgres -> Value -> DMLP1T m S.SQLExp
binRHSBuilder colType val = do
preparedArgs <- get
scalarValue <- parsePGScalarValue colType val
@ -158,7 +156,7 @@ fetchRelTabInfo refTabName =
-- Internal error
modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName
type SessVarBldr b m = PGType (ScalarType b) -> SessionVariable -> m (SQLExp b)
type SessVarBldr b m = PGType (ScalarType b) -> SessionVariable -> m (SQLExpression b)
fetchRelDet
:: (UserInfoM m, QErrM m, CacheRM m)
@ -211,7 +209,7 @@ convPartialSQLExp
:: (Applicative f)
=> SessVarBldr backend f
-> PartialSQLExp backend
-> f (SQLExp backend)
-> f (SQLExpression backend)
convPartialSQLExp f = \case
PSESQLExp sqlExp -> pure sqlExp
PSESessVar colTy sessionVariable -> f colTy sessionVariable
@ -249,7 +247,7 @@ convBoolExp
-> SelPermInfo 'Postgres
-> BoolExp 'Postgres
-> SessVarBldr 'Postgres m
-> (PGColumnType -> Value -> m S.SQLExp)
-> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> m (AnnBoolExpSQL 'Postgres)
convBoolExp cim spi be sessVarBldr prepValBldr = do
abe <- annBoolExp rhsParser cim $ unBoolExp be
@ -274,7 +272,7 @@ dmlTxErrorHandler = mkTxErrorHandler $ \case
, PGInvalidColumnReference ]
_ -> False
toJSONableExp :: Bool -> PGColumnType -> Bool -> S.SQLExp -> S.SQLExp
toJSONableExp :: Bool -> ColumnType 'Postgres -> Bool -> S.SQLExp -> S.SQLExp
toJSONableExp strfyNum colTy asText expn
| asText || (isScalarColumnWhere isBigNum colTy && strfyNum) =
expn `S.SETyAnn` S.textTypeAnn

View File

@ -198,7 +198,7 @@ convSelectQ
-> SelPermInfo 'Postgres -- Additional select permission info
-> SelectQExt 'Postgres -- Given Select Query
-> SessVarBldr 'Postgres m
-> (PGColumnType -> Value -> m S.SQLExp)
-> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> m (AnnSimpleSel 'Postgres)
convSelectQ table fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
@ -264,7 +264,7 @@ convExtRel
-> Maybe RelName
-> SelectQExt 'Postgres
-> SessVarBldr 'Postgres m
-> (PGColumnType -> Value -> m S.SQLExp)
-> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> m (Either (ObjectRelationSelect 'Postgres) (ArraySelect 'Postgres))
convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
-- Point to the name key

View File

@ -33,9 +33,9 @@ import Hasura.Session
convInc
:: (QErrM m)
=> (PGColumnType -> Value -> m S.SQLExp)
=> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> PGCol
-> PGColumnType
-> ColumnType 'Postgres
-> Value
-> m (PGCol, S.SQLExp)
convInc f col colType val = do
@ -44,9 +44,9 @@ convInc f col colType val = do
convMul
:: (QErrM m)
=> (PGColumnType -> Value -> m S.SQLExp)
=> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> PGCol
-> PGColumnType
-> ColumnType 'Postgres
-> Value
-> m (PGCol, S.SQLExp)
convMul f col colType val = do
@ -55,16 +55,16 @@ convMul f col colType val = do
convSet
:: (QErrM m)
=> (PGColumnType -> Value -> m S.SQLExp)
=> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> PGCol
-> PGColumnType
-> ColumnType 'Postgres
-> Value
-> m (PGCol, S.SQLExp)
convSet f col colType val = do
prepExp <- f colType val
return (col, prepExp)
convDefault :: (Monad m) => PGCol -> PGColumnType -> () -> m (PGCol, S.SQLExp)
convDefault :: (Monad m) => PGCol -> ColumnType 'Postgres -> () -> m (PGCol, S.SQLExp)
convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT")
convOp
@ -73,7 +73,7 @@ convOp
-> [PGCol]
-> UpdPermInfo 'Postgres
-> [(PGCol, a)]
-> (PGCol -> PGColumnType -> a -> m (PGCol, S.SQLExp))
-> (PGCol -> ColumnType 'Postgres -> a -> m (PGCol, S.SQLExp))
-> m [(PGCol, S.SQLExp)]
convOp fieldInfoMap preSetCols updPerm objs conv =
forM objs $ \(pgCol, a) -> do
@ -95,7 +95,7 @@ convOp fieldInfoMap preSetCols updPerm objs conv =
validateUpdateQueryWith
:: (UserInfoM m, QErrM m, CacheRM m)
=> SessVarBldr 'Postgres m
-> (PGColumnType -> Value -> m S.SQLExp)
-> (ColumnType 'Postgres -> Value -> m S.SQLExp)
-> UpdateQuery
-> m (AnnUpd 'Postgres)
validateUpdateQueryWith sessVarBldr prepValBldr uq = do

View File

@ -274,16 +274,11 @@ data OpExpG (b :: BackendType) a
| CGTE !(Column b)
| CLTE !(Column b)
deriving (Functor, Foldable, Traversable, Generic)
deriving instance (Eq a) => Eq (OpExpG 'Postgres a)
instance (NFData a) => NFData (OpExpG 'Postgres a)
instance (Cacheable a) => Cacheable (OpExpG 'Postgres a)
instance (Hashable a) => Hashable (OpExpG 'Postgres a)
type family XAILIKE (b :: BackendType) where
XAILIKE 'Postgres = ()
XAILIKE 'MySQL = Void
type family XANILIKE (b :: BackendType) where
XANILIKE 'Postgres = ()
XANILIKE 'MySQL = Void
deriving instance (Backend b, Eq a) => Eq (OpExpG b a)
instance (Backend b, NFData a) => NFData (OpExpG b a)
instance (Backend b, Cacheable a) => Cacheable (OpExpG b a)
instance (Backend b, Hashable a) => Hashable (OpExpG b a)
opExpDepCol :: OpExpG backend a -> Maybe (Column backend)
opExpDepCol = \case
@ -352,13 +347,13 @@ opExpToJPair f = \case
opExpsToJSON = object . map (opExpToJPair f)
data AnnBoolExpFld (b :: BackendType) a
= AVCol !(ColumnInfo b) ![OpExpG 'Postgres a]
| AVRel !RelInfo !(AnnBoolExp b a)
= AVCol !(ColumnInfo b) ![OpExpG b a]
| AVRel !(RelInfo b) !(AnnBoolExp b a)
deriving (Functor, Foldable, Traversable, Generic)
deriving instance Eq a => Eq (AnnBoolExpFld 'Postgres a)
instance (NFData a) => NFData (AnnBoolExpFld 'Postgres a)
instance (Cacheable a) => Cacheable (AnnBoolExpFld 'Postgres a)
instance (Hashable a) => Hashable (AnnBoolExpFld 'Postgres a)
deriving instance (Backend b, Eq (ColumnInfo b), Eq a) => Eq (AnnBoolExpFld b a)
instance (Backend b, NFData (ColumnInfo b), NFData a) => NFData (AnnBoolExpFld b a)
instance (Backend b, Cacheable (ColumnInfo b), Cacheable a) => Cacheable (AnnBoolExpFld b a)
instance (Backend b, Hashable (ColumnInfo b), Hashable a) => Hashable (AnnBoolExpFld b a)
type AnnBoolExp b a
= GBoolExp b (AnnBoolExpFld b a)
@ -389,8 +384,8 @@ andAnnBoolExps :: AnnBoolExp backend a -> AnnBoolExp backend a -> AnnBoolExp bac
andAnnBoolExps l r =
BoolAnd [l, r]
type AnnBoolExpFldSQL b = AnnBoolExpFld b (SQLExp b)
type AnnBoolExpSQL b = AnnBoolExp b (SQLExp b)
type AnnBoolExpFldSQL b = AnnBoolExpFld b (SQLExpression b)
type AnnBoolExpSQL b = AnnBoolExp b (SQLExpression b)
type AnnBoolExpFldPartialSQL b = AnnBoolExpFld b (PartialSQLExp b)
type AnnBoolExpPartialSQL b = AnnBoolExp b (PartialSQLExp b)
@ -401,16 +396,16 @@ type PreSetColsPartial b = M.HashMap (Column b) (PartialSQLExp b)
-- doesn't resolve the session variable
data PartialSQLExp (b :: BackendType)
= PSESessVar !(PG.PGType (ScalarType b)) !SessionVariable
| PSESQLExp !(SQLExp b)
| PSESQLExp !(SQLExpression b)
deriving (Generic)
deriving instance Eq (PartialSQLExp 'Postgres)
deriving instance Data (PartialSQLExp 'Postgres)
instance NFData (PartialSQLExp 'Postgres)
instance Cacheable (PartialSQLExp 'Postgres)
deriving instance Backend b => Eq (PartialSQLExp b)
deriving instance Backend b => Data (PartialSQLExp b)
instance Backend b => NFData (PartialSQLExp b)
instance Backend b => Cacheable (PartialSQLExp b)
mkTypedSessionVar :: PG.PGType PGColumnType -> SessionVariable -> PartialSQLExp 'Postgres
mkTypedSessionVar :: PG.PGType (ColumnType 'Postgres) -> SessionVariable -> PartialSQLExp 'Postgres
mkTypedSessionVar columnType =
PSESessVar (unsafePGColumnToRepresentation <$> columnType)
PSESessVar (unsafePGColumnToBackend <$> columnType)
instance ToJSON (PartialSQLExp 'Postgres) where
toJSON = \case
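
The instance changes in this file follow one pattern: instances pinned to 'Postgres become standalone-derived instances constrained on Backend b. A self-contained sketch of the pattern with stand-in Backend and Expr definitions (illustrative names only, not the real Hasura class):

{-# LANGUAGE DataKinds, FlexibleContexts, StandaloneDeriving,
             TypeFamilies, UndecidableInstances #-}

data BackendType = Postgres | MySQL

-- stand-in for the real Backend class and its associated types
class Backend (b :: BackendType) where
  type ScalarType b

data PGScalarType = PGText | PGJSONB deriving (Eq, Show)

instance Backend 'Postgres where
  type ScalarType 'Postgres = PGScalarType

-- a backend-indexed IR node in the spirit of OpExpG / PartialSQLExp above
data Expr (b :: BackendType)
  = ELit !(ScalarType b)
  | ENull

-- before this diff, instances were pinned to one backend:
--   deriving instance Eq (Expr 'Postgres)
-- after, one constrained instance covers every backend; the type-family
-- constraint is what forces UndecidableInstances (in the real code the
-- Backend class bundles such constraints as superclasses, which is why
-- "(Backend b, Eq a)" is enough there)
deriving instance (Backend b, Eq (ScalarType b)) => Eq (Expr b)
deriving instance (Backend b, Show (ScalarType b)) => Show (Expr b)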

View File

@ -17,7 +17,7 @@ data AnnDelG (b :: BackendType) v
, dqp1AllCols :: ![ColumnInfo b]
}
type AnnDel b = AnnDelG b (SQLExp b)
type AnnDel b = AnnDelG b (SQLExpression b)
traverseAnnDel
:: (Applicative f)

View File

@ -29,14 +29,14 @@ data AnnIns (b :: BackendType) a v
type SingleObjIns b v = AnnIns b (AnnInsObj b v) v
type MultiObjIns b v = AnnIns b [AnnInsObj b v] v
data RelIns a
data RelIns (b :: BackendType) a
= RelIns
{ _riAnnIns :: !a
, _riRelInfo :: !RelInfo
, _riRelInfo :: !(RelInfo b)
} deriving (Show, Eq)
type ObjRelIns b v = RelIns (SingleObjIns b v)
type ArrRelIns b v = RelIns (MultiObjIns b v)
type ObjRelIns b v = RelIns b (SingleObjIns b v)
type ArrRelIns b v = RelIns b (MultiObjIns b v)
data AnnInsObj (b :: BackendType) v
= AnnInsObj
@ -74,8 +74,8 @@ data InsertQueryP1 (b :: BackendType)
= InsertQueryP1
{ iqp1Table :: !(TableName b)
, iqp1Cols :: ![Column b]
, iqp1Tuples :: ![[SQLExp b]]
, iqp1Conflict :: !(Maybe (ConflictClauseP1 b (SQLExp b)))
, iqp1Tuples :: ![[SQLExpression b]]
, iqp1Conflict :: !(Maybe (ConflictClauseP1 b (SQLExpression b)))
, iqp1CheckCond :: !(AnnBoolExpSQL b, Maybe (AnnBoolExpSQL b))
, iqp1Output :: !(MutationOutput b)
, iqp1AllCols :: ![ColumnInfo b]

View File

@ -16,7 +16,7 @@ data MutFldG (b :: BackendType) v
| MExp !Text
| MRet !(AnnFieldsG b v)
type MutFld b = MutFldG b (SQLExp b)
type MutFld b = MutFldG b (SQLExpression b)
type MutFldsG b v = Fields (MutFldG b v)
@ -24,9 +24,9 @@ data MutationOutputG (b :: BackendType) v
= MOutMultirowFields !(MutFldsG b v)
| MOutSinglerowObject !(AnnFieldsG b v)
type MutationOutput b = MutationOutputG b (SQLExp b)
type MutationOutput b = MutationOutputG b (SQLExpression b)
type MutFlds b = MutFldsG b (SQLExp b)
type MutFlds b = MutFldsG b (SQLExpression b)
buildEmptyMutResp :: MutationOutput backend -> EncJSON
buildEmptyMutResp = \case

View File

@ -1,5 +1,3 @@
{-# LANGUAGE DeriveLift #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UndecidableInstances #-}
module Hasura.RQL.IR.Select where
@ -13,9 +11,6 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens.TH (makeLenses, makePrisms)
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.Backends.Postgres.SQL.Types as PG
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.OrderBy
@ -37,16 +32,16 @@ data AnnAggregateOrderBy (b :: BackendType)
= AAOCount
| AAOOp !Text !(ColumnInfo b)
deriving (Generic)
deriving instance Eq (AnnAggregateOrderBy 'Postgres)
instance Hashable (AnnAggregateOrderBy 'Postgres)
deriving instance (Backend b, Eq (ColumnInfo b)) => Eq (AnnAggregateOrderBy b)
instance (Backend b, Hashable (ColumnInfo b)) => Hashable (AnnAggregateOrderBy b)
data AnnOrderByElementG (b :: BackendType) v
= AOCColumn !(ColumnInfo b)
| AOCObjectRelation !RelInfo !v !(AnnOrderByElementG b v)
| AOCArrayAggregation !RelInfo !v !(AnnAggregateOrderBy b)
| AOCObjectRelation !(RelInfo b) !v !(AnnOrderByElementG b v)
| AOCArrayAggregation !(RelInfo b) !v !(AnnAggregateOrderBy b)
deriving (Generic, Functor)
deriving instance Eq v => Eq (AnnOrderByElementG 'Postgres v)
instance (Hashable v) => Hashable (AnnOrderByElementG 'Postgres v)
deriving instance (Backend b, Eq (ColumnInfo b), Eq v) => Eq (AnnOrderByElementG b v)
instance (Backend b, Hashable (ColumnInfo b), Hashable v) => Hashable (AnnOrderByElementG b v)
type AnnOrderByElement b v = AnnOrderByElementG b (AnnBoolExp b v)
@ -72,10 +67,10 @@ traverseAnnOrderByItem
traverseAnnOrderByItem f =
traverse (traverseAnnOrderByElement f)
type AnnOrderByItem b = AnnOrderByItemG b (SQLExp b)
type AnnOrderByItem b = AnnOrderByItemG b (SQLExpression b)
type OrderByItemExp b =
OrderByItemG b (AnnOrderByElement b (SQLExp b), (PG.Alias, (SQLExp b)))
OrderByItemG b (AnnOrderByElement b (SQLExpression b), (Alias b, (SQLExpression b)))
data AnnRelationSelectG (b :: BackendType) a
= AnnRelationSelectG
@ -87,7 +82,7 @@ data AnnRelationSelectG (b :: BackendType) a
type ArrayRelationSelectG b v = AnnRelationSelectG b (AnnSimpleSelG b v)
type ArrayAggregateSelectG b v = AnnRelationSelectG b (AnnAggregateSelectG b v)
type ArrayConnectionSelect b v = AnnRelationSelectG b (ConnectionSelect b v)
type ArrayAggregateSelect b = ArrayAggregateSelectG b (SQLExp b)
type ArrayAggregateSelect b = ArrayAggregateSelectG b (SQLExpression b)
data AnnObjectSelectG (b :: BackendType) v
= AnnObjectSelectG
@ -96,7 +91,7 @@ data AnnObjectSelectG (b :: BackendType) v
, _aosTableFilter :: !(AnnBoolExp b v)
}
type AnnObjectSelect b = AnnObjectSelectG b (SQLExp b)
type AnnObjectSelect b = AnnObjectSelectG b (SQLExpression b)
traverseAnnObjectSelect
:: (Applicative f)
@ -109,17 +104,17 @@ traverseAnnObjectSelect f (AnnObjectSelectG fields fromTable permissionFilter) =
<*> traverseAnnBoolExp f permissionFilter
type ObjectRelationSelectG b v = AnnRelationSelectG b (AnnObjectSelectG b v)
type ObjectRelationSelect b = ObjectRelationSelectG b (SQLExp b)
type ObjectRelationSelect b = ObjectRelationSelectG b (SQLExpression b)
data ComputedFieldScalarSelect (b :: BackendType) v
= ComputedFieldScalarSelect
{ _cfssFunction :: !PG.QualifiedFunction
, _cfssArguments :: !(FunctionArgsExpTableRow v)
, _cfssType :: !PG.PGScalarType
{ _cfssFunction :: !(FunctionName b)
, _cfssArguments :: !(FunctionArgsExpTableRow b v)
, _cfssType :: !(ScalarType b)
, _cfssColumnOp :: !(Maybe (ColumnOp b))
} deriving (Functor, Foldable, Traversable)
deriving instance Show v => Show (ComputedFieldScalarSelect 'Postgres v)
deriving instance Eq v => Eq (ComputedFieldScalarSelect 'Postgres v)
deriving instance (Backend b, Show v) => Show (ComputedFieldScalarSelect b v)
deriving instance (Backend b, Eq v) => Eq (ComputedFieldScalarSelect b v)
data ComputedFieldSelect (b :: BackendType) v
= CFSScalar !(ComputedFieldScalarSelect b v)
@ -153,17 +148,17 @@ traverseArraySelect f = \case
ASConnection relConnection ->
ASConnection <$> traverse (traverseConnectionSelect f) relConnection
type ArraySelect b = ArraySelectG b (SQLExp b)
type ArraySelect b = ArraySelectG b (SQLExpression b)
type ArraySelectFieldsG b v = Fields (ArraySelectG b v)
data ColumnOp (b :: BackendType)
= ColumnOp
{ _colOp :: PG.SQLOp
, _colExp :: (SQLExp b)
{ _colOp :: (SQLOperator b)
, _colExp :: (SQLExpression b)
}
deriving instance Show (ColumnOp 'Postgres)
deriving instance Eq (ColumnOp 'Postgres)
deriving instance Backend b => Show (ColumnOp b)
deriving instance Backend b => Eq (ColumnOp b)
data AnnColumnField (b :: BackendType)
= AnnColumnField
@ -219,18 +214,18 @@ traverseAnnField f = \case
AFNodeId qt pKeys -> pure $ AFNodeId qt pKeys
AFExpression t -> AFExpression <$> pure t
type AnnField b = AnnFieldG b (SQLExp b)
type AnnField b = AnnFieldG b (SQLExpression b)
data SelectArgsG (b :: BackendType) v
= SelectArgs
{ _saWhere :: !(Maybe (AnnBoolExp b v))
, _saOrderBy :: !(Maybe (NE.NonEmpty (AnnOrderByItemG b v)))
, _saLimit :: !(Maybe Int)
, _saOffset :: !(Maybe (SQLExp b))
, _saOffset :: !(Maybe (SQLExpression b))
, _saDistinct :: !(Maybe (NE.NonEmpty (Column b)))
} deriving (Generic)
deriving instance Eq v => Eq (SelectArgsG 'Postgres v)
instance (Hashable v) => Hashable (SelectArgsG 'Postgres v)
deriving instance (Backend b, Eq (ColumnInfo b), Eq v) => Eq (SelectArgsG b v)
instance (Backend b, Hashable (ColumnInfo b), Hashable v) => Hashable (SelectArgsG b v)
traverseSelectArgs
:: (Applicative f)
@ -244,7 +239,7 @@ traverseSelectArgs f (SelectArgs wh ordBy lmt ofst distCols) =
<*> pure ofst
<*> pure distCols
type SelectArgs b = SelectArgsG b (SQLExp b)
type SelectArgs b = SelectArgsG b (SQLExpression b)
noSelectArgs :: SelectArgsG backend v
noSelectArgs = SelectArgs Nothing Nothing Nothing Nothing Nothing
@ -266,7 +261,7 @@ data AggregateOp (b :: BackendType)
}
data AggregateField (b :: BackendType)
= AFCount !PG.CountType
= AFCount !(CountType b)
| AFOp !(AggregateOp b)
| AFExp !Text
@ -278,7 +273,7 @@ traverseAnnFields
=> (a -> f b) -> AnnFieldsG backend a -> f (AnnFieldsG backend b)
traverseAnnFields f = traverse (traverse (traverseAnnField f))
type AnnFields b = AnnFieldsG b (SQLExp b)
type AnnFields b = AnnFieldsG b (SQLExpression b)
data TableAggregateFieldG (b :: BackendType) v
= TAFAgg !(AggregateFields b)
@ -331,37 +326,39 @@ traverseTableAggregateField f = \case
TAFNodes annFlds -> TAFNodes <$> traverseAnnFields f annFlds
TAFExp t -> pure $ TAFExp t
type TableAggregateField b = TableAggregateFieldG b (SQLExp b)
type TableAggregateField b = TableAggregateFieldG b (SQLExpression b)
type TableAggregateFieldsG b v = Fields (TableAggregateFieldG b v)
type TableAggregateFields b = TableAggregateFieldsG b (SQLExp b)
type TableAggregateFields b = TableAggregateFieldsG b (SQLExpression b)
data ArgumentExp a
= AETableRow !(Maybe PG.Identifier) -- ^ table row accessor
data ArgumentExp (b :: BackendType) a
= AETableRow !(Maybe (Identifier b)) -- ^ table row accessor
| AESession !a -- ^ JSON/JSONB hasura session variable object
| AEInput !a
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (Hashable v) => Hashable (ArgumentExp v)
deriving (Functor, Foldable, Traversable, Generic)
deriving instance (Backend b, Show a) => Show (ArgumentExp b a)
deriving instance (Backend b, Eq a) => Eq (ArgumentExp b a)
instance (Backend b, Hashable v) => Hashable (ArgumentExp b v)
type FunctionArgsExpTableRow v = FunctionArgsExpG (ArgumentExp v)
type FunctionArgsExpTableRow b v = FunctionArgsExpG (ArgumentExp b v)
data SelectFromG (b :: BackendType) v
= FromTable !(TableName b)
| FromIdentifier !PG.Identifier
| FromFunction !PG.QualifiedFunction
!(FunctionArgsExpTableRow v)
| FromIdentifier !(Identifier b)
| FromFunction !(FunctionName b)
!(FunctionArgsExpTableRow b v)
-- a definition list
!(Maybe [(Column b, ScalarType b)])
deriving (Functor, Foldable, Traversable, Generic)
instance (Hashable v) => Hashable (SelectFromG 'Postgres v)
instance (Backend b, Hashable v) => Hashable (SelectFromG b v)
type SelectFrom b = SelectFromG b (SQLExp b)
type SelectFrom b = SelectFromG b (SQLExpression b)
data TablePermG (b :: BackendType) v
= TablePerm
{ _tpFilter :: !(AnnBoolExp b v)
, _tpLimit :: !(Maybe Int)
} deriving (Generic)
instance (Hashable v) => Hashable (TablePermG 'Postgres v)
instance (Backend b, Hashable (ColumnInfo b), Hashable v) => Hashable (TablePermG b v)
traverseTablePerm
:: (Applicative f)
@ -377,7 +374,7 @@ noTablePermissions :: TablePermG backend v
noTablePermissions =
TablePerm annBoolExpTrue Nothing
type TablePerm b = TablePermG b (SQLExp b)
type TablePerm b = TablePermG b (SQLExpression b)
data AnnSelectG (b :: BackendType) a v
= AnnSelectG
@ -414,10 +411,10 @@ traverseAnnSelect f1 f2 (AnnSelectG flds tabFrom perm args strfyNum) =
<*> pure strfyNum
type AnnSimpleSelG b v = AnnSelectG b (AnnFieldsG b v) v
type AnnSimpleSel b = AnnSimpleSelG b (SQLExp b)
type AnnSimpleSel b = AnnSimpleSelG b (SQLExpression b)
type AnnAggregateSelectG b v = AnnSelectG b (TableAggregateFieldsG b v) v
type AnnAggregateSelect b = AnnAggregateSelectG b (SQLExp b)
type AnnAggregateSelect b = AnnAggregateSelectG b (SQLExpression b)
data ConnectionSlice
= SliceFirst !Int
@ -437,7 +434,7 @@ data ConnectionSplit (b :: BackendType) v
, _csValue :: !v
, _csOrderBy :: !(OrderByItemG b (AnnOrderByElementG b ()))
} deriving (Functor, Generic, Foldable, Traversable)
instance (Hashable v) => Hashable (ConnectionSplit 'Postgres v)
instance (Backend b, Hashable (ColumnInfo b), Hashable v) => Hashable (ConnectionSplit b v)
traverseConnectionSplit
:: (Applicative f)
@ -473,7 +470,7 @@ instance (Hashable a) => Hashable (FunctionArgsExpG a)
emptyFunctionArgsExp :: FunctionArgsExpG a
emptyFunctionArgsExp = FunctionArgsExp [] HM.empty
type FunctionArgExp b = FunctionArgsExpG (SQLExp b)
type FunctionArgExp b = FunctionArgsExpG (SQLExpression b)
-- | If argument positional index is less than or equal to length of
-- 'positional' arguments then insert the value in 'positional' arguments else
@ -492,124 +489,6 @@ insertFunctionArg argName idx value (FunctionArgsExp positional named) =
where
insertAt i a = toList . Seq.insertAt i a . Seq.fromList
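For reference, the insertion helper described by the context lines above can be sketched in isolation. The types below are simplified stand-ins (plain `String` arguments instead of the IR expression type), and the named-argument fallback follows the comment rather than the exact original definition:

```haskell
import           Data.Foldable       (toList)
import qualified Data.HashMap.Strict as HM
import qualified Data.Sequence       as Seq

-- Simplified stand-in for FunctionArgsExpG, specialised to String values.
data FunctionArgs = FunctionArgs
  { faPositional :: ![String]
  , faNamed      :: !(HM.HashMap String String)
  } deriving (Show, Eq)

-- If the positional index falls within the current positional list, insert
-- there; otherwise store the value under its argument name (this fallback
-- mirrors the comment above, not necessarily the exact original code).
insertFunctionArg :: String -> Int -> String -> FunctionArgs -> FunctionArgs
insertFunctionArg name idx value (FunctionArgs positional named)
  | idx <= length positional =
      FunctionArgs (insertAt idx value positional) named
  | otherwise =
      FunctionArgs positional (HM.insert name value named)
  where
    insertAt i a = toList . Seq.insertAt i a . Seq.fromList

main :: IO ()
main = print (insertFunctionArg "limit" 1 "10" (FunctionArgs ["row"] mempty))
-- FunctionArgs {faPositional = ["row","10"], faNamed = fromList []}
```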
data SourcePrefixes
= SourcePrefixes
{ _pfThis :: !PG.Identifier -- ^ Current source prefix
, _pfBase :: !PG.Identifier
-- ^ Base table source row identifier to generate
-- the table's column identifiers for computed field
-- function input parameters
} deriving (Show, Eq, Generic)
instance Hashable SourcePrefixes
data SelectSource (b :: BackendType)
= SelectSource
{ _ssPrefix :: !PG.Identifier
, _ssFrom :: !PG.FromItem
, _ssDistinct :: !(Maybe PG.DistinctExpr)
, _ssWhere :: !PG.BoolExp
, _ssOrderBy :: !(Maybe PG.OrderByExp)
, _ssLimit :: !(Maybe Int)
, _ssOffset :: !(Maybe (SQLExp b))
} deriving (Generic)
instance Hashable (SelectSource 'Postgres)
deriving instance Show (SelectSource 'Postgres)
deriving instance Eq (SelectSource 'Postgres)
data SelectNode (b :: BackendType)
= SelectNode
{ _snExtractors :: !(HM.HashMap PG.Alias (SQLExp b))
, _snJoinTree :: !(JoinTree b)
}
instance Semigroup (SelectNode 'Postgres) where
SelectNode lExtrs lJoinTree <> SelectNode rExtrs rJoinTree =
SelectNode (lExtrs <> rExtrs) (lJoinTree <> rJoinTree)
data ObjectSelectSource
= ObjectSelectSource
{ _ossPrefix :: !PG.Identifier
, _ossFrom :: !PG.FromItem
, _ossWhere :: !PG.BoolExp
} deriving (Show, Eq, Generic)
instance Hashable ObjectSelectSource
objectSelectSourceToSelectSource :: ObjectSelectSource -> (SelectSource backend)
objectSelectSourceToSelectSource ObjectSelectSource{..} =
SelectSource _ossPrefix _ossFrom Nothing _ossWhere Nothing Nothing Nothing
data ObjectRelationSource (b :: BackendType)
= ObjectRelationSource
{ _orsRelationshipName :: !RelName
, _orsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _orsSelectSource :: !ObjectSelectSource
} deriving (Generic)
instance Hashable (ObjectRelationSource 'Postgres)
deriving instance Eq (Column b) => Eq (ObjectRelationSource b)
data ArrayRelationSource (b :: BackendType)
= ArrayRelationSource
{ _arsAlias :: !PG.Alias
, _arsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _arsSelectSource :: !(SelectSource b)
} deriving (Generic)
instance Hashable (ArrayRelationSource 'Postgres)
deriving instance Eq (ArrayRelationSource 'Postgres)
data ArraySelectNode (b :: BackendType)
= ArraySelectNode
{ _asnTopExtractors :: ![PG.Extractor]
, _asnSelectNode :: !(SelectNode b)
}
instance Semigroup (ArraySelectNode 'Postgres) where
ArraySelectNode lTopExtrs lSelNode <> ArraySelectNode rTopExtrs rSelNode =
ArraySelectNode (lTopExtrs <> rTopExtrs) (lSelNode <> rSelNode)
data ComputedFieldTableSetSource (b :: BackendType)
= ComputedFieldTableSetSource
{ _cftssFieldName :: !FieldName
, _cftssSelectType :: !JsonAggSelect
, _cftssSelectSource :: !(SelectSource b)
} deriving (Generic)
instance Hashable (ComputedFieldTableSetSource 'Postgres)
deriving instance Show (ComputedFieldTableSetSource 'Postgres)
deriving instance Eq (ComputedFieldTableSetSource 'Postgres)
data ArrayConnectionSource (b :: BackendType)
= ArrayConnectionSource
{ _acsAlias :: !PG.Alias
, _acsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _acsSplitFilter :: !(Maybe PG.BoolExp)
, _acsSlice :: !(Maybe ConnectionSlice)
, _acsSource :: !(SelectSource b)
} deriving (Generic)
deriving instance Eq (ArrayConnectionSource 'Postgres)
instance Hashable (ArrayConnectionSource 'Postgres)
data JoinTree (b :: BackendType)
= JoinTree
{ _jtObjectRelations :: !(HM.HashMap (ObjectRelationSource b) (SelectNode b))
, _jtArrayRelations :: !(HM.HashMap (ArrayRelationSource b) (ArraySelectNode b))
, _jtArrayConnections :: !(HM.HashMap (ArrayConnectionSource b) (ArraySelectNode b))
, _jtComputedFieldTableSets :: !(HM.HashMap (ComputedFieldTableSetSource b) (SelectNode b))
}
instance Semigroup (JoinTree 'Postgres) where
JoinTree lObjs lArrs lArrConns lCfts <> JoinTree rObjs rArrs rArrConns rCfts =
JoinTree (HM.unionWith (<>) lObjs rObjs)
(HM.unionWith (<>) lArrs rArrs)
(HM.unionWith (<>) lArrConns rArrConns)
(HM.unionWith (<>) lCfts rCfts)
instance Monoid (JoinTree 'Postgres) where
mempty = JoinTree mempty mempty mempty mempty
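The Semigroup/Monoid pair above is what lets independently built join trees be merged key by key. A minimal sketch of the same `HM.unionWith (<>)` idea, with the key and node types collapsed to simple stand-ins:

```haskell
import qualified Data.HashMap.Strict as HM

-- Simplified stand-ins: keys are relationship names, leaves carry only a map
-- of extractor aliases (the real types carry full select sources and nodes).
newtype SelectNode = SelectNode { snExtractors :: HM.HashMap String String }
  deriving (Show, Eq)

instance Semigroup SelectNode where
  SelectNode l <> SelectNode r = SelectNode (l <> r)

newtype JoinTree = JoinTree { jtObjectRelations :: HM.HashMap String SelectNode }
  deriving (Show, Eq)

-- Merging two join trees merges the nodes that share a key instead of letting
-- one side win, mirroring the HM.unionWith (<>) shape above.
instance Semigroup JoinTree where
  JoinTree l <> JoinTree r = JoinTree (HM.unionWith (<>) l r)

instance Monoid JoinTree where
  mempty = JoinTree mempty

main :: IO ()
main = print (jtObjectRelations (a <> b) ==
              HM.singleton "author"
                (SelectNode (HM.fromList [("id", "author.id"), ("name", "author.name")])))
  where
    a = JoinTree (HM.singleton "author" (SelectNode (HM.singleton "id" "author.id")))
    b = JoinTree (HM.singleton "author" (SelectNode (HM.singleton "name" "author.name")))
```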
data PermissionLimitSubQuery
= PLSQRequired !Int -- ^ Permission limit
| PLSQNotRequired
deriving (Show, Eq)
$(makeLenses ''AnnSelectG)
$(makePrisms ''AnnFieldG)

View File

@ -23,7 +23,7 @@ data AnnUpdG (b :: BackendType) v
, uqp1AllCols :: ![ColumnInfo b]
}
type AnnUpd b = AnnUpdG b (SQLExp b)
type AnnUpd b = AnnUpdG b (SQLExpression b)
data UpdOpExpG v = UpdSet !v
| UpdInc !v

View File

@ -51,7 +51,7 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.IR.BoolExp as R
import Hasura.RQL.Types.Action as R
import Hasura.RQL.Types.Column as R
import Hasura.RQL.Types.Common as R
import Hasura.RQL.Types.Common as R hiding (FunctionName)
import Hasura.RQL.Types.ComputedField as R
import Hasura.RQL.Types.CustomTypes as R
import Hasura.RQL.Types.Error as R
@ -67,8 +67,8 @@ import Hasura.RQL.Types.ScheduledTrigger as R
import Hasura.RQL.Types.SchemaCache as R
import Hasura.RQL.Types.SchemaCache.Build as R
import Hasura.RQL.Types.Table as R
import Hasura.Session
import Hasura.SQL.Backend as R
import Hasura.Session
import Hasura.Tracing (TraceT)
data QCtx
@ -216,7 +216,7 @@ askPGType
=> FieldInfoMap (FieldInfo 'Postgres)
-> PGCol
-> Text
-> m PGColumnType
-> m (ColumnType 'Postgres)
askPGType m c msg =
pgiType <$> askPGColInfo m c msg
@ -275,7 +275,7 @@ askRelType :: (MonadError QErr m)
=> FieldInfoMap (FieldInfo backend)
-> RelName
-> Text
-> m RelInfo
-> m (RelInfo backend)
askRelType m r msg = do
colInfo <- modifyErr ("relationship " <>) $
askFieldInfo m (fromRel r)
@ -293,12 +293,7 @@ askFieldInfo :: (MonadError QErr m)
-> FieldName
-> m fieldInfo
askFieldInfo m f =
case M.lookup f m of
Just colInfo -> return colInfo
Nothing ->
throw400 NotExists $ mconcat
[ f <<> " does not exist"
]
M.lookup f m `onNothing` throw400 NotExists (f <<> " does not exist")
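The rewritten askFieldInfo leans on the `onNothing` helper from Hasura's prelude. Assuming it has the usual `Maybe a -> m a -> m a` shape, the refactor boils down to this pattern, illustrated here with `Either` as the error monad:

```haskell
import qualified Data.HashMap.Strict as M

-- 'onNothing' lives in Hasura's custom prelude; the usual shape is assumed here.
onNothing :: Applicative m => Maybe a -> m a -> m a
onNothing m act = maybe act pure m

-- Toy version of the askFieldInfo lookup: return the hit, or run the error
-- action when the key is missing.
askFieldInfo :: M.HashMap String Int -> String -> Either String Int
askFieldInfo m f = M.lookup f m `onNothing` Left (f <> " does not exist")

main :: IO ()
main = print (askFieldInfo (M.fromList [("id", 1)]) "name")
-- Left "name does not exist"
```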
askRemoteRel :: (MonadError QErr m)
=> FieldInfoMap (FieldInfo backend)

View File

@ -1,11 +1,9 @@
module Hasura.RQL.Types.Column
( PGColumnType(..)
, _PGColumnScalar
, _PGColumnEnumReference
( ColumnType(..)
, _ColumnScalar
, _ColumnEnumReference
, isScalarColumnWhere
, ColumnType
, onlyIntCols
, onlyNumCols
, onlyJSONBCols
@ -13,7 +11,7 @@ module Hasura.RQL.Types.Column
, parsePGScalarValue
, parsePGScalarValues
, unsafePGColumnToRepresentation
, unsafePGColumnToBackend
, parseTxtEncodedPGValue
, ColumnInfo(..)
@ -39,7 +37,7 @@ import Data.Aeson.TH
import Data.Text.Extended
import Language.Haskell.TH.Syntax (Lift)
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Types hiding (TableName)
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Incremental (Cacheable)
import Hasura.RQL.Instances ()
@ -47,6 +45,7 @@ import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Error
import Hasura.SQL.Backend
newtype EnumValue
= EnumValue { getEnumValue :: G.Name }
deriving (Show, Eq, Ord, Lift, NFData, Hashable, ToJSON, ToJSONKey, FromJSON, FromJSONKey, Cacheable)
@ -61,63 +60,71 @@ type EnumValues = M.HashMap EnumValue EnumValueInfo
-- | Represents a reference to an “enum table,” a single-column Postgres table that is referenced
-- via foreign key.
data EnumReference
data EnumReference (b :: BackendType)
= EnumReference
{ erTable :: !QualifiedTable
{ erTable :: !(TableName b)
, erValues :: !EnumValues
} deriving (Show, Eq, Ord, Generic, Lift)
instance NFData EnumReference
instance Hashable EnumReference
instance Cacheable EnumReference
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumReference)
} deriving (Generic)
deriving instance (Backend b) => Show (EnumReference b)
deriving instance (Backend b) => Eq (EnumReference b)
deriving instance (Backend b) => Ord (EnumReference b)
deriving instance (Backend b) => Lift (EnumReference b)
instance (Backend b) => NFData (EnumReference b)
instance (Backend b) => Hashable (EnumReference b)
instance (Backend b) => Cacheable (EnumReference b)
instance Backend b => FromJSON (EnumReference b) where
parseJSON = genericParseJSON $ aesonPrefix snakeCase
instance Backend b => ToJSON (EnumReference b) where
toJSON = genericToJSON $ aesonPrefix snakeCase
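The new FromJSON/ToJSON instances rely on aeson-casing's `aesonPrefix snakeCase`, which strips the record-field prefix and snake_cases the remainder. A small standalone sketch (with the table name and enum values collapsed to plain strings) shows the resulting key names:

```haskell
{-# LANGUAGE DeriveGeneric #-}

import Data.Aeson        (ToJSON (..), encode, genericToJSON)
import Data.Aeson.Casing (aesonPrefix, snakeCase)
import GHC.Generics      (Generic)

-- Simplified, Postgres-flavoured stand-in: table name and enum values are
-- collapsed to plain strings.
data EnumReference = EnumReference
  { erTable  :: !String
  , erValues :: ![String]
  } deriving (Show, Eq, Generic)

-- aesonPrefix snakeCase strips the "er" prefix and snake_cases the rest,
-- so the keys come out as "table" and "values".
instance ToJSON EnumReference where
  toJSON = genericToJSON $ aesonPrefix snakeCase

main :: IO ()
main = print (encode (EnumReference "public.colors" ["red", "green"]))
-- "{\"table\":\"public.colors\",\"values\":[\"red\",\"green\"]}"  (key order may vary)
```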
-- | The type we use for columns, which are currently always “scalars” (though see the note about
-- 'PGType'). Unlike 'PGScalarType', which represents a type that /Postgres/ knows about, this type
-- characterizes distinctions we make but Postgres doesn't.
data PGColumnType
data ColumnType (b :: BackendType)
-- | Ordinary Postgres columns.
= PGColumnScalar !PGScalarType
= ColumnScalar !(ScalarType b)
-- | Columns that reference enum tables (see "Hasura.RQL.Schema.Enum"). This is not actually a
-- distinct type from the perspective of Postgres (at the time of this writing, we ensure they
-- always have type @text@), but we really want to distinguish this case, since we treat it
-- /completely/ differently in the GraphQL schema.
| PGColumnEnumReference !EnumReference
| ColumnEnumReference !(EnumReference b)
deriving (Show, Eq, Ord, Generic)
instance NFData PGColumnType
instance Hashable PGColumnType
instance Cacheable PGColumnType
$(deriveToJSON defaultOptions{constructorTagModifier = drop 8} ''PGColumnType)
$(makePrisms ''PGColumnType)
instance (Backend b) => NFData (ColumnType b)
instance (Backend b) => Hashable (ColumnType b)
instance (Backend b) => Cacheable (ColumnType b)
instance ToTxt PGColumnType where
instance Backend b => ToJSON (ColumnType b) where
toJSON = genericToJSON $ defaultOptions{constructorTagModifier = drop 6}
$(makePrisms ''ColumnType)
instance Backend b => ToTxt (ColumnType b) where
toTxt = \case
PGColumnScalar scalar -> toTxt scalar
PGColumnEnumReference (EnumReference tableName _) -> toTxt tableName
ColumnScalar scalar -> toTxt scalar
ColumnEnumReference (EnumReference tableName _) -> toTxt tableName
type family ColumnType (b :: BackendType) where
ColumnType 'Postgres = PGColumnType
ColumnType 'MySQL = Void -- TODO
isScalarColumnWhere :: (PGScalarType -> Bool) -> PGColumnType -> Bool
isScalarColumnWhere :: (ScalarType b -> Bool) -> ColumnType b -> Bool
isScalarColumnWhere f = \case
PGColumnScalar scalar -> f scalar
PGColumnEnumReference _ -> False
ColumnScalar scalar -> f scalar
ColumnEnumReference _ -> False
-- | Gets the representation type associated with a 'PGColumnType'. Avoid using this if possible.
-- | Gets the representation type associated with a 'ColumnType'. Avoid using this if possible.
-- Prefer 'parsePGScalarValue', 'parsePGScalarValues', or
-- 'Hasura.RQL.Types.BoolExp.mkTypedSessionVar'.
unsafePGColumnToRepresentation :: PGColumnType -> PGScalarType
unsafePGColumnToRepresentation = \case
PGColumnScalar scalarType -> scalarType
PGColumnEnumReference _ -> PGText
unsafePGColumnToBackend :: ColumnType 'Postgres -> PGScalarType
unsafePGColumnToBackend = \case
ColumnScalar scalarType -> scalarType
ColumnEnumReference _ -> PGText
-- | Note: Unconditionally accepts null values and returns 'PGNull'.
parsePGScalarValue
:: forall m. (MonadError QErr m) => PGColumnType -> Value -> m (WithScalarType PGScalarValue)
:: forall m. (MonadError QErr m) => ColumnType 'Postgres -> Value -> m (WithScalarType PGScalarValue)
parsePGScalarValue columnType value = case columnType of
PGColumnScalar scalarType ->
ColumnScalar scalarType ->
WithScalarType scalarType <$> runAesonParser (parsePGValue scalarType) value
PGColumnEnumReference (EnumReference tableName enumValues) ->
ColumnEnumReference (EnumReference tableName enumValues) ->
WithScalarType PGText <$> (maybe (pure $ PGNull PGText) parseEnumValue =<< decodeValue value)
where
parseEnumValue :: G.Name -> m PGScalarValue
@ -130,14 +137,14 @@ parsePGScalarValue columnType value = case columnType of
parsePGScalarValues
:: (MonadError QErr m)
=> PGColumnType -> [Value] -> m (WithScalarType [PGScalarValue])
=> ColumnType 'Postgres -> [Value] -> m (WithScalarType [PGScalarValue])
parsePGScalarValues columnType values = do
scalarValues <- indexedMapM (fmap pstValue . parsePGScalarValue columnType) values
pure $ WithScalarType (unsafePGColumnToRepresentation columnType) scalarValues
pure $ WithScalarType (unsafePGColumnToBackend columnType) scalarValues
parseTxtEncodedPGValue
:: (MonadError QErr m)
=> PGColumnType -> TxtEncodedPGVal -> m (WithScalarType PGScalarValue)
=> ColumnType 'Postgres -> TxtEncodedPGVal -> m (WithScalarType PGScalarValue)
parseTxtEncodedPGValue colTy val =
parsePGScalarValue colTy $ case val of
TENull -> Null

View File

@ -6,8 +6,6 @@ module Hasura.RQL.Types.Common
, relTypeToTxt
, RelInfo(..)
, ScalarType
, SQLExp
, Backend (..)
, FieldName(..)
@ -90,15 +88,7 @@ import Hasura.RQL.Types.Error
import Hasura.SQL.Backend
type family ScalarType (b :: BackendType) where
ScalarType 'Postgres = PG.PGScalarType
type family ColumnType (b :: BackendType) where
ColumnType 'Postgres = PG.PGType
type family SQLExp (b :: BackendType) where
SQLExp 'Postgres = PG.SQLExp
type Representable a = (Show a, Eq a, Hashable a, Cacheable a, NFData a)
-- | Mapping from abstract types to concrete backend representation
--
@ -111,43 +101,76 @@ type family SQLExp (b :: BackendType) where
-- dedicated type families allows us to explicitly list all typeclass requirements,
-- which simplifies the instance declarations of all IR types.
class
( Show (TableName b)
, Show (ConstraintName b)
, Show (Column b)
, Show (BasicOrderType b)
, Show (NullsOrderType b)
, Eq (TableName b)
, Eq (ConstraintName b)
, Eq (Column b)
, Eq (BasicOrderType b)
, Eq (NullsOrderType b)
( Representable (Identifier b)
, Representable (TableName b)
, Representable (FunctionName b)
, Representable (ConstraintName b)
, Representable (BasicOrderType b)
, Representable (NullsOrderType b)
, Representable (Column b)
, Representable (ScalarType b)
, Representable (SQLExpression b)
, Representable (SQLOperator b)
, Representable (XAILIKE b)
, Representable (XANILIKE b)
, Ord (TableName b)
, Ord (ScalarType b)
, Lift (TableName b)
, Lift (BasicOrderType b)
, Lift (NullsOrderType b)
, Cacheable (TableName b)
, Data (TableName b)
, Hashable (BasicOrderType b)
, Hashable (NullsOrderType b)
, Hashable (TableName b)
, NFData (TableName b)
, Data (ScalarType b)
, Data (SQLExpression b)
, FromJSON (TableName b)
, FromJSON (ScalarType b)
, FromJSON (BasicOrderType b)
, FromJSON (NullsOrderType b)
, FromJSON (Column b)
, ToJSON (TableName b)
, ToJSON (ScalarType b)
, ToJSON (BasicOrderType b)
, ToJSON (NullsOrderType b)
, ToJSON (Column b)
, FromJSONKey (Column b)
, ToJSONKey (Column b)
, ToTxt (TableName b)
, ToTxt (ScalarType b)
, Typeable b
) => Backend (b :: BackendType) where
type Identifier b :: Type
type Alias b :: Type
type TableName b :: Type
type FunctionName b :: Type
type ConstraintName b :: Type
type BasicOrderType b :: Type
type NullsOrderType b :: Type
type CountType b :: Type
type Column b :: Type
type ScalarType b :: Type
type SQLExpression b :: Type
type SQLOperator b :: Type
type XAILIKE b :: Type
type XANILIKE b :: Type
instance Backend 'Postgres where
type Identifier 'Postgres = PG.Identifier
type Alias 'Postgres = PG.Alias
type TableName 'Postgres = PG.QualifiedTable
type FunctionName 'Postgres = PG.QualifiedFunction
type ConstraintName 'Postgres = PG.ConstraintName
type BasicOrderType 'Postgres = PG.OrderType
type NullsOrderType 'Postgres = PG.NullsOrder
type CountType 'Postgres = PG.CountType
type Column 'Postgres = PG.PGCol
type ScalarType 'Postgres = PG.PGScalarType
type SQLExpression 'Postgres = PG.SQLExp
type SQLOperator 'Postgres = PG.SQLOp
type XAILIKE 'Postgres = ()
type XANILIKE 'Postgres = ()
-- instance Backend 'Mysql where
-- type XAILIKE 'MySQL = Void
-- type XANILIKE 'MySQL = Void
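The comment above the class states the intent: collect every per-backend typeclass requirement as superclass constraints, so that instance declarations on IR types only ever need a single `Backend b` constraint. A much-reduced, hypothetical sketch of that shape, with just two associated types and a small Representable-style bundle, nothing like the full class:

```haskell
{-# LANGUAGE ConstraintKinds         #-}
{-# LANGUAGE DataKinds               #-}
{-# LANGUAGE FlexibleContexts        #-}
{-# LANGUAGE KindSignatures          #-}
{-# LANGUAGE StandaloneDeriving      #-}
{-# LANGUAGE TypeFamilies            #-}
{-# LANGUAGE UndecidableInstances    #-}
{-# LANGUAGE UndecidableSuperClasses #-}

import Data.Hashable (Hashable)
import Data.Kind     (Type)

type Representable a = (Show a, Eq a, Hashable a)

data BackendType = Postgres

class ( Representable (TableName b)
      , Representable (ScalarType b)
      ) => Backend (b :: BackendType) where
  type TableName  b :: Type
  type ScalarType b :: Type

instance Backend 'Postgres where
  type TableName  'Postgres = String
  type ScalarType 'Postgres = String

-- An IR type needs only a single 'Backend b' constraint in its derived
-- instances, because the class already bundles the per-type requirements.
data SelectFrom (b :: BackendType)
  = FromTable   !(TableName b)
  | FromLiteral !(ScalarType b)

deriving instance Backend b => Show (SelectFrom b)
deriving instance Backend b => Eq   (SelectFrom b)

main :: IO ()
main = print (FromTable "article" :: SelectFrom 'Postgres)
```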
adminText :: NonEmptyText
@ -198,19 +221,28 @@ instance Q.FromCol RelType where
"array" -> Just ArrRel
_ -> Nothing
data RelInfo
-- should this be parameterized by both the source and the destination backend?
data RelInfo (b :: BackendType)
= RelInfo
{ riName :: !RelName
, riType :: !RelType
, riMapping :: !(HashMap PG.PGCol PG.PGCol)
, riRTable :: !PG.QualifiedTable
, riMapping :: !(HashMap (Column b) (Column b))
, riRTable :: !(TableName b)
, riIsManual :: !Bool
, riIsNullable :: !Bool
} deriving (Show, Eq, Generic)
instance NFData RelInfo
instance Cacheable RelInfo
instance Hashable RelInfo
$(deriveToJSON (aesonDrop 2 snakeCase) ''RelInfo)
} deriving (Generic)
deriving instance Backend b => Show (RelInfo b)
deriving instance Backend b => Eq (RelInfo b)
instance Backend b => NFData (RelInfo b)
instance Backend b => Cacheable (RelInfo b)
instance Backend b => Hashable (RelInfo b)
instance (Backend b) => FromJSON (RelInfo b) where
parseJSON = genericParseJSON $ aesonPrefix snakeCase
instance (Backend b) => ToJSON (RelInfo b) where
toJSON = genericToJSON $ aesonPrefix snakeCase
newtype FieldName
= FieldName { getFieldNameTxt :: Text }

View File

@ -136,7 +136,7 @@ import Hasura.GraphQL.Context (GQLContext, RemoteField, R
import Hasura.Incremental (Dependency, MonadDepend (..), selectKeyD)
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Common hiding (FunctionName)
import Hasura.RQL.Types.ComputedField
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Error

View File

@ -167,7 +167,7 @@ emptyCustomRootFields =
data FieldInfo (b :: BackendType)
= FIColumn !(ColumnInfo b)
| FIRelationship !RelInfo
| FIRelationship !(RelInfo b)
| FIComputedField !(ComputedFieldInfo b)
| FIRemoteRelationship !(RemoteFieldInfo b)
deriving (Generic)
@ -217,7 +217,7 @@ getCols = mapMaybe (^? _FIColumn) . M.elems
sortCols :: [ColumnInfo backend] -> [ColumnInfo backend]
sortCols = sortBy (\l r -> compare (pgiPosition l) (pgiPosition r))
getRels :: FieldInfoMap (FieldInfo backend) -> [RelInfo]
getRels :: FieldInfoMap (FieldInfo backend) -> [RelInfo backend]
getRels = mapMaybe (^? _FIRelationship) . M.elems
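The getRels (and getComputedFieldInfos) projections use the generated prisms with `mapMaybe (^? _Ctor)`. A self-contained toy version of the same idiom, with the field-info type reduced to two constructors:

```haskell
{-# LANGUAGE TemplateHaskell #-}

import           Control.Lens        (makePrisms, (^?))
import qualified Data.HashMap.Strict as M
import           Data.Maybe          (mapMaybe)

-- Toy field-info sum type; the real one also carries computed fields and
-- remote relationships.
data FieldInfo
  = FIColumn !String
  | FIRelationship !Int
  deriving (Show, Eq)

$(makePrisms ''FieldInfo)

-- Collect only the relationship entries of a field map, in the same
-- mapMaybe-over-prism style as getRels above.
getRels :: M.HashMap String FieldInfo -> [Int]
getRels = mapMaybe (^? _FIRelationship) . M.elems

main :: IO ()
main = print (getRels (M.fromList [("id", FIColumn "id"), ("author", FIRelationship 1)]))
-- [1]
```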
getComputedFieldInfos :: FieldInfoMap (FieldInfo backend) -> [ComputedFieldInfo backend]