diff --git a/CHANGELOG.md b/CHANGELOG.md
index bd5d6fb875f..8813d5cbc97 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,6 @@
## Next release
-
### Bug fixes and improvements
(Add entries here in the order of: server, console, cli, docs, others)
@@ -97,7 +96,7 @@ Read more about the session argument for computed fields in the [docs](https://h
A new `seeds` command is introduced in CLI, this will allow managing seed migrations as SQL files
#### Creating seed
-```
+```
# create a new seed file and use editor to add SQL content
hasura seed create new_table_seed
diff --git a/console/src/components/Common/Tooltip/Tooltip.tsx b/console/src/components/Common/Tooltip/Tooltip.tsx
index 042e109c61a..2115ebe4132 100644
--- a/console/src/components/Common/Tooltip/Tooltip.tsx
+++ b/console/src/components/Common/Tooltip/Tooltip.tsx
@@ -2,7 +2,6 @@ import React from 'react';
import OverlayTrigger from 'react-bootstrap/lib/OverlayTrigger';
import Tooltip from 'react-bootstrap/lib/Tooltip';
import styles from './Tooltip.scss';
-
const tooltipGen = (message: string) => {
return {message};
};
@@ -28,5 +27,4 @@ const ToolTip: React.FC = ({
)}
);
-
export default ToolTip;
diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal
index e96f57a6b0a..788c7bfbbe6 100644
--- a/server/graphql-engine.cabal
+++ b/server/graphql-engine.cabal
@@ -233,6 +233,7 @@ library
, ghc-heap-view
, directory
+ , semigroups >= 0.19.1
exposed-modules: Control.Arrow.Extended
, Control.Arrow.Trans
diff --git a/server/src-lib/Data/HashMap/Strict/InsOrd/Extended.hs b/server/src-lib/Data/HashMap/Strict/InsOrd/Extended.hs
index a124fee92e2..765bec11c29 100644
--- a/server/src-lib/Data/HashMap/Strict/InsOrd/Extended.hs
+++ b/server/src-lib/Data/HashMap/Strict/InsOrd/Extended.hs
@@ -1,25 +1,25 @@
module Data.HashMap.Strict.InsOrd.Extended
- ( OMap.elems
+ ( module OMap
, groupTuples
, groupListWith
) where
-import qualified Data.HashMap.Strict.InsOrd as OMap
+import Data.HashMap.Strict.InsOrd as OMap
+import qualified Data.List as L
import qualified Data.Sequence.NonEmpty as NE
import Data.Hashable (Hashable)
-import Data.List (foldl')
-import Prelude (Eq, Foldable, Functor, fmap, ($))
+import Prelude (Eq, Foldable, Functor, fmap, ($))
groupTuples
:: (Eq k, Hashable k, Foldable t)
=> t (k, v) -> OMap.InsOrdHashMap k (NE.NESeq v)
groupTuples =
- foldl' groupFlds OMap.empty
+ L.foldl' groupFlds OMap.empty
where
groupFlds m (k, v) =
- OMap.insertWith (\_ c -> c NE.|> v) k (NE.singleton v) m
+ OMap.insertWith (\_ c -> c NE.|> v) k (NE.singleton v) m
groupListWith
:: (Eq k, Hashable k, Foldable t, Functor t)
diff --git a/server/src-lib/Data/Sequence/NonEmpty.hs b/server/src-lib/Data/Sequence/NonEmpty.hs
index 639b4b5d4bf..3381ea7f40e 100644
--- a/server/src-lib/Data/Sequence/NonEmpty.hs
+++ b/server/src-lib/Data/Sequence/NonEmpty.hs
@@ -5,8 +5,6 @@ module Data.Sequence.NonEmpty
( NESeq
, pattern (:<||)
, pattern (:||>)
- , (<|)
- , (|>)
, singleton
, head
, tail
@@ -22,9 +20,6 @@ import Data.Aeson
import Data.Foldable
import GHC.Generics (Generic)
-infixr 5 <|
-infixl 5 |>
-
data NESeq a = NESeq
{ head :: a
, tail :: Seq.Seq a
@@ -59,12 +54,6 @@ instance ToJSON a => ToJSON (NESeq a) where
singleton :: a -> NESeq a
singleton a = NESeq a Seq.empty
-(|>) :: NESeq a -> a -> NESeq a
-NESeq h l |> v = NESeq h (l Seq.|> v)
-
-(<|) :: a -> NESeq a -> NESeq a
-v <| NESeq h l = NESeq v (h Seq.<| l)
-
toSeq :: NESeq a -> Seq.Seq a
toSeq (NESeq v l) = v Seq.<| l
diff --git a/server/src-lib/Hasura/GraphQL/Context.hs b/server/src-lib/Hasura/GraphQL/Context.hs
index 0cbf79d84fb..bb8e4eaf94d 100644
--- a/server/src-lib/Hasura/GraphQL/Context.hs
+++ b/server/src-lib/Hasura/GraphQL/Context.hs
@@ -76,9 +76,9 @@ traverseAction f = \case
RFRaw x -> pure $ RFRaw x
data QueryDB v
- = QDBSimple (RQL.AnnSimpleSelG v)
- | QDBPrimaryKey (RQL.AnnSimpleSelG v)
- | QDBAggregation (RQL.AnnAggSelG v)
+ = QDBSimple (RQL.AnnSimpleSelG v)
+ | QDBPrimaryKey (RQL.AnnSimpleSelG v)
+ | QDBAggregation (RQL.AnnAggregateSelectG v)
data ActionQuery v
= AQQuery !(RQL.AnnActionExecution v)
diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs
index 1889c767a82..0f02b0812bc 100644
--- a/server/src-lib/Hasura/GraphQL/Execute.hs
+++ b/server/src-lib/Hasura/GraphQL/Execute.hs
@@ -43,8 +43,7 @@ import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types
import Hasura.Server.Utils (RequestId, mkClientHeadersForward,
- mkSetCookieHeaders,
- userRoleHeader)
+ mkSetCookieHeaders, userRoleHeader)
import Hasura.Server.Version (HasVersion)
import Hasura.Session
@@ -142,8 +141,8 @@ getExecPlanPartial userInfo sc enableAL queryType req = do
getGCtx :: C.GQLContext
getGCtx =
case Map.lookup roleName contextMap of
- Nothing -> defaultContext
- Just gql -> gql
+ Nothing -> defaultContext
+ Just gql -> gql
-- TODO FIXME implement backend-only field access
{-
Just (RoleContext defaultGCtx maybeBackendGCtx) ->
@@ -206,7 +205,7 @@ getResolvedExecPlan
getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
enableAL sc scVer queryType httpManager reqHeaders reqUnparsed = do
planM <- liftIO $ EP.getPlan scVer (_uiRole userInfo)
- opNameM queryStr planCache
+ opNameM queryStr queryType planCache
let usrVars = _uiSession userInfo
case planM of
-- plans are only for queries and subscriptions
@@ -246,7 +245,7 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
G.TypedOperationDefinition G.OperationTypeMutation _ varDefs _ selSet -> do
-- (Here the above fragment inlining is actually executed.)
inlinedSelSet <- EI.inlineSelectionSet fragments selSet
- queryTx <- EM.convertMutationSelectionSet gCtx (_uiSession userInfo) httpManager reqHeaders
+ queryTx <- EM.convertMutationSelectionSet gCtx userInfo httpManager reqHeaders
inlinedSelSet varDefs (_grVariables reqUnparsed)
-- traverse_ (addPlanToCache . EP.RPQuery) plan
return $ MutationExecutionPlan $ queryTx
diff --git a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery/Plan.hs b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery/Plan.hs
index ebfd7c57ac5..ef03f89ef16 100644
--- a/server/src-lib/Hasura/GraphQL/Execute/LiveQuery/Plan.hs
+++ b/server/src-lib/Hasura/GraphQL/Execute/LiveQuery/Plan.hs
@@ -29,12 +29,13 @@ import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.Extended as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
+import qualified Data.HashMap.Strict.InsOrd as OMap
+import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.UUID.V4 as UUID
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
-import qualified Data.HashMap.Strict.InsOrd as OMap
-import qualified Data.Sequence as Seq
-- remove these when array encoding is merged
import qualified Database.PG.Query.PTI as PTI
@@ -43,24 +44,25 @@ import qualified PostgreSQL.Binary.Encoding as PE
import Control.Lens
import Data.UUID (UUID)
-import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.GraphQL.Parser.Schema as PS
+import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.RQL.DML.Select as DS
import qualified Hasura.SQL.DML as S
--import qualified Hasura.GraphQL.Execute.Query as GEQ
import Hasura.Db
import Hasura.EncJSON
-import Hasura.GraphQL.Parser.Column
+import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Prepare
import Hasura.GraphQL.Execute.Query
-import Hasura.RQL.Types
+import Hasura.GraphQL.Parser.Column
import Hasura.GraphQL.Resolve.Action
+import Hasura.RQL.Types
+import Hasura.Server.Version (HasVersion)
+import Hasura.Session
import Hasura.SQL.Error
import Hasura.SQL.Types
import Hasura.SQL.Value
-import Hasura.GraphQL.Context
-import Hasura.Session
-- -------------------------------------------------------------------------------------------------
-- Multiplexed queries
@@ -72,7 +74,7 @@ toSQLSelect :: SubscriptionRootFieldResolved -> S.Select
toSQLSelect = \case
RFDB (QDBPrimaryKey s) -> DS.mkSQLSelect DS.JASSingleObject s
RFDB (QDBSimple s) -> DS.mkSQLSelect DS.JASMultipleRows s
- RFDB (QDBAggregation s) -> DS.mkAggSelect s
+ RFDB (QDBAggregation s) -> DS.mkAggregateSelect s
RFAction s -> DS.mkSQLSelect DS.JASSingleObject s
-- QRFActionSelect s -> DS.mkSQLSelect DS.JASSingleObject s
-- QRFActionExecuteObject s -> DS.mkSQLSelect DS.JASSingleObject s
@@ -279,6 +281,7 @@ $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ReusableLiveQueryPlan)
buildLiveQueryPlan
:: ( MonadError QErr m
, MonadIO m
+ , HasVersion
)
=> PGExecCtx
-> UserInfo
@@ -310,7 +313,7 @@ buildLiveQueryPlan pgExecCtx userInfo unpreparedAST = do
(preparedAST, (queryVariableValues, querySyntheticVariableValues)) <- flip runStateT (mempty, Seq.empty) $
for unpreparedAST \unpreparedQuery -> do
traverseQueryRootField resolveMultiplexedValue unpreparedQuery
- >>= traverseAction (DS.traverseAnnSimpleSel resolveMultiplexedValue . resolveAsyncActionQuery userInfo)
+ >>= traverseAction (DS.traverseAnnSimpleSelect resolveMultiplexedValue . resolveAsyncActionQuery userInfo)
diff --git a/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs b/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs
index 9ec0c548334..c5d0491821c 100644
--- a/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs
+++ b/server/src-lib/Hasura/GraphQL/Execute/Mutation.hs
@@ -67,37 +67,38 @@ convertMutationRootField
, MonadIO m
, MonadError QErr m
)
- => SessionVariables
+ => UserInfo
-> HTTP.Manager
-> HTTP.RequestHeaders
-> Bool
-> MutationRootField UnpreparedValue
-> m (Either (LazyRespTx, HTTP.ResponseHeaders) RemoteField)
-convertMutationRootField usrVars manager reqHeaders stringifyNum = \case
- RFDB (MDBInsert s) -> noResponseHeaders $ convertInsert usrVars s stringifyNum
- RFDB (MDBUpdate s) -> noResponseHeaders $ convertUpdate usrVars s stringifyNum
- RFDB (MDBDelete s) -> noResponseHeaders $ convertDelete usrVars s stringifyNum
+convertMutationRootField userInfo manager reqHeaders stringifyNum = \case
+ RFDB (MDBInsert s) -> noResponseHeaders $ convertInsert userSession s stringifyNum
+ RFDB (MDBUpdate s) -> noResponseHeaders $ convertUpdate userSession s stringifyNum
+ RFDB (MDBDelete s) -> noResponseHeaders $ convertDelete userSession s stringifyNum
RFRemote remote -> pure $ Right remote
- RFAction (AMSync s) -> Left <$> first liftTx <$> resolveActionExecution s actionExecContext
- RFAction (AMAsync s) -> noResponseHeaders =<< resolveActionMutationAsync s reqHeaders usrVars
+ RFAction (AMSync s) -> Left <$> first liftTx <$> resolveActionExecution userInfo s actionExecContext
+ RFAction (AMAsync s) -> noResponseHeaders =<< resolveActionMutationAsync s reqHeaders userSession
RFRaw s -> noResponseHeaders $ pure $ encJFromJValue s
where
noResponseHeaders :: RespTx -> m (Either (LazyRespTx, HTTP.ResponseHeaders) RemoteField)
noResponseHeaders rTx = pure $ Left (liftTx rTx, [])
- actionExecContext = ActionExecContext manager reqHeaders usrVars
+ userSession = _uiSession userInfo
+ actionExecContext = ActionExecContext manager reqHeaders userSession
convertMutationSelectionSet
:: (HasVersion, MonadIO m, MonadError QErr m)
=> GQLContext
- -> SessionVariables
+ -> UserInfo
-> HTTP.Manager
-> HTTP.RequestHeaders
-> G.SelectionSet G.NoFragments G.Name
-> [G.VariableDefinition]
-> Maybe GH.VariableValues
-> m (ExecutionPlan (LazyRespTx, HTTP.ResponseHeaders) RemoteCall (G.Name, J.Value))
-convertMutationSelectionSet gqlContext usrVars manager reqHeaders fields varDefs varValsM = do
+convertMutationSelectionSet gqlContext userInfo manager reqHeaders fields varDefs varValsM = do
-- Parse the GraphQL query into the RQL AST
(unpreparedQueries, _reusability)
:: (OMap.InsOrdHashMap G.Name (MutationRootField UnpreparedValue), QueryReusability)
@@ -106,7 +107,7 @@ convertMutationSelectionSet gqlContext usrVars manager reqHeaders fields varDefs
-- Transform the RQL AST into a prepared SQL query
-- TODO pass the correct stringifyNum somewhere rather than True
- txs <- for unpreparedQueries $ convertMutationRootField usrVars manager reqHeaders True
+ txs <- for unpreparedQueries $ convertMutationRootField userInfo manager reqHeaders True
let txList = OMap.toList txs
case (mapMaybe takeTx txList, mapMaybe takeRemote txList) of
(dbPlans, []) -> do
@@ -148,9 +149,9 @@ convertMutationSelectionSet gqlContext usrVars manager reqHeaders fields varDefs
:: (G.Name, Either (LazyRespTx, HTTP.ResponseHeaders) RemoteField)
-> Maybe (G.Name, (LazyRespTx, HTTP.ResponseHeaders))
takeTx (name, Left tx) = Just (name, tx)
- takeTx _ = Nothing
+ takeTx _ = Nothing
takeRemote
:: (G.Name, Either (LazyRespTx, HTTP.ResponseHeaders) RemoteField)
-> Maybe (G.Name, RemoteField)
takeRemote (name, Right remote) = Just (name, remote)
- takeRemote _ = Nothing
+ takeRemote _ = Nothing
diff --git a/server/src-lib/Hasura/GraphQL/Execute/Plan.hs b/server/src-lib/Hasura/GraphQL/Execute/Plan.hs
index 7d789639ff2..261ac389c35 100644
--- a/server/src-lib/Hasura/GraphQL/Execute/Plan.hs
+++ b/server/src-lib/Hasura/GraphQL/Execute/Plan.hs
@@ -19,6 +19,7 @@ import qualified Data.Aeson.TH as J
import qualified Hasura.Cache as Cache
import qualified Hasura.GraphQL.Execute.LiveQuery as LQ
import qualified Hasura.GraphQL.Execute.Query as EQ
+import qualified Hasura.GraphQL.Execute.Types as ET
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import Hasura.RQL.Types
import Hasura.Session
@@ -29,17 +30,19 @@ data PlanId
, _piRole :: !RoleName
, _piOperationName :: !(Maybe GH.OperationName)
, _piQuery :: !GH.GQLQueryText
+ , _piQueryType :: !ET.GraphQLQueryType
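+    -- ^ Whether the plan was built against the hasura or the relay schema; the plan cache is keyed on this, so plans for the two schemas are never shared.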
} deriving (Show, Eq, Ord, Generic)
instance Hashable PlanId
instance J.ToJSON PlanId where
- toJSON (PlanId scVer rn opNameM query) =
+ toJSON (PlanId scVer rn opNameM query queryType) =
J.object
[ "schema_cache_version" J..= scVer
, "role" J..= rn
, "operation" J..= opNameM
, "query" J..= query
+ , "query_type" J..= queryType
]
newtype PlanCache
@@ -68,19 +71,19 @@ initPlanCache options =
getPlan
:: SchemaCacheVer -> RoleName -> Maybe GH.OperationName -> GH.GQLQueryText
- -> PlanCache -> IO (Maybe ReusablePlan)
-getPlan schemaVer rn opNameM q (PlanCache planCache) =
+ -> ET.GraphQLQueryType -> PlanCache -> IO (Maybe ReusablePlan)
+getPlan schemaVer rn opNameM q queryType (PlanCache planCache) =
Cache.lookup planId planCache
where
- planId = PlanId schemaVer rn opNameM q
+ planId = PlanId schemaVer rn opNameM q queryType
addPlan
:: SchemaCacheVer -> RoleName -> Maybe GH.OperationName -> GH.GQLQueryText
- -> ReusablePlan -> PlanCache -> IO ()
-addPlan schemaVer rn opNameM q queryPlan (PlanCache planCache) =
+ -> ReusablePlan -> ET.GraphQLQueryType -> PlanCache -> IO ()
+addPlan schemaVer rn opNameM q queryPlan queryType (PlanCache planCache) =
Cache.insert planId queryPlan planCache
where
- planId = PlanId schemaVer rn opNameM q
+ planId = PlanId schemaVer rn opNameM q queryType
clearPlanCache :: PlanCache -> IO ()
clearPlanCache (PlanCache planCache) =
diff --git a/server/src-lib/Hasura/GraphQL/Execute/Query.hs b/server/src-lib/Hasura/GraphQL/Execute/Query.hs
index 868e4fe1bbc..907e39f3ca8 100644
--- a/server/src-lib/Hasura/GraphQL/Execute/Query.hs
+++ b/server/src-lib/Hasura/GraphQL/Execute/Query.hs
@@ -37,9 +37,9 @@ import Hasura.Prelude
import Hasura.RQL.DML.RemoteJoin
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.Types
+import Hasura.Session
import Hasura.SQL.Types
import Hasura.SQL.Value
-import Hasura.Session
import qualified Hasura.RQL.DML.Select as DS
@@ -165,7 +165,7 @@ irToRootFieldPlan vars prepped = \case
QDBPrimaryKey s -> mkPGPlan (DS.selectQuerySQL DS.JASSingleObject) s
QDBAggregation s ->
let (annAggSel, aggRemoteJoins) = getRemoteJoinsAggregateSelect s
- in PGPlan (DS.selectAggQuerySQL annAggSel) vars prepped aggRemoteJoins
+ in PGPlan (DS.selectAggregateQuerySQL annAggSel) vars prepped aggRemoteJoins
where
mkPGPlan f simpleSel =
let (simpleSel',remoteJoins) = getRemoteJoins simpleSel
@@ -182,9 +182,9 @@ traverseQueryRootField f =
where
f' :: QueryDB a -> f (QueryDB b)
f' = \case
- QDBSimple s -> QDBSimple <$> DS.traverseAnnSimpleSel f s
- QDBPrimaryKey s -> QDBPrimaryKey <$> DS.traverseAnnSimpleSel f s
- QDBAggregation s -> QDBAggregation <$> DS.traverseAnnAggSel f s
+ QDBSimple s -> QDBSimple <$> DS.traverseAnnSimpleSelect f s
+ QDBPrimaryKey s -> QDBPrimaryKey <$> DS.traverseAnnSimpleSelect f s
+ QDBAggregation s -> QDBAggregation <$> DS.traverseAnnAggregateSelect f s
convertQuerySelSet
:: forall m. (HasVersion, MonadError QErr m, MonadIO m)
@@ -265,9 +265,9 @@ convertQuerySelSet gqlContext userInfo manager reqHeaders fields varDefs varVals
:: ActionQuery UnpreparedValue -> StateT PlanningSt m ActionQueryPlan
convertActionQuery = \case
AQQuery s -> (AQPQuery . fst) <$>
- lift (resolveActionExecution s $ ActionExecContext manager reqHeaders usrVars)
+ lift (resolveActionExecution userInfo s $ ActionExecContext manager reqHeaders usrVars)
AQAsync s -> AQPAsyncQuery <$>
- DS.traverseAnnSimpleSel prepareWithPlan (resolveAsyncActionQuery userInfo s)
+ DS.traverseAnnSimpleSelect prepareWithPlan (resolveAsyncActionQuery userInfo s)
-- use the existing plan and new variables to create a pg query
queryOpFromPlan
@@ -341,3 +341,15 @@ mkGeneratedSqlMap resolved =
RRSql ps -> Just ps
RRActionQuery _ -> Nothing
in (alias, res)
+
+-- | The GraphQL schema a query is issued against: the regular Hasura schema or the Relay schema.
+data GraphQLQueryType
+ = QueryHasura
+ | QueryRelay
+ deriving (Show, Eq, Ord, Generic)
+instance Hashable GraphQLQueryType
+
+instance J.ToJSON GraphQLQueryType where
+ toJSON = \case
+ QueryHasura -> "hasura"
+ QueryRelay -> "relay"
diff --git a/server/src-lib/Hasura/GraphQL/Explain.hs b/server/src-lib/Hasura/GraphQL/Explain.hs
index cdfff117c46..47458da2b36 100644
--- a/server/src-lib/Hasura/GraphQL/Explain.hs
+++ b/server/src-lib/Hasura/GraphQL/Explain.hs
@@ -11,13 +11,13 @@ import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.EncJSON
+import Hasura.GraphQL.Resolve.Action
import Hasura.Prelude
import Hasura.RQL.DML.Internal
import Hasura.RQL.Types
+import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value
-import Hasura.Server.Version (HasVersion)
-import Hasura.GraphQL.Resolve.Action
import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.LiveQuery as E
@@ -26,11 +26,12 @@ import qualified Hasura.SQL.DML as S
data GQLExplain
= GQLExplain
- { _gqeQuery :: !GH.GQLReqParsed
- , _gqeUser :: !(Maybe (Map.HashMap Text Text))
+ { _gqeQuery :: !GH.GQLReqParsed
+ , _gqeUser :: !(Maybe (Map.HashMap Text Text))
+ , _gqeIsRelay :: !(Maybe Bool)
} deriving (Show, Eq)
-$(J.deriveJSON (J.aesonDrop 4 J.camelCase){J.omitNothingFields=True}
+$(J.deriveJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True}
''GQLExplain
)
@@ -117,24 +118,26 @@ explainGQLQuery
-> QueryActionExecuter
-> GQLExplain
-> m EncJSON
-explainGQLQuery pgExecCtx sc sqlGenCtx enableAL actionExecuter (GQLExplain query userVarsRaw) = do
+explainGQLQuery pgExecCtx sc sqlGenCtx enableAL actionExecuter (GQLExplain query userVarsRaw maybeIsRelay) = do
+ userInfo <- mkUserInfo (URBFromSessionVariablesFallback adminRoleName) UAdminSecretSent sessionVariables
(execPlan, queryReusability) <- runReusabilityT $
- E.getExecPlanPartial userInfo sc enableAL query
+ E.getExecPlanPartial userInfo sc queryType enableAL query
(gCtx, rootSelSet) <- case execPlan of
E.GExPHasura (gCtx, rootSelSet) ->
return (gCtx, rootSelSet)
- E.GExPRemote _ _ ->
+ E.GExPRemote{} ->
throw400 InvalidParams "only hasura queries can be explained"
case rootSelSet of
GV.RQuery selSet ->
- runInTx $ encJFromJValue <$> traverse (explainField userInfo gCtx sqlGenCtx actionExecuter)
- (toList selSet)
+ runInTx $ encJFromJValue . map snd <$>
+ GV.traverseObjectSelectionSet selSet (explainField userInfo gCtx sqlGenCtx actionExecuter)
GV.RMutation _ ->
throw400 InvalidParams "only queries can be explained"
- GV.RSubscription rootField -> do
- (plan, _) <- E.getSubsOp pgExecCtx gCtx sqlGenCtx userInfo queryReusability actionExecuter rootField
+ GV.RSubscription fields -> do
+ (plan, _) <- E.getSubsOp pgExecCtx gCtx sqlGenCtx userInfo
+ queryReusability actionExecuter fields
runInTx $ encJFromJValue <$> E.explainLiveQueryPlan plan
where
- usrVars = mkUserVars $ maybe [] Map.toList userVarsRaw
- userInfo = mkUserInfo (fromMaybe adminRole $ roleFromVars usrVars) usrVars
+ queryType = bool E.QueryHasura E.QueryRelay $ fromMaybe False maybeIsRelay
+ sessionVariables = mkSessionVariablesText $ maybe [] Map.toList userVarsRaw
runInTx = liftEither <=< liftIO . runExceptT . runLazyTx pgExecCtx Q.ReadOnly
diff --git a/server/src-lib/Hasura/GraphQL/LegacySchema.hs b/server/src-lib/Hasura/GraphQL/LegacySchema.hs
index 6e80216c7be..0e4dd6cd33d 100644
--- a/server/src-lib/Hasura/GraphQL/LegacySchema.hs
+++ b/server/src-lib/Hasura/GraphQL/LegacySchema.hs
@@ -236,7 +236,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
in case riType relInfo of
ObjRel -> [relFld]
ArrRel -> bool [relFld] [relFld, aggRelFld] allowAgg
- SFComputedField cf -> pure
+ SFComputedField cf -> pure
( (ty, mkComputedFieldName $ _cfName cf)
, RFComputedField cf
)
diff --git a/server/src-lib/Hasura/GraphQL/LegacySchema/Builder.hs b/server/src-lib/Hasura/GraphQL/LegacySchema/Builder.hs
index af486bab86b..b4d4230db8f 100644
--- a/server/src-lib/Hasura/GraphQL/LegacySchema/Builder.hs
+++ b/server/src-lib/Hasura/GraphQL/LegacySchema/Builder.hs
@@ -8,6 +8,8 @@ module Hasura.GraphQL.Schema.Builder
, addFieldsToTyAgg
, addTypeInfoToTyAgg
, addScalarToTyAgg
+ , QueryRootFieldMap
+ , MutationRootFieldMap
, RootFields(..)
, addQueryField
, addMutationField
@@ -57,11 +59,14 @@ instance Semigroup TyAgg where
instance Monoid TyAgg where
mempty = TyAgg Map.empty Map.empty Set.empty Map.empty
+type QueryRootFieldMap = Map.HashMap G.Name (QueryCtx, ObjFldInfo)
+type MutationRootFieldMap = Map.HashMap G.Name (MutationCtx, ObjFldInfo)
+
-- | A role-specific mapping from root field names to allowed operations.
data RootFields
= RootFields
- { _rootQueryFields :: !(Map.HashMap G.Name (QueryCtx, ObjFldInfo))
- , _rootMutationFields :: !(Map.HashMap G.Name (MutationCtx, ObjFldInfo))
+ { _rootQueryFields :: !QueryRootFieldMap
+ , _rootMutationFields :: !MutationRootFieldMap
} deriving (Show, Eq)
$(makeLenses ''RootFields)
diff --git a/server/src-lib/Hasura/GraphQL/LegacySchema/Function.hs b/server/src-lib/Hasura/GraphQL/LegacySchema/Function.hs
index f3349635069..426fe22a08f 100644
--- a/server/src-lib/Hasura/GraphQL/LegacySchema/Function.hs
+++ b/server/src-lib/Hasura/GraphQL/LegacySchema/Function.hs
@@ -2,14 +2,17 @@ module Hasura.GraphQL.Schema.Function
( procFuncArgs
, mkFuncArgsInp
, mkFuncQueryFld
+ , mkFuncQueryConnectionFld
, mkFuncAggQueryFld
, mkFuncArgsTy
+ , mkFuncArgItemSeq
) where
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
+import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Validate.Types
@@ -92,6 +95,20 @@ mkFuncQueryFld funInfo descM =
ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy retTable
+mkFuncQueryConnectionFld
+ :: FunctionInfo -> Maybe PGDescription -> ObjFldInfo
+mkFuncQueryConnectionFld funInfo descM =
+ mkHsraObjFldInfo (Just desc) fldName (mkFuncArgs funInfo) ty
+ where
+ retTable = fiReturnType funInfo
+ funcName = fiName funInfo
+
+ desc = mkDescriptionWith descM $ "execute function " <> funcName
+ <<> " which returns " <>> retTable
+ fldName = qualObjectToName funcName <> "_connection"
+
+ ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableConnectionTy retTable
+
{-
function_aggregate(
@@ -118,3 +135,15 @@ mkFuncAggQueryFld funInfo descM =
fldName = qualObjectToName funcName <> "_aggregate"
ty = G.toGT $ G.toNT $ mkTableAggTy retTable
+
+
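+-- | Build the sequence of a function's argument items, as used when constructing the function's 'FuncQOpCtx'.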
+mkFuncArgItemSeq :: FunctionInfo -> Seq.Seq (InputArgument FunctionArgItem)
+mkFuncArgItemSeq functionInfo =
+ let inputArgs = fiInputArgs functionInfo
+ in Seq.fromList $ procFuncArgs inputArgs nameFn resultFn
+ where
+ nameFn = \case
+ IAUserProvided fa -> faName fa
+ IASessionVariables name -> Just name
+ resultFn arg gName = flip fmap arg $
+ \fa -> FunctionArgItem (G.Name gName) (faName fa) (faHasDefault fa)
diff --git a/server/src-lib/Hasura/GraphQL/NormalForm.hs b/server/src-lib/Hasura/GraphQL/NormalForm.hs
new file mode 100644
index 00000000000..58e20980168
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/NormalForm.hs
@@ -0,0 +1,300 @@
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE TypeFamilyDependencies #-}
+module Hasura.GraphQL.NormalForm
+ ( Selection(..)
+ , NormalizedSelection
+ , NormalizedSelectionSet
+ , NormalizedField
+ , SelectionSet(..)
+ , RootSelectionSet(..)
+ -- , toGraphQLOperation
+ , ArgsMap
+ , Field(..)
+ , Typename(..)
+ , IsField(..)
+ , toField
+ , AliasedFields(..)
+ , asObjectSelectionSet
+ , ObjectSelectionSet(..)
+ , ObjectSelectionSetMap
+ , traverseObjectSelectionSet
+ , InterfaceSelectionSet
+ , asInterfaceSelectionSet
+ , getMemberSelectionSet
+ , UnionSelectionSet
+ , ScopedSelectionSet(..)
+ , emptyScopedSelectionSet
+ , getUnionSelectionSet
+ , getInterfaceSelectionSet
+ , getObjectSelectionSet
+
+ , AnnInpVal(..)
+ , AnnGValue(..)
+ , AnnGObject
+ , AnnGEnumValue(..)
+ , hasNullVal
+ , getAnnInpValKind
+
+ , toGraphQLField
+ , toGraphQLSelectionSet
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import qualified Hasura.RQL.Types.Column as RQL
+import qualified Hasura.RQL.Types.Error as RQL
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+
+data Selection f s
+ = SelectionField !G.Alias !f
+ | SelectionInlineFragmentSpread !s
+ | SelectionFragmentSpread !G.Name !s
+ deriving (Show, Eq)
+
+-- | What a processed G.SelectionSet should look like
+type family NormalizedSelectionSet a = s | s -> a
+
+-- | What a processed G.Field should look like
+type family NormalizedField a
+
+type NormalizedSelection a
+ = Selection (NormalizedField a) (NormalizedSelectionSet a)
+
+-- | Ordered fields
+newtype AliasedFields f
+ = AliasedFields { unAliasedFields :: OMap.InsOrdHashMap G.Alias f }
+ deriving (Show, Eq, Functor, Foldable, Traversable, Semigroup)
+
+newtype ObjectSelectionSet
+ = ObjectSelectionSet { unObjectSelectionSet :: AliasedFields Field }
+ deriving (Show, Eq, Semigroup)
+
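+-- | Traverse the fields of an object selection set in order, pairing each result with its alias text.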
+traverseObjectSelectionSet
+ :: (Monad m) => ObjectSelectionSet -> (Field -> m a) -> m [(Text, a)]
+traverseObjectSelectionSet selectionSet f =
+ forM (OMap.toList $ unAliasedFields $ unObjectSelectionSet selectionSet) $
+ \(alias, field) -> (G.unName $ G.unAlias alias,) <$> f field
+
+type ObjectSelectionSetMap
+ = Map.HashMap G.NamedType ObjectSelectionSet
+
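+-- | A stand-in for the __typename field, used where only __typename can be selected (e.g. directly on a union); see its 'IsField' instance.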
+data Typename = Typename
+ deriving (Show, Eq, Generic)
+
+data ScopedSelectionSet f
+ = ScopedSelectionSet
+ { _sssBaseSelectionSet :: !(AliasedFields f)
+ -- ^ Fields that aren't explicitly defined for member types
+ , _sssMemberSelectionSets :: !ObjectSelectionSetMap
+ -- ^ SelectionSets of individual member types
+ } deriving (Show, Eq, Generic)
+
+
+
+emptyScopedSelectionSet :: ScopedSelectionSet f
+emptyScopedSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) mempty
+
+type InterfaceSelectionSet = ScopedSelectionSet Field
+
+type UnionSelectionSet = ScopedSelectionSet Typename
+
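+-- | The selection set of an entire operation, tagged with the operation type it belongs to.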
+data RootSelectionSet
+ = RQuery !ObjectSelectionSet
+ | RMutation !ObjectSelectionSet
+ | RSubscription !ObjectSelectionSet
+ deriving (Show, Eq)
+
+-- toGraphQLOperation :: RootSelectionSet -> G.ExecutableDefinition
+-- toGraphQLOperation = \case
+-- RQuery selectionSet ->
+-- mkExecutableDefinition G.OperationTypeQuery $
+-- toGraphQLSelectionSet $ SelectionSetObject selectionSet
+-- RMutation selectionSet ->
+-- mkExecutableDefinition G.OperationTypeQuery $
+-- toGraphQLSelectionSet $ SelectionSetObject selectionSet
+-- RSubscription opDef _ ->
+-- G.ExecutableDefinitionOperation $ G.OperationDefinitionTyped opDef
+-- where
+-- mkExecutableDefinition operationType selectionSet =
+-- G.ExecutableDefinitionOperation $ G.OperationDefinitionTyped $
+-- G.TypedOperationDefinition
+-- { G._todName = Nothing -- TODO, store the name too?
+-- , G._todDirectives = []
+-- , G._todType = operationType
+-- , G._todVariableDefinitions = []
+-- , G._todSelectionSet = selectionSet
+-- }
+
+
+data SelectionSet
+ = SelectionSetObject !ObjectSelectionSet
+ | SelectionSetUnion !UnionSelectionSet
+ | SelectionSetInterface !InterfaceSelectionSet
+ | SelectionSetNone
+ -- ^ in cases of enums and scalars
+ deriving (Show, Eq)
+
+getObjectSelectionSet :: SelectionSet -> Maybe ObjectSelectionSet
+getObjectSelectionSet = \case
+ SelectionSetObject s -> pure s
+ _ -> Nothing
+
+asObjectSelectionSet
+ :: (MonadError RQL.QErr m) => SelectionSet -> m ObjectSelectionSet
+asObjectSelectionSet selectionSet =
+ onNothing (getObjectSelectionSet selectionSet) $
+ RQL.throw500 "expecting ObjectSelectionSet"
+
+getUnionSelectionSet :: SelectionSet -> Maybe UnionSelectionSet
+getUnionSelectionSet = \case
+ SelectionSetUnion s -> pure s
+ _ -> Nothing
+
+getInterfaceSelectionSet :: SelectionSet -> Maybe InterfaceSelectionSet
+getInterfaceSelectionSet = \case
+ SelectionSetInterface s -> pure s
+ _ -> Nothing
+
+asInterfaceSelectionSet
+ :: (MonadError RQL.QErr m) => SelectionSet -> m InterfaceSelectionSet
+asInterfaceSelectionSet selectionSet =
+ onNothing (getInterfaceSelectionSet selectionSet) $
+ RQL.throw500 "expecting InterfaceSelectionSet"
+
+type ArgsMap = Map.HashMap G.Name AnnInpVal
+
+data Field
+ = Field
+ { _fName :: !G.Name
+ , _fType :: !G.NamedType
+ , _fArguments :: !ArgsMap
+ , _fSelSet :: !SelectionSet
+ } deriving (Eq, Show)
+
+toGraphQLField :: G.Alias -> Field -> G.Field
+toGraphQLField alias Field{..} =
+ G.Field
+ { G._fName = _fName
+ , G._fArguments = [] -- TODO
+ , G._fDirectives = []
+ , G._fAlias = Just alias
+ , G._fSelectionSet = toGraphQLSelectionSet _fSelSet
+ }
+
+toGraphQLSelectionSet :: SelectionSet -> G.SelectionSet
+toGraphQLSelectionSet = \case
+ SelectionSetObject selectionSet -> fromSelectionSet selectionSet
+ SelectionSetInterface selectionSet -> fromScopedSelectionSet selectionSet
+ SelectionSetUnion selectionSet -> fromScopedSelectionSet selectionSet
+ SelectionSetNone -> mempty
+ where
+ fromAliasedFields :: (IsField f) => AliasedFields f -> G.SelectionSet
+ fromAliasedFields =
+ map (G.SelectionField . uncurry toGraphQLField) .
+ OMap.toList . fmap toField . unAliasedFields
+ fromSelectionSet =
+ fromAliasedFields . unObjectSelectionSet
+ toInlineSelection typeName =
+ G.SelectionInlineFragment . G.InlineFragment (Just typeName) mempty .
+ fromSelectionSet
+ fromScopedSelectionSet (ScopedSelectionSet base specific) =
+ map (uncurry toInlineSelection) (Map.toList specific) <> fromAliasedFields base
+
+-- $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
+-- ''Field
+-- )
+
+-- $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
+-- ''InterfaceSelectionSet
+-- )
+
+-- $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
+-- ''SelectionSet
+-- )
+
+class IsField f where
+ getFieldName :: f -> G.Name
+ getFieldType :: f -> G.NamedType
+ getFieldArguments :: f -> ArgsMap
+ getFieldSelectionSet :: f -> SelectionSet
+
+toField :: (IsField f) => f -> Field
+toField f =
+ Field (getFieldName f) (getFieldType f)
+ (getFieldArguments f) (getFieldSelectionSet f)
+
+instance IsField Field where
+ getFieldName = _fName
+ getFieldType = _fType
+ getFieldArguments = _fArguments
+ getFieldSelectionSet = _fSelSet
+
+instance IsField Typename where
+ getFieldName _ = "__typename"
+ getFieldType _ = G.NamedType "String"
+ getFieldArguments _ = mempty
+ getFieldSelectionSet _ = SelectionSetNone
+
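+-- | The selection set that applies to a given member type of an interface or union: its specific selection set if one exists, otherwise the base fields shared by all members.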
+getMemberSelectionSet
+ :: IsField f
+ => G.NamedType -> ScopedSelectionSet f -> ObjectSelectionSet
+getMemberSelectionSet namedType (ScopedSelectionSet {..}) =
+ fromMaybe (ObjectSelectionSet (fmap toField _sssBaseSelectionSet)) $
+ Map.lookup namedType $ _sssMemberSelectionSets
+
+data AnnInpVal
+ = AnnInpVal
+ { _aivType :: !G.GType
+ , _aivVariable :: !(Maybe G.Variable)
+ , _aivValue :: !AnnGValue
+ } deriving (Show, Eq)
+
+type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal
+
+-- | See 'EnumValuesInfo' for information about what these cases mean.
+data AnnGEnumValue
+ = AGESynthetic !(Maybe G.EnumValue)
+ | AGEReference !RQL.EnumReference !(Maybe RQL.EnumValue)
+ deriving (Show, Eq)
+
+data AnnGValue
+ = AGScalar !PGScalarType !(Maybe PGScalarValue)
+ | AGEnum !G.NamedType !AnnGEnumValue
+ | AGObject !G.NamedType !(Maybe AnnGObject)
+ | AGArray !G.ListType !(Maybe [AnnInpVal])
+ deriving (Show, Eq)
+
+$(J.deriveToJSON (J.aesonDrop 4 J.camelCase){J.omitNothingFields=True}
+ ''AnnInpVal
+ )
+
+instance J.ToJSON AnnGValue where
+ -- toJSON (AGScalar ty valM) =
+ toJSON = const J.Null
+ -- J.
+ -- J.toJSON [J.toJSON ty, J.toJSON valM]
+
+hasNullVal :: AnnGValue -> Bool
+hasNullVal = \case
+ AGScalar _ Nothing -> True
+ AGEnum _ (AGESynthetic Nothing) -> True
+ AGEnum _ (AGEReference _ Nothing) -> True
+ AGObject _ Nothing -> True
+ AGArray _ Nothing -> True
+ _ -> False
+
+getAnnInpValKind :: AnnGValue -> Text
+getAnnInpValKind = \case
+ AGScalar _ _ -> "scalar"
+ AGEnum _ _ -> "enum"
+ AGObject _ _ -> "object"
+ AGArray _ _ -> "array"
diff --git a/server/src-lib/Hasura/GraphQL/RelaySchema.hs b/server/src-lib/Hasura/GraphQL/RelaySchema.hs
new file mode 100644
index 00000000000..a8c5ff663e8
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/RelaySchema.hs
@@ -0,0 +1,407 @@
+module Hasura.GraphQL.RelaySchema where
+
+import Control.Lens.Extended hiding (op)
+
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashSet as Set
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Context
+import Hasura.GraphQL.Resolve.Types
+import Hasura.GraphQL.Validate.Types
+import Hasura.Prelude
+import Hasura.RQL.Types
+import Hasura.Server.Utils (duplicates)
+import Hasura.Session
+import Hasura.SQL.Types
+
+import Hasura.GraphQL.Schema
+import Hasura.GraphQL.Schema.BoolExp
+import Hasura.GraphQL.Schema.Builder
+import Hasura.GraphQL.Schema.Common
+import Hasura.GraphQL.Schema.Function
+import Hasura.GraphQL.Schema.OrderBy
+import Hasura.GraphQL.Schema.Select
+
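+-- | The Relay 'Node' interface: it carries only the globally unique 'id' field and is implemented by every table exposed in the Relay schema.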
+mkNodeInterface :: [QualifiedTable] -> IFaceTyInfo
+mkNodeInterface relayTableNames =
+ let description = G.Description "An object with globally unique ID"
+ in mkIFaceTyInfo (Just description) nodeType (mapFromL _fiName [idField]) $
+ Set.fromList $ map mkTableTy relayTableNames
+ where
+ idField =
+ let description = G.Description "A globally unique identifier"
+ in mkHsraObjFldInfo (Just description) "id" mempty nodeIdType
+
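+-- | Build the role-wise GraphQL contexts for the Relay schema. Only non-system tables with a valid name and a primary key are exposed.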
+mkRelayGCtxMap
+ :: forall m. (MonadError QErr m)
+ => TableCache -> FunctionCache -> m GCtxMap
+mkRelayGCtxMap tableCache functionCache = do
+ typesMapL <- mapM (mkRelayGCtxMapTable tableCache functionCache) relayTables
+ typesMap <- combineTypes typesMapL
+ let gCtxMap = flip Map.map typesMap $
+ \(ty, flds, insCtx) -> mkGCtx ty flds insCtx
+ pure $ Map.map (flip RoleContext Nothing) gCtxMap
+ where
+ relayTables =
+ filter (tableFltr . _tiCoreInfo) $ Map.elems tableCache
+
+ tableFltr ti =
+ not (isSystemDefined $ _tciSystemDefined ti)
+ && isValidObjectName (_tciName ti)
+ && isJust (_tciPrimaryKey ti)
+
+ combineTypes
+ :: [Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap)]
+ -> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
+ combineTypes maps = do
+ let listMap = foldr (Map.unionWith (++) . Map.map pure) mempty maps
+ flip Map.traverseWithKey listMap $ \roleName typeList -> do
+ let relayTableNames = map (_tciName . _tiCoreInfo) relayTables
+ tyAgg = addTypeInfoToTyAgg
+ (TIIFace $ mkNodeInterface relayTableNames) $
+ mconcat $ map (^. _1) typeList
+ insCtx = mconcat $ map (^. _3) typeList
+ rootFields <- combineRootFields roleName $ map (^. _2) typeList
+ pure (tyAgg, rootFields, insCtx)
+
+ combineRootFields :: RoleName -> [RootFields] -> m RootFields
+ combineRootFields roleName rootFields = do
+ let duplicateQueryFields = duplicates $
+ concatMap (Map.keys . _rootQueryFields) rootFields
+ duplicateMutationFields = duplicates $
+ concatMap (Map.keys . _rootMutationFields) rootFields
+
+ -- TODO: The following exception should result in inconsistency
+ when (not $ null duplicateQueryFields) $
+ throw400 Unexpected $ "following query root fields are duplicated: "
+ <> showNames duplicateQueryFields
+
+ when (not $ null duplicateMutationFields) $
+ throw400 Unexpected $ "following mutation root fields are duplicated: "
+ <> showNames duplicateMutationFields
+
+ pure $ mconcat $ mkNodeQueryRootFields roleName relayTables : rootFields
+
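+-- | Build the per-role type aggregates, root fields and insert contexts for a single table of the Relay schema.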
+mkRelayGCtxMapTable
+ :: (MonadError QErr m)
+ => TableCache
+ -> FunctionCache
+ -> TableInfo
+ -> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
+mkRelayGCtxMapTable tableCache funcCache tabInfo = do
+ m <- flip Map.traverseWithKey rolePerms $
+ mkRelayGCtxRole tableCache tn descM fields primaryKey validConstraints tabFuncs viewInfo customConfig
+ adminSelFlds <- mkAdminSelFlds fields tableCache
+ adminInsCtx <- mkAdminInsCtx tableCache fields
+ let adminCtx = mkRelayTyAggRole tn descM (Just (cols, icRelations adminInsCtx))
+ (Just (True, adminSelFlds)) (Just cols) (Just ())
+ primaryKey validConstraints viewInfo tabFuncs
+ adminInsCtxMap = Map.singleton tn adminInsCtx
+ return $ Map.insert adminRoleName (adminCtx, adminRootFlds, adminInsCtxMap) m
+ where
+ TableInfo coreInfo rolePerms _ = tabInfo
+ TableCoreInfo tn descM _ fields primaryKey _ _ viewInfo _ customConfig = coreInfo
+ validConstraints = mkValidConstraints $ map _cName (tciUniqueOrPrimaryKeyConstraints coreInfo)
+ tabFuncs = filter (isValidObjectName . fiName) $
+ getFuncsOfTable tn funcCache
+ cols = getValidCols fields
+ adminRootFlds =
+ let insertPermDetails = Just ([], True)
+ selectPermDetails = Just (noFilter, Nothing, [], True)
+ updatePermDetails = Just (getValidCols fields, mempty, noFilter, Nothing, [])
+ deletePermDetails = Just (noFilter, [])
+
+ queryFields = getRelayQueryRootFieldsRole tn primaryKey fields tabFuncs
+ selectPermDetails
+ mutationFields = getMutationRootFieldsRole tn primaryKey
+ validConstraints fields insertPermDetails
+ selectPermDetails updatePermDetails
+ deletePermDetails viewInfo customConfig
+ in RootFields queryFields mutationFields
+
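+-- | Build the type aggregate, root fields and insert context map for one role of a table, derived from that role's permissions.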
+mkRelayGCtxRole
+ :: (MonadError QErr m)
+ => TableCache
+ -> QualifiedTable
+ -> Maybe PGDescription
+ -> FieldInfoMap FieldInfo
+ -> Maybe (PrimaryKey PGColumnInfo)
+ -> [ConstraintName]
+ -> [FunctionInfo]
+ -> Maybe ViewInfo
+ -> TableConfig
+ -> RoleName
+ -> RolePermInfo
+ -> m (TyAgg, RootFields, InsCtxMap)
+mkRelayGCtxRole tableCache tn descM fields primaryKey constraints funcs viM tabConfigM role permInfo = do
+ selPermM <- mapM (getSelPerm tableCache fields role) selM
+ tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do
+ ctx <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo
+ let permCols = flip getColInfos allCols $ Set.toList $ ipiCols ipi
+ return (ctx, (permCols, icRelations ctx))
+ let insPermM = snd <$> tabInsInfoM
+ insCtxM = fst <$> tabInsInfoM
+ updColsM = filterColumnFields . upiCols <$> _permUpd permInfo
+ tyAgg = mkRelayTyAggRole tn descM insPermM selPermM updColsM
+ (void $ _permDel permInfo) primaryKey constraints viM funcs
+ queryRootFlds = getRelayQueryRootFieldsRole tn primaryKey fields funcs
+ (mkSel <$> _permSel permInfo)
+ mutationRootFlds = getMutationRootFieldsRole tn primaryKey constraints fields
+ (mkIns <$> insM) (mkSel <$> selM)
+ (mkUpd <$> updM) (mkDel <$> delM) viM tabConfigM
+ insCtxMap = maybe Map.empty (Map.singleton tn) insCtxM
+ return (tyAgg, RootFields queryRootFlds mutationRootFlds, insCtxMap)
+ where
+ RolePermInfo insM selM updM delM = permInfo
+ allCols = getCols fields
+ filterColumnFields allowedSet =
+ filter ((`Set.member` allowedSet) . pgiColumn) $ getValidCols fields
+ mkIns i = (ipiRequiredHeaders i, isJust updM)
+ mkSel s = ( spiFilter s, spiLimit s
+ , spiRequiredHeaders s, spiAllowAgg s
+ )
+ mkUpd u = ( flip getColInfos allCols $ Set.toList $ upiCols u
+ , upiSet u
+ , upiFilter u
+ , upiCheck u
+ , upiRequiredHeaders u
+ )
+ mkDel d = (dpiFilter d, dpiRequiredHeaders d)
+
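+-- | Relay variant of the role-specific type aggregate: besides the table object it also generates the table's connection and edge objects and exposes the node 'id' field.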
+mkRelayTyAggRole
+ :: QualifiedTable
+ -> Maybe PGDescription
+ -- ^ Postgres description
+ -> Maybe ([PGColumnInfo], RelationInfoMap)
+ -- ^ insert permission
+ -> Maybe (Bool, [SelField])
+ -- ^ select permission
+ -> Maybe [PGColumnInfo]
+ -- ^ update cols
+ -> Maybe ()
+ -- ^ delete cols
+ -> Maybe (PrimaryKey PGColumnInfo)
+ -> [ConstraintName]
+ -- ^ constraints
+ -> Maybe ViewInfo
+ -> [FunctionInfo]
+ -- ^ all functions
+ -> TyAgg
+mkRelayTyAggRole tn descM insPermM selPermM updColsM delPermM pkeyCols constraints viM funcs =
+ let (mutationTypes, mutationFields) =
+ mkMutationTypesAndFieldsRole tn insPermM selFldsM updColsM delPermM pkeyCols constraints viM
+ in TyAgg (mkTyInfoMap allTypes <> mutationTypes)
+ (fieldMap <> mutationFields)
+ scalars ordByCtx
+ where
+ ordByCtx = fromMaybe Map.empty ordByCtxM
+
+ funcInpArgTys = bool [] (map TIInpObj funcArgInpObjs) $ isJust selFldsM
+
+ allTypes = queryTypes <> aggQueryTypes <> funcInpArgTys <> computedFieldFuncArgsInps
+
+ queryTypes = map TIObj selectObjects <>
+ catMaybes
+ [ TIInpObj <$> boolExpInpObjM
+ , TIInpObj <$> ordByInpObjM
+ , TIEnum <$> selColInpTyM
+ ]
+ aggQueryTypes = map TIObj aggObjs <> map TIInpObj aggOrdByInps
+
+ fieldMap = Map.unions $ catMaybes [boolExpInpObjFldsM, selObjFldsM]
+ scalars = selByPkScalarSet <> funcArgScalarSet <> computedFieldFuncArgScalars
+
+ selFldsM = snd <$> selPermM
+ selColNamesM = map pgiName . getPGColumnFields <$> selFldsM
+ selColInpTyM = mkSelColumnTy tn <$> selColNamesM
+ -- boolexp input type
+ boolExpInpObjM = case selFldsM of
+ Just selFlds -> Just $ mkBoolExpInp tn selFlds
+ -- no select permission
+ Nothing ->
+ -- but update/delete is defined
+ if isJust updColsM || isJust delPermM
+ then Just $ mkBoolExpInp tn []
+ else Nothing
+
+ -- funcargs input type
+ funcArgInpObjs = flip mapMaybe funcs $ \func ->
+ mkFuncArgsInp (fiName func) (getInputArgs func)
+ -- funcArgCtx = Map.unions funcArgCtxs
+ funcArgScalarSet = funcs ^.. folded.to getInputArgs.folded.to (_qptName.faType)
+
+ -- helper
+ mkFldMap ty = Map.fromList . concatMap (mkFld ty)
+ mkFld ty = \case
+ SFPGColumn ci -> [((ty, pgiName ci), RFPGColumn ci)]
+ SFRelationship (RelationshipFieldInfo relInfo allowAgg cols permFilter permLimit maybePkCols _) ->
+ let relationshipName = riName relInfo
+ relFld = ( (ty, mkRelName relationshipName)
+ , RFRelationship $ RelationshipField relInfo RFKSimple cols permFilter permLimit
+ )
+ aggRelFld = ( (ty, mkAggRelName relationshipName)
+ , RFRelationship $ RelationshipField relInfo RFKAggregate cols permFilter permLimit
+ )
+ maybeConnFld = maybePkCols <&> \pkCols ->
+ ( (ty, mkConnectionRelName relationshipName)
+ , RFRelationship $ RelationshipField relInfo
+ (RFKConnection pkCols) cols permFilter permLimit
+ )
+ in case riType relInfo of
+ ObjRel -> [relFld]
+ ArrRel -> bool [relFld] ([relFld, aggRelFld] <> maybe [] pure maybeConnFld) allowAgg
+ SFComputedField cf -> pure
+ ( (ty, mkComputedFieldName $ _cfName cf)
+ , RFComputedField cf
+ )
+ SFRemoteRelationship remoteField -> pure
+ ( (ty, G.Name (remoteRelationshipNameToText (_rfiName remoteField)))
+ , RFRemoteRelationship remoteField
+ )
+
+ -- the fields used in bool exp
+ boolExpInpObjFldsM = mkFldMap (mkBoolExpTy tn) <$> selFldsM
+
+ -- table obj
+ selectObjects = case selPermM of
+ Just (_, selFlds) ->
+ [ (mkRelayTableObj tn descM selFlds)
+ {_otiImplIFaces = Set.singleton nodeType}
+ , mkTableEdgeObj tn
+ , mkTableConnectionObj tn
+ ]
+ Nothing -> []
+
+ -- aggregate objs and order by inputs
+ (aggObjs, aggOrdByInps) = case selPermM of
+ Just (True, selFlds) ->
+ let cols = getPGColumnFields selFlds
+ numCols = onlyNumCols cols
+ compCols = onlyComparableCols cols
+ objs = [ mkTableAggObj tn
+ , mkTableAggregateFieldsObj tn (numCols, numAggregateOps) (compCols, compAggregateOps)
+ ] <> mkColAggregateFieldsObjs selFlds
+ ordByInps = mkTabAggOrdByInpObj tn (numCols, numAggregateOps) (compCols, compAggregateOps)
+ : mkTabAggregateOpOrdByInpObjs tn (numCols, numAggregateOps) (compCols, compAggregateOps)
+ in (objs, ordByInps)
+ _ -> ([], [])
+
+ getNumericCols = onlyNumCols . getPGColumnFields
+ getComparableCols = onlyComparableCols . getPGColumnFields
+ onlyFloat = const $ mkScalarTy PGFloat
+
+ mkTypeMaker "sum" = mkColumnType
+ mkTypeMaker _ = onlyFloat
+
+ mkColAggregateFieldsObjs flds =
+ let numCols = getNumericCols flds
+ compCols = getComparableCols flds
+ mkNumObjFld n = mkTableColAggregateFieldsObj tn n (mkTypeMaker n) numCols
+ mkCompObjFld n = mkTableColAggregateFieldsObj tn n mkColumnType compCols
+ numFldsObjs = bool (map mkNumObjFld numAggregateOps) [] $ null numCols
+ compFldsObjs = bool (map mkCompObjFld compAggregateOps) [] $ null compCols
+ in numFldsObjs <> compFldsObjs
+ -- the fields used in table object
+ nodeFieldM = RFNodeId tn . _pkColumns <$> pkeyCols
+ selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM >>=
+ \fm -> nodeFieldM <&> \nodeField ->
+ Map.insert (mkTableTy tn, "id") nodeField fm
+ -- the scalar set for table_by_pk arguments
+ selByPkScalarSet = pkeyCols ^.. folded.to _pkColumns.folded.to pgiType._PGColumnScalar
+
+ ordByInpCtxM = mkOrdByInpObj tn <$> selFldsM
+ (ordByInpObjM, ordByCtxM) = case ordByInpCtxM of
+ Just (a, b) -> (Just a, Just b)
+ Nothing -> (Nothing, Nothing)
+
+ -- computed fields' function args input objects and scalar types
+ mkComputedFieldRequiredTypes computedFieldInfo =
+ let ComputedFieldFunction qf inputArgs _ _ _ = _cfFunction computedFieldInfo
+ scalarArgs = map (_qptName . faType) $ toList inputArgs
+ in (, scalarArgs) <$> mkFuncArgsInp qf inputArgs
+
+ computedFieldReqTypes = catMaybes $
+ maybe [] (map mkComputedFieldRequiredTypes . getComputedFields) selFldsM
+
+ computedFieldFuncArgsInps = map (TIInpObj . fst) computedFieldReqTypes
+ computedFieldFuncArgScalars = Set.fromList $ concatMap snd computedFieldReqTypes
+
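+-- | Build the select operation context for a table from its columns and the role's select filter, limit and required headers.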
+mkSelectOpCtx
+ :: QualifiedTable
+ -> [PGColumnInfo]
+ -> (AnnBoolExpPartialSQL, Maybe Int, [T.Text]) -- select filter
+ -> SelOpCtx
+mkSelectOpCtx tn allCols (fltr, pLimit, hdrs) =
+ SelOpCtx tn hdrs colGNameMap fltr pLimit
+ where
+ colGNameMap = mkPGColGNameMap allCols
+
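+-- | The Relay query root fields for a role: a connection field for the table and for each of its functions, generated only when the role has select permission and the table has a primary key.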
+getRelayQueryRootFieldsRole
+ :: QualifiedTable
+ -> Maybe (PrimaryKey PGColumnInfo)
+ -> FieldInfoMap FieldInfo
+ -> [FunctionInfo]
+ -> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter
+ -> QueryRootFieldMap
+getRelayQueryRootFieldsRole tn primaryKey fields funcs selM =
+ makeFieldMap $
+ funcConnectionQueries
+ <> catMaybes
+ [ getSelConnectionDet <$> selM <*> maybePrimaryKeyColumns
+ ]
+ where
+ maybePrimaryKeyColumns = fmap _pkColumns primaryKey
+ colGNameMap = mkPGColGNameMap $ getCols fields
+
+ funcConnectionQueries = fromMaybe [] $ getFuncQueryConnectionFlds
+ <$> selM <*> maybePrimaryKeyColumns
+
+ getSelConnectionDet (selFltr, pLimit, hdrs, _) primaryKeyColumns =
+ selFldHelper (QCSelectConnection primaryKeyColumns)
+ (mkSelFldConnection Nothing) selFltr pLimit hdrs
+
+ selFldHelper f g pFltr pLimit hdrs =
+ ( f $ mkSelectOpCtx tn (getCols fields) (pFltr, pLimit, hdrs)
+ , g tn
+ )
+
+ getFuncQueryConnectionFlds (selFltr, pLimit, hdrs, _) primaryKeyColumns =
+ funcFldHelper (QCFuncConnection primaryKeyColumns) mkFuncQueryConnectionFld selFltr pLimit hdrs
+
+ funcFldHelper f g pFltr pLimit hdrs =
+ flip map funcs $ \fi ->
+ ( f $ FuncQOpCtx (fiName fi) (mkFuncArgItemSeq fi) hdrs colGNameMap pFltr pLimit
+ , g fi $ fiDescription fi
+ )
+
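+-- | The top-level 'node' root field, resolving a row of any Relay table from its globally unique id while respecting the role's select permissions.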
+mkNodeQueryRootFields :: RoleName -> [TableInfo] -> RootFields
+mkNodeQueryRootFields roleName relayTables =
+ RootFields (mapFromL (_fiName . snd) [nodeQueryDet]) mempty
+ where
+ nodeQueryDet =
+ ( QCNodeSelect nodeSelMap
+ , nodeQueryField
+ )
+
+ nodeQueryField =
+ let nodeParams = fromInpValL $ pure $
+ InpValInfo (Just $ G.Description "A globally unique id")
+ "id" Nothing nodeIdType
+ in mkHsraObjFldInfo Nothing "node" nodeParams $ G.toGT nodeType
+
+ nodeSelMap =
+ Map.fromList $ flip mapMaybe relayTables $ \table ->
+ let tableName = _tciName $ _tiCoreInfo table
+ allColumns = getCols $ _tciFieldInfoMap $ _tiCoreInfo table
+ selectPermM = _permSel <$> Map.lookup roleName
+ (_tiRolePermInfoMap table)
+ permDetailsM = join selectPermM <&> \perm ->
+ ( spiFilter perm
+ , spiLimit perm
+ , spiRequiredHeaders perm
+ )
+ adminPermDetails = (noFilter, Nothing, [])
+ in (mkTableTy tableName,) . mkSelectOpCtx tableName allColumns
+ <$> bool permDetailsM (Just adminPermDetails) (isAdmin roleName)
diff --git a/server/src-lib/Hasura/GraphQL/Resolve.hs b/server/src-lib/Hasura/GraphQL/Resolve.hs
index c59b23db735..afeaf54569c 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve.hs
@@ -12,6 +12,7 @@ module Hasura.GraphQL.Resolve
, QueryRootFldUnresolved
, QueryRootFldResolved
, toPGQuery
+ , toSQLFromItem
, RIntro.schemaR
, RIntro.typeR
@@ -36,14 +37,17 @@ import qualified Hasura.GraphQL.Resolve.Insert as RI
import qualified Hasura.GraphQL.Resolve.Introspect as RIntro
import qualified Hasura.GraphQL.Resolve.Mutation as RM
import qualified Hasura.GraphQL.Resolve.Select as RS
+import qualified Hasura.GraphQL.Schema.Common as GS
import qualified Hasura.GraphQL.Validate as V
+import qualified Hasura.RQL.DML.RemoteJoin as RR
import qualified Hasura.RQL.DML.Select as DS
import qualified Hasura.SQL.DML as S
data QueryRootFldAST v
- = QRFPk !(DS.AnnSimpleSelG v)
+ = QRFNode !(DS.AnnSimpleSelG v)
+ | QRFPk !(DS.AnnSimpleSelG v)
| QRFSimple !(DS.AnnSimpleSelG v)
- | QRFAgg !(DS.AnnAggSelG v)
+ | QRFAgg !(DS.AnnAggregateSelectG v)
+ | QRFConnection !(DS.ConnectionSelect v)
| QRFActionSelect !(DS.AnnSimpleSelG v)
| QRFActionExecuteObject !(DS.AnnSimpleSelG v)
| QRFActionExecuteList !(DS.AnnSimpleSelG v)
@@ -58,21 +62,28 @@ traverseQueryRootFldAST
-> QueryRootFldAST a
-> f (QueryRootFldAST b)
traverseQueryRootFldAST f = \case
- QRFPk s -> QRFPk <$> DS.traverseAnnSimpleSel f s
- QRFSimple s -> QRFSimple <$> DS.traverseAnnSimpleSel f s
- QRFAgg s -> QRFAgg <$> DS.traverseAnnAggSel f s
- QRFActionSelect s -> QRFActionSelect <$> DS.traverseAnnSimpleSel f s
- QRFActionExecuteObject s -> QRFActionExecuteObject <$> DS.traverseAnnSimpleSel f s
- QRFActionExecuteList s -> QRFActionExecuteList <$> DS.traverseAnnSimpleSel f s
+ QRFNode s -> QRFNode <$> DS.traverseAnnSimpleSelect f s
+ QRFPk s -> QRFPk <$> DS.traverseAnnSimpleSelect f s
+ QRFSimple s -> QRFSimple <$> DS.traverseAnnSimpleSelect f s
+ QRFAgg s -> QRFAgg <$> DS.traverseAnnAggregateSelect f s
+ QRFActionSelect s -> QRFActionSelect <$> DS.traverseAnnSimpleSelect f s
+ QRFActionExecuteObject s -> QRFActionExecuteObject <$> DS.traverseAnnSimpleSelect f s
+ QRFActionExecuteList s -> QRFActionExecuteList <$> DS.traverseAnnSimpleSelect f s
+ QRFConnection s -> QRFConnection <$> DS.traverseConnectionSelect f s
-toPGQuery :: QueryRootFldResolved -> Q.Query
+toPGQuery :: QueryRootFldResolved -> (Q.Query, Maybe RR.RemoteJoins)
toPGQuery = \case
- QRFPk s -> DS.selectQuerySQL DS.JASSingleObject s
- QRFSimple s -> DS.selectQuerySQL DS.JASMultipleRows s
- QRFAgg s -> DS.selectAggQuerySQL s
- QRFActionSelect s -> DS.selectQuerySQL DS.JASSingleObject s
- QRFActionExecuteObject s -> DS.selectQuerySQL DS.JASSingleObject s
- QRFActionExecuteList s -> DS.selectQuerySQL DS.JASMultipleRows s
+ QRFNode s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
+ QRFPk s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
+ QRFSimple s -> first (toQuery . DS.mkSQLSelect DS.JASMultipleRows) $ RR.getRemoteJoins s
+ QRFAgg s -> first (toQuery . DS.mkAggregateSelect) $ RR.getRemoteJoinsAggregateSelect s
+ QRFActionSelect s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
+ QRFActionExecuteObject s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
+ QRFActionExecuteList s -> first (toQuery . DS.mkSQLSelect DS.JASMultipleRows) $ RR.getRemoteJoins s
+ QRFConnection s -> first (toQuery . DS.mkConnectionSelect) $ RR.getRemoteJoinsConnectionSelect s
+ where
+ toQuery :: ToSQL a => a -> Q.Query
+ toQuery = Q.fromBuilder . toSQL
validateHdrs
:: (Foldable t, QErrM m) => UserInfo -> t Text -> m ()
@@ -101,6 +112,13 @@ queryFldToPGAST fld actionExecuter = do
opCtx <- getOpCtx $ V._fName fld
userInfo <- asks getter
case opCtx of
+ QCNodeSelect nodeSelectMap -> do
+ NodeIdData table pkeyColumnValues <- RS.resolveNodeId fld
+ case Map.lookup (GS.mkTableTy table) nodeSelectMap of
+ Nothing -> throwVE $ "table " <> table <<> " not found"
+ Just selOpCtx -> do
+ validateHdrs userInfo (_socHeaders selOpCtx)
+ QRFNode <$> RS.convertNodeSelect selOpCtx pkeyColumnValues fld
QCSelect ctx -> do
validateHdrs userInfo (_socHeaders ctx)
QRFSimple <$> RS.convertSelect ctx fld
@@ -125,13 +143,15 @@ queryFldToPGAST fld actionExecuter = do
-- an SQL query, but in case of query actions it's converted into JSON
-- and included in the action's webhook payload.
markNotReusable
- let f = case jsonAggType of
+ let jsonAggType = RA.mkJsonAggSelect $ _saecOutputType ctx
+ f = case jsonAggType of
DS.JASMultipleRows -> QRFActionExecuteList
DS.JASSingleObject -> QRFActionExecuteObject
- f <$> actionExecuter (RA.resolveActionQuery fld ctx (userVars userInfo))
- where
- outputType = _saecOutputType ctx
- jsonAggType = RA.mkJsonAggSelect outputType
+ f <$> actionExecuter (RA.resolveActionQuery fld ctx (_uiSession userInfo))
+ QCSelectConnection pk ctx ->
+ QRFConnection <$> RS.convertConnectionSelect pk ctx fld
+ QCFuncConnection pk ctx ->
+ QRFConnection <$> RS.convertConnectionFuncQuery pk ctx fld
mutFldToTx
:: ( HasVersion
@@ -187,3 +207,17 @@ getOpCtx f = do
opCtxMap <- asks getter
onNothing (Map.lookup f opCtxMap) $ throw500 $
"lookup failed: opctx: " <> showName f
+
+toSQLFromItem :: S.Alias -> QueryRootFldResolved -> S.FromItem
+toSQLFromItem alias = \case
+ QRFNode s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
+ QRFPk s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
+ QRFSimple s -> fromSelect $ DS.mkSQLSelect DS.JASMultipleRows s
+ QRFAgg s -> fromSelect $ DS.mkAggregateSelect s
+ QRFActionSelect s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
+ QRFActionExecuteObject s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
+ QRFActionExecuteList s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
+ QRFConnection s -> flip (S.FISelectWith (S.Lateral False)) alias
+ $ DS.mkConnectionSelect s
+ where
+ fromSelect = flip (S.FISelect (S.Lateral False)) alias
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Action.hs b/server/src-lib/Hasura/GraphQL/Resolve/Action.hs
index 1c58d036cf6..ad3881f0a39 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Action.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Action.hs
@@ -36,6 +36,16 @@ import qualified Network.Wreq as Wreq
import qualified Hasura.RQL.DML.Select as RS
+import qualified Control.Concurrent.Async as A
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Text as T
+import qualified Data.UUID as UUID
+import qualified Database.PG.Query as Q
import Hasura.EncJSON
import Hasura.GraphQL.Execute.Prepare
import Hasura.GraphQL.Parser
@@ -48,9 +58,34 @@ import Hasura.RQL.Types
import Hasura.RQL.Types.Run
import Hasura.Server.Utils (mkClientHeadersForward, mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)
+import Hasura.Session
import Hasura.SQL.Types
import Hasura.SQL.Value (PGScalarValue (..), toTxtValue)
+import qualified Hasura.RQL.DML.RemoteJoin as RJ
+
newtype ActionContext
= ActionContext {_acName :: ActionName}
@@ -136,10 +171,11 @@ resolveActionExecution
, MonadError QErr m
, MonadIO m
)
- => AnnActionExecution UnpreparedValue
+ => UserInfo
+ -> AnnActionExecution UnpreparedValue
-> ActionExecContext
-> m (RespTx, HTTP.ResponseHeaders)
-resolveActionExecution annAction execContext = do
+resolveActionExecution userInfo annAction execContext = do
let actionContext = ActionContext actionName
handlerPayload = ActionWebhookPayload actionContext sessionVariables inputPayload
(webhookRes, respHeaders) <- callWebhook manager outputType outputFields reqHeaders confHeaders
@@ -148,9 +184,17 @@ resolveActionExecution annAction execContext = do
toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
selectAstUnresolved = processOutputSelectionSet webhookResponseExpression
outputType definitionList annFields stringifyNum
- astResolved <- RS.traverseAnnSimpleSel (pure . unpreparedToTextSQL) selectAstUnresolved
- let jsonAggType = mkJsonAggSelect outputType
- return $ (,respHeaders) $ asSingleRowJsonResp (RS.selectQuerySQL jsonAggType astResolved) []
+ astResolved <- RS.traverseAnnSimpleSelect (pure . unpreparedToTextSQL) selectAstUnresolved
+ let (astResolvedWithoutRemoteJoins,maybeRemoteJoins) = RJ.getRemoteJoins astResolved
+ jsonAggType = mkJsonAggSelect outputType
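+  -- If the output selection set contains remote joins, execute the generated
+  -- query through the remote join executor; otherwise return a plain
+  -- single-row JSON response.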
+ return $ (,respHeaders) $
+ case maybeRemoteJoins of
+ Just remoteJoins ->
+ let query = Q.fromBuilder $ toSQL $
+ RS.mkSQLSelect jsonAggType astResolvedWithoutRemoteJoins
+ in RJ.executeQueryWithRemoteJoins manager reqHeaders userInfo query [] remoteJoins
+ Nothing ->
+ asSingleRowJsonResp (Q.fromBuilder $ toSQL $ RS.mkSQLSelect jsonAggType astResolved) []
where
AnnActionExecution actionName outputType annFields inputPayload
outputFields definitionList resolvedWebhook confHeaders
@@ -177,10 +221,17 @@ restrictActionExecuter errMsg _ =
-- resolveActionQuery
-- :: ( HasVersion
+-- , MonadReusability m
-- , MonadError QErr m
+-- , MonadReader r m
-- , MonadIO m
+-- , Has FieldMap r
+-- , Has OrdByCtx r
+-- , Has SQLGenCtx r
-- )
--- => UserVars
+-- => Field
+-- -> ActionExecutionContext
+-- -> SessionVariables
-- -> HTTP.Manager
-- -> [HTTP.Header]
-- -> m (RS.AnnSimpleSelG UnresolvedVal)
@@ -192,9 +243,10 @@ restrictActionExecuter errMsg _ =
-- forwardClientHeaders resolvedWebhook handlerPayload
-- let webhookResponseExpression = RS.AEInput $ UVSQL $
-- toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
+-- selSet <- asObjectSelectionSet $ _fSelSet field
-- selectAstUnresolved <-
-- processOutputSelectionSet webhookResponseExpression outputType definitionList
--- (_fType field) $ _fSelSet field
+-- (_fType field) selSet
-- return selectAstUnresolved
-- where
-- ActionExecutionContext actionName outputType outputFields definitionList resolvedWebhook confHeaders
@@ -254,12 +306,12 @@ resolveAsyncActionQuery
-> RS.AnnSimpleSelG UnpreparedValue
resolveAsyncActionQuery userInfo annAction =
let annotatedFields = asyncFields <&> second \case
- AsyncTypename t -> RS.FExp t
+ AsyncTypename t -> RS.AFExpression t
AsyncOutput annFields ->
-- See Note [Resolving async action query/subscription]
let inputTableArgument = RS.AETableRow $ Just $ Iden "response_payload"
jsonAggSelect = mkJsonAggSelect outputType
- in RS.FComputedField $ RS.CFSTable jsonAggSelect $
+ in RS.AFComputedField $ RS.CFSTable jsonAggSelect $
processOutputSelectionSet inputTableArgument outputType
definitionList annFields stringifyNumerics
@@ -268,20 +320,20 @@ resolveAsyncActionQuery userInfo annAction =
AsyncErrors -> mkAnnFldFromPGCol "errors" PGJSONB
tableFromExp = RS.FromTable actionLogTable
- tableArguments = RS.noTableArgs
- { RS._taWhere = Just tableBoolExpression}
+ tableArguments = RS.noSelectArgs
+ { RS._saWhere = Just tableBoolExpression}
tablePermissions = RS.TablePerm annBoolExpTrue Nothing
- in RS.AnnSelG annotatedFields tableFromExp tablePermissions
+ in RS.AnnSelectG annotatedFields tableFromExp tablePermissions
tableArguments stringifyNumerics
where
AnnActionAsyncQuery actionName actionId outputType asyncFields definitionList stringifyNumerics = annAction
actionLogTable = QualifiedObject (SchemaName "hdb_catalog") (TableName "hdb_action_log")
-- TODO:- Avoid using PGColumnInfo
- mkAnnFldFromPGCol col columnType =
- flip RS.mkAnnColField Nothing $
- PGColumnInfo (unsafePGCol col) (G.unsafeMkName col) 0 (PGColumnScalar columnType) True Nothing
+ mkAnnFldFromPGCol column columnType =
+ flip RS.mkAnnColumnField Nothing $
+ PGColumnInfo (unsafePGCol column) (G.unsafeMkName column) 0 (PGColumnScalar columnType) True Nothing
tableBoolExpression =
let actionIdColumnInfo = PGColumnInfo (unsafePGCol "id") $$(G.litName "id")
@@ -510,12 +562,12 @@ processOutputSelectionSet
:: RS.ArgumentExp v
-> GraphQLType
-> [(PGCol, PGScalarType)]
- -> RS.AnnFldsG v
+ -> RS.AnnFieldsG v
-> Bool
-> RS.AnnSimpleSelG v
processOutputSelectionSet tableRowInput actionOutputType definitionList
annotatedFields =
- RS.AnnSelG annotatedFields selectFrom RS.noTablePermissions RS.noTableArgs
+ RS.AnnSelectG annotatedFields selectFrom RS.noTablePermissions RS.noSelectArgs
where
jsonbToPostgresRecordFunction =
QualifiedObject "pg_catalog" $ FunctionName $
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs
index 48680e47310..45ea56b79d5 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs
@@ -169,6 +169,10 @@ parseColExp nt n val = do
fmapAnnBoolExp partialSQLExpToUnresolvedVal permExp
RFComputedField _ -> throw500
"computed fields are not allowed in bool_exp"
+ RFRemoteRelationship _ -> throw500
+ "remote relationships are not allowed in bool_exp"
+ RFNodeId _ _ -> throw500
+ "node id is not allowed in bool_exp"
parseBoolExp
:: ( MonadReusability m
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs
index 1d1974eae4e..044fbc0fc9e 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs
@@ -23,7 +23,7 @@ module Hasura.GraphQL.Resolve.Context
, txtConverter
- , withSelSet
+ , traverseObjectSelectionSet
, fieldAsPath
, resolvePGCol
, module Hasura.GraphQL.Utils
@@ -33,21 +33,21 @@ module Hasura.GraphQL.Resolve.Context
import Data.Has
import Hasura.Prelude
-import qualified Data.HashMap.Strict as Map
-import qualified Data.Sequence as Seq
-import qualified Database.PG.Query as Q
-import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Sequence as Seq
+import qualified Database.PG.Query as Q
+import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Utils
-import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
-import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
+import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value
-import qualified Hasura.SQL.DML as S
+import qualified Hasura.SQL.DML as S
getFldInfo
:: (MonadError QErr m, MonadReader r m, Has FieldMap r)
@@ -65,9 +65,11 @@ getPGColInfo
getPGColInfo nt n = do
fldInfo <- getFldInfo nt n
case fldInfo of
- RFPGColumn pgColInfo -> return pgColInfo
- RFRelationship _ -> throw500 $ mkErrMsg "relation"
- RFComputedField _ -> throw500 $ mkErrMsg "computed field"
+ RFPGColumn pgColInfo -> return pgColInfo
+ RFRelationship _ -> throw500 $ mkErrMsg "relation"
+ RFComputedField _ -> throw500 $ mkErrMsg "computed field"
+ RFRemoteRelationship _ -> throw500 $ mkErrMsg "remote relationship"
+ RFNodeId _ _ -> throw500 $ mkErrMsg "node id"
where
mkErrMsg ty =
"found " <> ty <> " when expecting pgcolinfo for "
@@ -139,12 +141,6 @@ prepareColVal (WithScalarType scalarType colVal) = do
txtConverter :: Applicative f => AnnPGVal -> f S.SQLExp
txtConverter (AnnPGVal _ _ scalarValue) = pure $ toTxtValue scalarValue
-withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m [(Text, a)]
-withSelSet selSet f =
- forM (toList selSet) $ \fld -> do
- res <- f fld
- return (G.unName $ G.unAlias $ _fAlias fld, res)
-
fieldAsPath :: (MonadError QErr m) => Field -> m a -> m a
fieldAsPath = nameAsPath . _fName
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs
index 4c6e7e428b8..81516911142 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Insert.hs
@@ -8,34 +8,36 @@ import Data.Has
import Hasura.EncJSON
import Hasura.Prelude
-import qualified Data.Aeson as J
-import qualified Data.Aeson.Casing as J
-import qualified Data.Aeson.TH as J
-import qualified Data.HashMap.Strict as Map
-import qualified Data.HashMap.Strict.InsOrd as OMap
-import qualified Data.Sequence as Seq
-import qualified Data.Text as T
-import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashMap.Strict.InsOrd as OMap
+import qualified Data.Sequence as Seq
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
-import qualified Database.PG.Query as Q
-import qualified Hasura.RQL.DML.Insert as RI
-import qualified Hasura.RQL.DML.Returning as RR
+import qualified Database.PG.Query as Q
+import qualified Hasura.RQL.DML.Insert as RI
+import qualified Hasura.RQL.DML.Returning as RR
-import qualified Hasura.SQL.DML as S
+import qualified Hasura.SQL.DML as S
import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Resolve.Mutation
import Hasura.GraphQL.Resolve.Select
-import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
-import Hasura.RQL.DML.Insert (insertOrUpdateCheckExpr)
-import Hasura.RQL.DML.Internal (convAnnBoolExpPartialSQL, convPartialSQLExp,
- dmlTxErrorHandler, sessVarFromCurrentSetting)
+import Hasura.RQL.DML.Insert (insertOrUpdateCheckExpr)
+import Hasura.RQL.DML.Internal (convAnnBoolExpPartialSQL, convPartialSQLExp,
+ sessVarFromCurrentSetting)
import Hasura.RQL.DML.Mutation
-import Hasura.RQL.GBoolExp (toSQLBoolExp)
+import Hasura.RQL.DML.RemoteJoin
+import Hasura.RQL.GBoolExp (toSQLBoolExp)
import Hasura.RQL.Types
+import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value
@@ -473,7 +475,8 @@ convertInsert
-> Field -- the mutation field
-> m RespTx
convertInsert role tn fld = prefixErrPath fld $ do
- mutOutputUnres <- RR.MOutMultirowFields <$> resolveMutationFields (_fType fld) (_fSelSet fld)
+ selSet <- asObjectSelectionSet $ _fSelSet fld
+ mutOutputUnres <- RR.MOutMultirowFields <$> resolveMutationFields (_fType fld) selSet
mutOutputRes <- RR.traverseMutationOutput resolveValTxt mutOutputUnres
annVals <- withArg arguments "objects" asArray
-- if insert input objects is empty array then
@@ -508,7 +511,8 @@ convertInsertOne
-> Field -- the mutation field
-> m RespTx
convertInsertOne role qt field = prefixErrPath field $ do
- tableSelFields <- processTableSelectionSet (_fType field) $ _fSelSet field
+ selSet <- asObjectSelectionSet $ _fSelSet field
+ tableSelFields <- processTableSelectionSet (_fType field) selSet
let mutationOutputUnresolved = RR.MOutSinglerowObject tableSelFields
mutationOutputResolved <- RR.traverseMutationOutput resolveValTxt mutationOutputUnresolved
annInputObj <- withArg arguments "object" asObject
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
index 710c46423ba..ea08b9bdb5b 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
@@ -6,17 +6,19 @@ module Hasura.GraphQL.Resolve.Introspect
import Data.Has
import Hasura.Prelude
-import qualified Data.Aeson as J
-import qualified Data.HashMap.Strict as Map
-import qualified Data.HashSet as Set
-import qualified Data.Text as T
-import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Data.Aeson as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashSet as Set
+import qualified Data.Text as T
+import qualified Hasura.SQL.Types as S
+import qualified Hasura.SQL.Value as S
+import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Validate.Context
-import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.InputValue
+import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
@@ -35,14 +37,15 @@ instance J.ToJSON TypeKind where
toJSON = J.toJSON . T.pack . drop 2 . show
withSubFields
- :: (Monad m)
- => SelSet
+ :: (MonadError QErr m)
+ => SelectionSet
-> (Field -> m J.Value)
-> m J.Object
-withSubFields selSet fn =
- fmap Map.fromList $ forM (toList selSet) $ \fld -> do
- val <- fn fld
- return (G.unName $ G.unAlias $ _fAlias fld, val)
+withSubFields selSet fn = do
+ objectSelectionSet <- asObjectSelectionSet selSet
+ Map.fromList <$> traverseObjectSelectionSet objectSelectionSet fn
+ -- val <- fn fld
+ -- return (G.unName $ G.unAlias $ _fAlias fld, val)
namedTyToTxt :: G.NamedType -> Text
namedTyToTxt = G.unName . G.unNamedType
@@ -101,9 +104,9 @@ notBuiltinFld f =
getImplTypes :: (MonadReader t m, Has TypeMap t) => AsObjType -> m [ObjTyInfo]
getImplTypes aot = do
- tyInfo :: TypeMap <- asks getter
+ tyInfo <- asks getter
return $ sortOn _otiName $
- Map.elems $ getPossibleObjTypes' tyInfo aot
+ Map.elems $ getPossibleObjTypes tyInfo aot
-- 4.5.2.3
unionR
@@ -139,19 +142,24 @@ ifaceR'
=> IFaceTyInfo
-> Field
-> m J.Object
-ifaceR' i@(IFaceTyInfo descM n flds) fld =
+ifaceR' ifaceTyInfo fld = do
+ dummyReadIncludeDeprecated fld
withSubFields (_fSelSet fld) $ \subFld ->
- case _fName subFld of
- "__typename" -> retJT "__Type"
- "kind" -> retJ TKINTERFACE
- "name" -> retJ $ namedTyToTxt n
- "description" -> retJ $ fmap G.unDescription descM
- "fields" -> fmap J.toJSON $ mapM (`fieldR` subFld) $
- sortOn _fiName $
- filter notBuiltinFld $ Map.elems flds
- "possibleTypes" -> fmap J.toJSON $ mapM (`objectTypeR` subFld)
- =<< getImplTypes (AOTIFace i)
- _ -> return J.Null
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKINTERFACE
+ "name" -> retJ $ namedTyToTxt name
+ "description" -> retJ $ fmap G.unDescription maybeDescription
+ "fields" -> fmap J.toJSON $ mapM (`fieldR` subFld) $
+ sortOn _fiName $
+ filter notBuiltinFld $ Map.elems fields
+ "possibleTypes" -> fmap J.toJSON $ mapM (`objectTypeR` subFld)
+ =<< getImplTypes (AOTIFace ifaceTyInfo)
+ _ -> return J.Null
+ where
+ maybeDescription = _ifDesc ifaceTyInfo
+ name = _ifName ifaceTyInfo
+ fields = _ifFields ifaceTyInfo
-- 4.5.2.5
enumTypeR
@@ -161,14 +169,64 @@ enumTypeR
-> m J.Object
enumTypeR (EnumTyInfo descM n vals _) fld =
withSubFields (_fSelSet fld) $ \subFld ->
- case _fName subFld of
- "__typename" -> retJT "__Type"
- "kind" -> retJ TKENUM
- "name" -> retJ $ namedTyToTxt n
- "description" -> retJ $ fmap G.unDescription descM
- "enumValues" -> fmap J.toJSON $ mapM (enumValueR subFld) $
- sortOn _eviVal $ Map.elems (normalizeEnumValues vals)
- _ -> return J.Null
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKENUM
+ "name" -> retJ $ namedTyToTxt n
+ "description" -> retJ $ fmap G.unDescription descM
+ "enumValues" -> do
+ includeDeprecated <- readIncludeDeprecated subFld
+ fmap J.toJSON $
+ mapM (enumValueR subFld) $
+ filter (\val -> includeDeprecated || not (_eviIsDeprecated val)) $
+ sortOn _eviVal $
+ Map.elems (normalizeEnumValues vals)
+ _ -> return J.Null
+
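+-- | Read the @includeDeprecated@ argument of an introspection field,
+-- defaulting to 'False' when the argument is not provided.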
+readIncludeDeprecated
+ :: ( Monad m, MonadReusability m, MonadError QErr m )
+ => Field
+ -> m Bool
+readIncludeDeprecated subFld = do
+ let argM = Map.lookup "includeDeprecated" (_fArguments subFld)
+ case argM of
+ Nothing -> pure False
+ Just arg -> asScalarVal arg S.PGBoolean >>= \case
+ S.PGValBoolean b -> pure b
+ _ -> throw500 "unexpected non-Boolean argument for includeDeprecated"
+
+{- Note [Reusability of introspection queries with variables]
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Introspection queries can have variables, too, in particular to influence one of
+two arguments: the @name@ argument of the @__type@ field, and the
+@includeDeprecated@ argument of the @fields@ and @enumValues@ fields. The
+current code does not cache all introspection queries with variables correctly.
+As a workaround to this, whenever a variable is passed to an @includeDeprecated@
+argument, we mark the query as unreusable. This is the purpose of
+'dummyReadIncludeDeprecated'.
+
+Now @fields@ and @enumValues@ are intended to be used when introspecting,
+respectively [object and interface types] and enum types. However, it does not
+suffice to only call 'dummyReadIncludeDeprecated' for such types, since @fields@
+and @enumValues@ are valid GraphQL fields regardless of what type we are looking
+at. So precisely because @__Type@ is _thought of_ as a union, but _not
+actually_ a union, we need to call 'dummyReadIncludeDeprecated' in all cases.
+
+See also issue #4547.
+-}
+
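+-- | See Note [Reusability of introspection queries with variables]: read (and
+-- discard) any @includeDeprecated@ argument in the sub-selection so that the
+-- query is marked non-reusable whenever a variable is used for it. For
+-- example, a (hypothetical) query such as
+--
+-- > query ($includeDeprecated: Boolean!) {
+-- >   __type(name: "query_root") {
+-- >     fields(includeDeprecated: $includeDeprecated) { name }
+-- >   }
+-- > }
+--
+-- must not be cached with the variable's value baked in.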
+dummyReadIncludeDeprecated
+ :: ( Monad m, MonadReusability m, MonadError QErr m )
+ => Field
+ -> m ()
+dummyReadIncludeDeprecated fld = do
+ selSet <- unAliasedFields . unObjectSelectionSet
+ <$> asObjectSelectionSet (_fSelSet fld)
+ forM_ (toList selSet) $ \subFld ->
+ case _fName subFld of
+ "fields" -> readIncludeDeprecated subFld
+ "enumValues" -> readIncludeDeprecated subFld
+ _ -> return False
-- 4.5.2.6
inputObjR
@@ -276,7 +334,7 @@ inputValueR fld (InpValInfo descM n defM ty) =
-- 4.5.5
enumValueR
- :: (Monad m)
+ :: (MonadError QErr m)
=> Field -> EnumValInfo -> m J.Object
enumValueR fld (EnumValInfo descM enumVal isDeprecated) =
withSubFields (_fSelSet fld) $ \subFld ->
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs
index dec9a37783a..844fab57935 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs
@@ -33,7 +33,7 @@ import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Resolve.Select (processTableSelectionSet)
-import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
import Hasura.RQL.Types
@@ -44,15 +44,16 @@ resolveMutationFields
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
- => G.NamedType -> SelSet -> m (RR.MutFldsG UnresolvedVal)
+ => G.NamedType -> ObjectSelectionSet -> m (RR.MutFldsG UnresolvedVal)
resolveMutationFields ty selSet = fmap (map (first FieldName)) $
- withSelSet selSet $ \fld -> case _fName fld of
+ traverseObjectSelectionSet selSet $ \fld -> case _fName fld of
"__typename" -> return $ RR.MExp $ G.unName $ G.unNamedType ty
"affected_rows" -> return RR.MCount
"returning" -> do
- annFlds <- processTableSelectionSet (_fType fld) $ _fSelSet fld
+ annFlds <- asObjectSelectionSet (_fSelSet fld)
+ >>= processTableSelectionSet (_fType fld)
annFldsResolved <- traverse
- (traverse (RS.traverseAnnFld convertPGValueToTextValue)) annFlds
+ (traverse (RS.traverseAnnField convertPGValueToTextValue)) annFlds
return $ RR.MRet annFldsResolved
G.Name t -> throw500 $ "unexpected field in mutation resp : " <> t
where
@@ -321,8 +322,9 @@ mutationFieldsResolver
, Has OrdByCtx r, Has SQLGenCtx r
)
=> Field -> m (RR.MutationOutputG UnresolvedVal)
-mutationFieldsResolver field =
- RR.MOutMultirowFields <$> resolveMutationFields (_fType field) (_fSelSet field)
+mutationFieldsResolver field = do
+ asObjectSelectionSet (_fSelSet field) >>= \selSet ->
+ RR.MOutMultirowFields <$> resolveMutationFields (_fType field) selSet
tableSelectionAsMutationOutput
:: ( MonadReusability m, MonadError QErr m
@@ -331,7 +333,8 @@ tableSelectionAsMutationOutput
)
=> Field -> m (RR.MutationOutputG UnresolvedVal)
tableSelectionAsMutationOutput field =
- RR.MOutSinglerowObject <$> processTableSelectionSet (_fType field) (_fSelSet field)
+ asObjectSelectionSet (_fSelSet field) >>= \selSet ->
+ RR.MOutSinglerowObject <$> processTableSelectionSet (_fType field) selSet
-- | build mutation response for empty objects
buildEmptyMutResp :: RR.MutationOutput -> EncJSON
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs
index 1d374cb9473..47830e3b892 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs
@@ -1,37 +1,47 @@
module Hasura.GraphQL.Resolve.Select
( convertSelect
+ , convertConnectionSelect
+ , convertConnectionFuncQuery
, convertSelectByPKey
, convertAggSelect
, convertFuncQuerySimple
, convertFuncQueryAgg
, parseColumns
, processTableSelectionSet
+ , resolveNodeId
+ , convertNodeSelect
, AnnSimpleSelect
) where
-import Control.Lens ((^?), _2)
+import Control.Lens (to, (^..), (^?), _2)
import Data.Has
import Data.Parser.JSONPath
import Hasura.Prelude
-import qualified Data.HashMap.Strict as Map
-import qualified Data.HashMap.Strict.InsOrd as OMap
-import qualified Data.List.NonEmpty as NE
-import qualified Data.Sequence as Seq
-import qualified Data.Text as T
-import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Internal as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashMap.Strict.InsOrd as OMap
+import qualified Data.List.NonEmpty as NE
+import qualified Data.Sequence as Seq
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
-import qualified Hasura.RQL.DML.Select as RS
-import qualified Hasura.SQL.DML as S
+import qualified Hasura.RQL.DML.Select as RS
+import qualified Hasura.SQL.DML as S
import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
-import Hasura.GraphQL.Schema (isAggFld)
-import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Schema (isAggregateField)
+import Hasura.GraphQL.Schema.Common (mkTableTy)
+import Hasura.GraphQL.Validate
+import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
-import Hasura.RQL.DML.Internal (onlyPositiveInt)
+import Hasura.RQL.DML.Internal (onlyPositiveInt)
import Hasura.RQL.Types
+import Hasura.Server.Utils
import Hasura.SQL.Types
import Hasura.SQL.Value
@@ -45,27 +55,29 @@ jsonPathToColExp t = case parseJSONPath t of
elToColExp (Index i) = S.SELit $ T.pack (show i)
-argsToColOp :: (MonadReusability m, MonadError QErr m) => ArgsMap -> m (Maybe RS.ColOp)
-argsToColOp args = maybe (return Nothing) toOp $ Map.lookup "path" args
- where
- toJsonPathExp = fmap (RS.ColOp S.jsonbPathOp) . jsonPathToColExp
- toOp v = asPGColTextM v >>= traverse toJsonPathExp
+argsToColumnOp :: (MonadReusability m, MonadError QErr m) => ArgsMap -> m (Maybe RS.ColumnOp)
+argsToColumnOp args = case Map.lookup "path" args of
+ Nothing -> return Nothing
+ Just txt -> do
+ mColTxt <- asPGColTextM txt
+ mColExps <- maybe (return Nothing) jsonPathToColExp mColTxt
+ pure $ RS.ColumnOp S.jsonbPathOp <$> mColExps
-type AnnFlds = RS.AnnFldsG UnresolvedVal
+type AnnFields = RS.AnnFieldsG UnresolvedVal
resolveComputedField
:: ( MonadReusability m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r, MonadError QErr m
)
- => ComputedField -> Field -> m (RS.ComputedFieldSel UnresolvedVal)
+ => ComputedField -> Field -> m (RS.ComputedFieldSelect UnresolvedVal)
resolveComputedField computedField fld = fieldAsPath fld $ do
funcArgs <- parseFunctionArgs argSeq argFn $ Map.lookup "args" $ _fArguments fld
let argsWithTableArgument = withTableArgument funcArgs
case fieldType of
CFTScalar scalarTy -> do
- colOpM <- argsToColOp $ _fArguments fld
+ colOpM <- argsToColumnOp $ _fArguments fld
pure $ RS.CFSScalar $
- RS.ComputedFieldScalarSel qf argsWithTableArgument scalarTy colOpM
+        RS.ComputedFieldScalarSelect qf argsWithTableArgument scalarTy colOpM
CFTTable (ComputedFieldTable _ cols permFilter permLimit) -> do
let functionFrom = RS.FromFunction qf argsWithTableArgument Nothing
RS.CFSTable RS.JASMultipleRows <$> fromField functionFrom cols permFilter permLimit fld
@@ -86,77 +98,142 @@ processTableSelectionSet
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
- => G.NamedType -> SelSet -> m AnnFlds
+ => G.NamedType -> ObjectSelectionSet -> m AnnFields
processTableSelectionSet fldTy flds =
- forM (toList flds) $ \fld -> do
+ fmap (map (\(a, b) -> (FieldName a, b))) $ traverseObjectSelectionSet flds $ \fld -> do
let fldName = _fName fld
- let rqlFldName = FieldName $ G.unName $ G.unAlias $ _fAlias fld
- (rqlFldName,) <$> case fldName of
- "__typename" -> return $ RS.FExp $ G.unName $ G.unNamedType fldTy
+ case fldName of
+ "__typename" -> return $ RS.AFExpression $ G.unName $ G.unNamedType fldTy
_ -> do
fldInfo <- getFldInfo fldTy fldName
case fldInfo of
+ RFNodeId tn pkeys -> pure $ RS.AFNodeId tn pkeys
RFPGColumn colInfo ->
- RS.mkAnnColField colInfo <$> argsToColOp (_fArguments fld)
+ RS.mkAnnColumnField colInfo <$> argsToColumnOp (_fArguments fld)
RFComputedField computedField ->
- RS.FComputedField <$> resolveComputedField computedField fld
- RFRelationship (RelationshipField relInfo isAgg colGNameMap tableFilter tableLimit) -> do
+ RS.AFComputedField <$> resolveComputedField computedField fld
+ RFRelationship (RelationshipField relInfo fieldKind colGNameMap tableFilter tableLimit) -> do
let relTN = riRTable relInfo
colMapping = riMapping relInfo
rn = riName relInfo
- if isAgg then do
- aggSel <- fromAggField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
- return $ RS.FArr $ RS.ASAgg $ RS.AnnRelG rn colMapping aggSel
- else do
- annSel <- fromField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
- let annRel = RS.AnnRelG rn colMapping annSel
- return $ case riType relInfo of
- ObjRel -> RS.FObj annRel
- ArrRel -> RS.FArr $ RS.ASSimple annRel
+ case fieldKind of
+ RFKSimple -> do
+ annSel <- fromField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
+ let annRel = RS.AnnRelationSelectG rn colMapping annSel
+ pure $ case riType relInfo of
+ ObjRel -> RS.AFObjectRelation annRel
+ ArrRel -> RS.AFArrayRelation $ RS.ASSimple annRel
+ RFKAggregate -> do
+ aggSel <- fromAggField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
+ pure $ RS.AFArrayRelation $ RS.ASAggregate $ RS.AnnRelationSelectG rn colMapping aggSel
+ RFKConnection pkCols -> do
+ connSel <- fromConnectionField (RS.FromTable relTN) pkCols tableFilter tableLimit fld
+ pure $ RS.AFArrayRelation $ RS.ASConnection $ RS.AnnRelationSelectG rn colMapping connSel
-type TableAggFlds = RS.TableAggFldsG UnresolvedVal
+ RFRemoteRelationship info ->
+ pure $ RS.AFRemote $ RS.RemoteSelect
+ (unValidateArgsMap $ _fArguments fld) -- Unvalidate the input arguments
+ (unValidateSelectionSet $ _fSelSet fld) -- Unvalidate the selection fields
+ (_rfiHasuraFields info)
+ (_rfiRemoteFields info)
+ (_rfiRemoteSchema info)
+
+type TableAggregateFields = RS.TableAggregateFieldsG UnresolvedVal
fromAggSelSet
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
- => PGColGNameMap -> G.NamedType -> SelSet -> m TableAggFlds
+ => PGColGNameMap -> G.NamedType -> ObjectSelectionSet -> m TableAggregateFields
fromAggSelSet colGNameMap fldTy selSet = fmap toFields $
- withSelSet selSet $ \f -> do
- let fTy = _fType f
- fSelSet = _fSelSet f
- case _fName f of
+ traverseObjectSelectionSet selSet $ \Field{..} ->
+ case _fName of
"__typename" -> return $ RS.TAFExp $ G.unName $ G.unNamedType fldTy
- "aggregate" -> RS.TAFAgg <$> convertAggFld colGNameMap fTy fSelSet
- "nodes" -> RS.TAFNodes <$> processTableSelectionSet fTy fSelSet
+ "aggregate" -> do
+ objSelSet <- asObjectSelectionSet _fSelSet
+ RS.TAFAgg <$> convertAggregateField colGNameMap _fType objSelSet
+ "nodes" -> do
+ objSelSet <- asObjectSelectionSet _fSelSet
+ RS.TAFNodes <$> processTableSelectionSet _fType objSelSet
G.Name t -> throw500 $ "unexpected field in _agg node: " <> t
-type TableArgs = RS.TableArgsG UnresolvedVal
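+-- | Resolve the selection set of a Relay connection type (@pageInfo@, @edges@,
+-- @__typename@).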
+fromConnectionSelSet
+ :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
+ , Has OrdByCtx r, Has SQLGenCtx r
+ )
+ => G.NamedType -> ObjectSelectionSet -> m (RS.ConnectionFields UnresolvedVal)
+fromConnectionSelSet fldTy selSet = fmap toFields $
+ traverseObjectSelectionSet selSet $ \Field{..} ->
+ case _fName of
+ "__typename" -> return $ RS.ConnectionTypename $ G.unName $ G.unNamedType fldTy
+ "pageInfo" -> do
+ fSelSet <- asObjectSelectionSet _fSelSet
+ RS.ConnectionPageInfo <$> parsePageInfoSelectionSet _fType fSelSet
+ "edges" -> do
+ fSelSet <- asObjectSelectionSet _fSelSet
+ RS.ConnectionEdges <$> parseEdgeSelectionSet _fType fSelSet
+ -- "aggregate" -> RS.TAFAgg <$> convertAggregateField colGNameMap fTy fSelSet
+ -- "nodes" -> RS.TAFNodes <$> processTableSelectionSet fTy fSelSet
+ G.Name t -> throw500 $ "unexpected field in _connection node: " <> t
-parseTableArgs
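+-- | Resolve the selection set of a connection edge (@cursor@, @node@,
+-- @__typename@).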
+parseEdgeSelectionSet
+ :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
+ , Has OrdByCtx r, Has SQLGenCtx r
+ )
+ => G.NamedType -> ObjectSelectionSet -> m (RS.EdgeFields UnresolvedVal)
+parseEdgeSelectionSet fldTy selSet = fmap toFields $
+ traverseObjectSelectionSet selSet $ \f -> do
+ let fTy = _fType f
+ case _fName f of
+ "__typename" -> pure $ RS.EdgeTypename $ G.unName $ G.unNamedType fldTy
+ "cursor" -> pure RS.EdgeCursor
+ "node" -> do
+ fSelSet <- asObjectSelectionSet $ _fSelSet f
+ RS.EdgeNode <$> processTableSelectionSet fTy fSelSet
+ G.Name t -> throw500 $ "unexpected field in Edge node: " <> t
+
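+-- | Resolve the @PageInfo@ selection set of a connection: @hasNextPage@,
+-- @hasPreviousPage@, @startCursor@ and @endCursor@.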
+parsePageInfoSelectionSet
+ :: ( MonadReusability m, MonadError QErr m)
+ => G.NamedType -> ObjectSelectionSet -> m RS.PageInfoFields
+parsePageInfoSelectionSet fldTy selSet =
+ fmap toFields $ traverseObjectSelectionSet selSet $ \f ->
+ case _fName f of
+ "__typename" -> pure $ RS.PageInfoTypename $ G.unName $ G.unNamedType fldTy
+ "hasNextPage" -> pure RS.PageInfoHasNextPage
+ "hasPreviousPage" -> pure RS.PageInfoHasPreviousPage
+ "startCursor" -> pure RS.PageInfoStartCursor
+ "endCursor" -> pure RS.PageInfoEndCursor
+ -- "aggregate" -> RS.TAFAgg <$> convertAggregateField colGNameMap fTy fSelSet
+ -- "nodes" -> RS.TAFNodes <$> processTableSelectionSet fTy fSelSet
+ G.Name t -> throw500 $ "unexpected field in PageInfo node: " <> t
+
+type SelectArgs = RS.SelectArgsG UnresolvedVal
+
+parseSelectArgs
:: ( MonadReusability m, MonadError QErr m, MonadReader r m
, Has FieldMap r, Has OrdByCtx r
)
- => PGColGNameMap -> ArgsMap -> m TableArgs
-parseTableArgs colGNameMap args = do
+ => PGColGNameMap -> ArgsMap -> m SelectArgs
+parseSelectArgs colGNameMap args = do
whereExpM <- withArgM args "where" parseBoolExp
ordByExpML <- withArgM args "order_by" parseOrderBy
let ordByExpM = NE.nonEmpty =<< ordByExpML
- limitExpM <- withArgM args "limit" parseLimit
+ limitExpM <- withArgM args "limit" $
+ parseNonNegativeInt "expecting Integer value for \"limit\""
offsetExpM <- withArgM args "offset" $ asPGColumnValue >=> openOpaqueValue >=> txtConverter
distOnColsML <- withArgM args "distinct_on" $ parseColumns colGNameMap
let distOnColsM = NE.nonEmpty =<< distOnColsML
mapM_ (validateDistOn ordByExpM) distOnColsM
- return $ RS.TableArgs whereExpM ordByExpM limitExpM offsetExpM distOnColsM
+ return $ RS.SelectArgs whereExpM ordByExpM limitExpM offsetExpM distOnColsM
where
validateDistOn Nothing _ = return ()
validateDistOn (Just ordBys) cols = withPathK "args" $ do
let colsLen = length cols
initOrdBys = take colsLen $ toList ordBys
initOrdByCols = flip mapMaybe initOrdBys $ \ob ->
- case obiColumn ob of
- RS.AOCPG pgCol -> Just pgCol
- _ -> Nothing
+ case obiColumn ob of
+ RS.AOCColumn pgCol -> Just $ pgiColumn pgCol
+ _ -> Nothing
isValid = (colsLen == length initOrdByCols)
&& all (`elem` initOrdByCols) (toList cols)
@@ -175,12 +252,13 @@ fromField
-> Maybe Int
-> Field -> m AnnSimpleSelect
fromField selFrom colGNameMap permFilter permLimitM fld = fieldAsPath fld $ do
- tableArgs <- parseTableArgs colGNameMap args
- annFlds <- processTableSelectionSet (_fType fld) $ _fSelSet fld
+ tableArgs <- parseSelectArgs colGNameMap args
+ selSet <- asObjectSelectionSet $ _fSelSet fld
+ annFlds <- processTableSelectionSet (_fType fld) selSet
let unresolvedPermFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
let tabPerm = RS.TablePerm unresolvedPermFltr permLimitM
strfyNum <- stringifyNum <$> asks getter
- return $ RS.AnnSelG annFlds selFrom tabPerm tableArgs strfyNum
+ return $ RS.AnnSelectG annFlds selFrom tabPerm tableArgs strfyNum
where
args = _fArguments fld
@@ -201,7 +279,8 @@ parseOrderBy
, MonadReader r m
, Has OrdByCtx r
)
- => AnnInpVal -> m [RS.AnnOrderByItemG UnresolvedVal]
+ => AnnInpVal
+ -> m [RS.AnnOrderByItemG UnresolvedVal]
parseOrderBy = fmap concat . withArray f
where
f _ = mapM (withObject (getAnnObItems id))
@@ -212,7 +291,7 @@ getAnnObItems
, MonadReader r m
, Has OrdByCtx r
)
- => (RS.AnnObColG UnresolvedVal -> RS.AnnObColG UnresolvedVal)
+ => (RS.AnnOrderByElement UnresolvedVal -> RS.AnnOrderByElement UnresolvedVal)
-> G.NamedType
-> AnnGObject
-> m [RS.AnnOrderByItemG UnresolvedVal]
@@ -224,7 +303,7 @@ getAnnObItems f nt obj = do
<> showNamedTy nt <> " map"
case ordByItem of
OBIPGCol ci -> do
- let aobCol = f $ RS.AOCPG $ pgiColumn ci
+ let aobCol = f $ RS.AOCColumn ci
(_, enumValM) <- asEnumValM v
ordByItemM <- forM enumValM $ \enumVal -> do
(ordTy, nullsOrd) <- parseOrderByEnum enumVal
@@ -233,13 +312,13 @@ getAnnObItems f nt obj = do
OBIRel ri fltr -> do
let unresolvedFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal fltr
- let annObColFn = f . RS.AOCObj ri unresolvedFltr
+ let annObColFn = f . RS.AOCObjectRelation ri unresolvedFltr
flip withObjectM v $ \nameTy objM ->
maybe (pure []) (getAnnObItems annObColFn nameTy) objM
OBIAgg ri relColGNameMap fltr -> do
let unresolvedFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal fltr
- let aobColFn = f . RS.AOCAgg ri unresolvedFltr
+ let aobColFn = f . RS.AOCArrayAggregation ri unresolvedFltr
flip withObjectM v $ \_ objM ->
maybe (pure []) (parseAggOrdBy relColGNameMap aobColFn) objM
@@ -250,7 +329,7 @@ mkOrdByItemG ordTy aobCol nullsOrd =
parseAggOrdBy
:: (MonadReusability m, MonadError QErr m)
=> PGColGNameMap
- -> (RS.AnnAggOrdBy -> RS.AnnObColG UnresolvedVal)
+ -> (RS.AnnAggregateOrderBy -> RS.AnnOrderByElement UnresolvedVal)
-> AnnGObject
-> m [RS.AnnOrderByItemG UnresolvedVal]
parseAggOrdBy colGNameMap f annObj =
@@ -263,14 +342,14 @@ parseAggOrdBy colGNameMap f annObj =
return $ mkOrdByItemG ordTy (f RS.AAOCount) nullsOrd
return $ maybe [] pure ordByItemM
- G.Name opT ->
+ G.Name opText ->
flip withObject obVal $ \_ opObObj -> fmap catMaybes $
forM (OMap.toList opObObj) $ \(colName, eVal) -> do
(_, enumValM) <- asEnumValM eVal
forM enumValM $ \enumVal -> do
(ordTy, nullsOrd) <- parseOrderByEnum enumVal
- col <- pgiColumn <$> resolvePGCol colGNameMap colName
- let aobCol = f $ RS.AAOOp opT col
+ col <- resolvePGCol colGNameMap colName
+ let aobCol = f $ RS.AAOOp opText col
return $ mkOrdByItemG ordTy aobCol nullsOrd
parseOrderByEnum
@@ -287,15 +366,14 @@ parseOrderByEnum = \case
G.EnumValue v -> throw500 $
"enum value " <> showName v <> " not found in type order_by"
-parseLimit :: (MonadReusability m, MonadError QErr m) => AnnInpVal -> m Int
-parseLimit v = do
+parseNonNegativeInt
+ :: (MonadReusability m, MonadError QErr m) => Text -> AnnInpVal -> m Int
+parseNonNegativeInt errMsg v = do
pgColVal <- openOpaqueValue =<< asPGColumnValue v
- limit <- maybe noIntErr return . pgColValueToInt . pstValue $ _apvValue pgColVal
+ limit <- maybe (throwVE errMsg) return . pgColValueToInt . pstValue $ _apvValue pgColVal
-- validate int value
onlyPositiveInt limit
return limit
- where
- noIntErr = throwVE "expecting Integer value for \"limit\""
type AnnSimpleSel = RS.AnnSimpleSelG UnresolvedVal
@@ -311,14 +389,15 @@ fromFieldByPKey
-> AnnBoolExpPartialSQL -> Field -> m AnnSimpleSel
fromFieldByPKey tn colArgMap permFilter fld = fieldAsPath fld $ do
boolExp <- pgColValToBoolExp colArgMap $ _fArguments fld
- annFlds <- processTableSelectionSet fldTy $ _fSelSet fld
+ selSet <- asObjectSelectionSet $ _fSelSet fld
+ annFlds <- processTableSelectionSet fldTy selSet
let tabFrom = RS.FromTable tn
unresolvedPermFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal
permFilter
tabPerm = RS.TablePerm unresolvedPermFltr Nothing
- tabArgs = RS.noTableArgs { RS._taWhere = Just boolExp}
+ tabArgs = RS.noSelectArgs { RS._saWhere = Just boolExp}
strfyNum <- stringifyNum <$> asks getter
- return $ RS.AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum
+ return $ RS.AnnSelectG annFlds tabFrom tabPerm tabArgs strfyNum
where
fldTy = _fType fld
@@ -345,14 +424,18 @@ convertSelectByPKey opCtx fld =
SelPkOpCtx qt _ permFilter colArgMap = opCtx
-- agg select related
-parseColumns :: (MonadReusability m, MonadError QErr m) => PGColGNameMap -> AnnInpVal -> m [PGCol]
+parseColumns
+ :: (MonadReusability m, MonadError QErr m)
+ => PGColGNameMap -> AnnInpVal -> m [PGCol]
parseColumns allColFldMap val =
flip withArray val $ \_ vals ->
forM vals $ \v -> do
(_, G.EnumValue enumVal) <- asEnumVal v
pgiColumn <$> resolvePGCol allColFldMap enumVal
-convertCount :: (MonadReusability m, MonadError QErr m) => PGColGNameMap -> ArgsMap -> m S.CountType
+convertCount
+ :: (MonadReusability m, MonadError QErr m)
+ => PGColGNameMap -> ArgsMap -> m S.CountType
convertCount colGNameMap args = do
columnsM <- withArgM args "columns" $ parseColumns colGNameMap
isDistinct <- or <$> withArgM args "distinct" parseDistinct
@@ -371,34 +454,33 @@ convertCount colGNameMap args = do
toFields :: [(T.Text, a)] -> RS.Fields a
toFields = map (first FieldName)
-convertColFlds
+convertColumnFields
:: (MonadError QErr m)
- => PGColGNameMap -> G.NamedType -> SelSet -> m RS.ColFlds
-convertColFlds colGNameMap ty selSet = fmap toFields $
- withSelSet selSet $ \fld ->
+ => PGColGNameMap -> G.NamedType -> ObjectSelectionSet -> m RS.ColumnFields
+convertColumnFields colGNameMap ty selSet = fmap toFields $
+ traverseObjectSelectionSet selSet $ \fld ->
case _fName fld of
"__typename" -> return $ RS.PCFExp $ G.unName $ G.unNamedType ty
- n -> (RS.PCFCol . pgiColumn) <$> resolvePGCol colGNameMap n
+ n -> RS.PCFCol . pgiColumn <$> resolvePGCol colGNameMap n
-convertAggFld
+convertAggregateField
:: (MonadReusability m, MonadError QErr m)
- => PGColGNameMap -> G.NamedType -> SelSet -> m RS.AggFlds
-convertAggFld colGNameMap ty selSet = fmap toFields $
- withSelSet selSet $ \fld -> do
- let fType = _fType fld
- fSelSet = _fSelSet fld
- case _fName fld of
+ => PGColGNameMap -> G.NamedType -> ObjectSelectionSet -> m RS.AggregateFields
+convertAggregateField colGNameMap ty selSet = fmap toFields $
+ traverseObjectSelectionSet selSet $ \Field{..} ->
+ case _fName of
"__typename" -> return $ RS.AFExp $ G.unName $ G.unNamedType ty
"count" -> RS.AFCount <$> convertCount colGNameMap (_fArguments fld)
n -> do
- colFlds <- convertColFlds colGNameMap fType fSelSet
- unless (isAggFld n) $ throwInvalidFld n
- return $ RS.AFOp $ RS.AggOp (G.unName n) colFlds
+ fSelSet <- asObjectSelectionSet _fSelSet
+ colFlds <- convertColumnFields colGNameMap _fType fSelSet
+ unless (isAggregateField n) $ throwInvalidFld n
+ return $ RS.AFOp $ RS.AggregateOp (G.unName n) colFlds
where
throwInvalidFld (G.Name t) =
throw500 $ "unexpected field in _aggregate node: " <> t
-type AnnAggSel = RS.AnnAggSelG UnresolvedVal
+type AnnAggregateSelect = RS.AnnAggregateSelectG UnresolvedVal
fromAggField
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
@@ -408,29 +490,162 @@ fromAggField
-> PGColGNameMap
-> AnnBoolExpPartialSQL
-> Maybe Int
- -> Field -> m AnnAggSel
+ -> Field -> m AnnAggregateSelect
fromAggField selectFrom colGNameMap permFilter permLimit fld = fieldAsPath fld $ do
- tableArgs <- parseTableArgs colGNameMap args
- aggSelFlds <- fromAggSelSet colGNameMap (_fType fld) (_fSelSet fld)
+ tableArgs <- parseSelectArgs colGNameMap args
+ selSet <- asObjectSelectionSet $ _fSelSet fld
+ aggSelFlds <- fromAggSelSet colGNameMap (_fType fld) selSet
let unresolvedPermFltr =
fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
let tabPerm = RS.TablePerm unresolvedPermFltr permLimit
strfyNum <- stringifyNum <$> asks getter
- return $ RS.AnnSelG aggSelFlds selectFrom tabPerm tableArgs strfyNum
+ return $ RS.AnnSelectG aggSelFlds selectFrom tabPerm tableArgs strfyNum
where
args = _fArguments fld
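+
+-- | Build a Relay 'RS.ConnectionSelect' from a connection field's arguments
+-- and selection set.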
+fromConnectionField
+ :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
+ , Has OrdByCtx r, Has SQLGenCtx r
+ )
+ => RS.SelectFromG UnresolvedVal
+ -> NonEmpty PGColumnInfo
+ -> AnnBoolExpPartialSQL
+ -> Maybe Int
+ -> Field -> m (RS.ConnectionSelect UnresolvedVal)
+fromConnectionField selectFrom pkCols permFilter permLimit fld = fieldAsPath fld $ do
+ (tableArgs, slice, split) <- parseConnectionArgs pkCols args
+ selSet <- asObjectSelectionSet $ _fSelSet fld
+ connSelFlds <- fromConnectionSelSet (_fType fld) selSet
+ strfyNum <- stringifyNum <$> asks getter
+ let unresolvedPermFltr =
+ fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
+ tabPerm = RS.TablePerm unresolvedPermFltr permLimit
+ annSel = RS.AnnSelectG connSelFlds selectFrom tabPerm tableArgs strfyNum
+ pure $ RS.ConnectionSelect pkCols split slice annSel
+ where
+ args = _fArguments fld
+
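+-- | Parse the arguments of a connection field: the usual @where@ and
+-- @order_by@, the slice (@first@/@last@) and the split (@after@/@before@).
+-- Primary-key columns are appended to any user-supplied @order_by@, and any
+-- @after@/@before@ cursor is decoded and validated against the ordering
+-- columns.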
+parseConnectionArgs
+ :: forall r m.
+ ( MonadReusability m, MonadError QErr m, MonadReader r m
+ , Has FieldMap r, Has OrdByCtx r
+ )
+ => NonEmpty PGColumnInfo
+ -> ArgsMap
+ -> m ( SelectArgs
+ , Maybe RS.ConnectionSlice
+ , Maybe (NE.NonEmpty (RS.ConnectionSplit UnresolvedVal))
+ )
+parseConnectionArgs pKeyColumns args = do
+ whereExpM <- withArgM args "where" parseBoolExp
+ ordByExpML <- withArgM args "order_by" parseOrderBy
+
+ slice <- case (Map.lookup "first" args, Map.lookup "last" args) of
+ (Nothing, Nothing) -> pure Nothing
+ (Just _, Just _) -> throwVE "\"first\" and \"last\" are not allowed at once"
+ (Just v, Nothing) -> Just . RS.SliceFirst <$> parseNonNegativeInt
+ "expecting Integer value for \"first\"" v
+ (Nothing, Just v) -> Just . RS.SliceLast <$> parseNonNegativeInt
+ "expecting Integer value for \"last\"" v
+
+ maybeSplit <- case (Map.lookup "after" args, Map.lookup "before" args) of
+ (Nothing, Nothing) -> pure Nothing
+ (Just _, Just _) -> throwVE "\"after\" and \"before\" are not allowed at once"
+ (Just v, Nothing) -> fmap ((RS.CSKAfter,) . base64Decode) <$> asPGColTextM v
+ (Nothing, Just v) -> fmap ((RS.CSKBefore,) . base64Decode) <$> asPGColTextM v
+
+ let ordByExpM = NE.nonEmpty =<< appendPrimaryKeyOrderBy <$> ordByExpML
+ tableArgs = RS.SelectArgs whereExpM ordByExpM Nothing Nothing Nothing
+
+ split <- mapM (uncurry (validateConnectionSplit ordByExpM)) maybeSplit
+ pure (tableArgs, slice, split)
+ where
+ appendPrimaryKeyOrderBy :: [RS.AnnOrderByItemG v] -> [RS.AnnOrderByItemG v]
+ appendPrimaryKeyOrderBy orderBys =
+ let orderByColumnNames =
+ orderBys ^.. traverse . to obiColumn . RS._AOCColumn . to pgiColumn
+ pkeyOrderBys = flip mapMaybe (toList pKeyColumns) $ \pgColumnInfo ->
+ if pgiColumn pgColumnInfo `elem` orderByColumnNames then Nothing
+ else Just $ OrderByItemG Nothing (RS.AOCColumn pgColumnInfo) Nothing
+ in orderBys <> pkeyOrderBys
+
+ validateConnectionSplit
+ :: Maybe (NonEmpty (RS.AnnOrderByItemG UnresolvedVal))
+ -> RS.ConnectionSplitKind
+ -> BL.ByteString
+ -> m (NonEmpty (RS.ConnectionSplit UnresolvedVal))
+ validateConnectionSplit maybeOrderBys splitKind cursorSplit = do
+ cursorValue <- either (const throwInvalidCursor) pure $
+ J.eitherDecode cursorSplit
+ case maybeOrderBys of
+ Nothing -> forM pKeyColumns $
+ \pgColumnInfo -> do
+ let columnJsonPath = [J.Key $ getPGColTxt $ pgiColumn pgColumnInfo]
+ pgColumnValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
+ executeJSONPath columnJsonPath cursorValue
+ pgValue <- parsePGScalarValue (pgiType pgColumnInfo) pgColumnValue
+ let unresolvedValue = UVPG $ AnnPGVal Nothing False pgValue
+ pure $ RS.ConnectionSplit splitKind unresolvedValue $
+ OrderByItemG Nothing (RS.AOCColumn pgColumnInfo) Nothing
+ Just orderBys ->
+ forM orderBys $ \orderBy -> do
+ let OrderByItemG orderType annObCol nullsOrder = orderBy
+ orderByItemValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
+ executeJSONPath (getPathFromOrderBy annObCol) cursorValue
+ pgValue <- parsePGScalarValue (getOrderByColumnType annObCol) orderByItemValue
+ let unresolvedValue = UVPG $ AnnPGVal Nothing False pgValue
+ pure $ RS.ConnectionSplit splitKind unresolvedValue $
+ OrderByItemG orderType (() <$ annObCol) nullsOrder
+ where
+ throwInvalidCursor = throwVE "the \"after\" or \"before\" cursor is invalid"
+
+ iResultToMaybe = \case
+ J.ISuccess v -> Just v
+ J.IError{} -> Nothing
+
+ getPathFromOrderBy = \case
+ RS.AOCColumn pgColInfo ->
+ let pathElement = J.Key $ getPGColTxt $ pgiColumn pgColInfo
+ in [pathElement]
+ RS.AOCObjectRelation relInfo _ obCol ->
+ let pathElement = J.Key $ relNameToTxt $ riName relInfo
+ in pathElement : getPathFromOrderBy obCol
+ RS.AOCArrayAggregation relInfo _ aggOb ->
+ let fieldName = J.Key $ relNameToTxt (riName relInfo) <> "_aggregate"
+ in fieldName : case aggOb of
+ RS.AAOCount -> [J.Key "count"]
+ RS.AAOOp t col -> [J.Key t, J.Key $ getPGColTxt $ pgiColumn col]
+
+ getOrderByColumnType = \case
+ RS.AOCColumn pgColInfo -> pgiType pgColInfo
+ RS.AOCObjectRelation _ _ obCol -> getOrderByColumnType obCol
+ RS.AOCArrayAggregation _ _ aggOb ->
+ case aggOb of
+ RS.AAOCount -> PGColumnScalar PGInteger
+ RS.AAOOp _ colInfo -> pgiType colInfo
+
convertAggSelect
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
- => SelOpCtx -> Field -> m (RS.AnnAggSelG UnresolvedVal)
+ => SelOpCtx -> Field -> m (RS.AnnAggregateSelectG UnresolvedVal)
convertAggSelect opCtx fld =
withPathK "selectionSet" $
fromAggField (RS.FromTable qt) colGNameMap permFilter permLimit fld
where
SelOpCtx qt _ colGNameMap permFilter permLimit = opCtx
+convertConnectionSelect
+ :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
+ , Has OrdByCtx r, Has SQLGenCtx r
+ )
+ => NonEmpty PGColumnInfo -> SelOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
+convertConnectionSelect pkCols opCtx fld =
+ withPathK "selectionSet" $
+ fromConnectionField (RS.FromTable qt) pkCols permFilter permLimit fld
+ where
+ SelOpCtx qt _ _ permFilter permLimit = opCtx
+
parseFunctionArgs
:: (MonadReusability m, MonadError QErr m)
=> Seq.Seq a
@@ -506,10 +721,77 @@ convertFuncQueryAgg
, Has OrdByCtx r
, Has SQLGenCtx r
)
- => FuncQOpCtx -> Field -> m AnnAggSel
+ => FuncQOpCtx -> Field -> m AnnAggregateSelect
convertFuncQueryAgg funcOpCtx fld =
withPathK "selectionSet" $ fieldAsPath fld $ do
selectFrom <- makeFunctionSelectFrom qf argSeq fld
fromAggField selectFrom colGNameMap permFilter permLimit fld
where
FuncQOpCtx qf argSeq _ colGNameMap permFilter permLimit = funcOpCtx
+
+convertConnectionFuncQuery
+ :: ( MonadReusability m
+ , MonadError QErr m
+ , MonadReader r m
+ , Has FieldMap r
+ , Has OrdByCtx r
+ , Has SQLGenCtx r
+ )
+ => NonEmpty PGColumnInfo -> FuncQOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
+convertConnectionFuncQuery pkCols funcOpCtx fld =
+ withPathK "selectionSet" $ fieldAsPath fld $ do
+ selectFrom <- makeFunctionSelectFrom qf argSeq fld
+ fromConnectionField selectFrom pkCols permFilter permLimit fld
+ where
+ FuncQOpCtx qf argSeq _ _ permFilter permLimit = funcOpCtx
+
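+-- | Decode the @id@ argument of the Relay @node@ field: a base64-encoded JSON
+-- value identifying a table and its primary-key column values.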
+resolveNodeId
+ :: forall m. ( MonadError QErr m
+ , MonadReusability m
+ )
+ => Field -> m NodeIdData
+resolveNodeId field =
+ withPathK "selectionSet" $ fieldAsPath field $ do
+ nodeIdText <- asPGColText =<< getArg (_fArguments field) "id"
+ either (const throwInvalidNodeId) pure $
+ J.eitherDecode $ base64Decode nodeIdText
+ where
+ throwInvalidNodeId = throwVE "the node id is invalid"
+
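+-- | Convert a @node@ field into a single-row select: the selection set is
+-- parsed as an interface selection set, narrowed to the table's object type,
+-- and filtered by the primary-key values decoded from the node id.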
+convertNodeSelect
+ :: ( MonadReusability m
+ , MonadError QErr m
+ , MonadReader r m
+ , Has FieldMap r
+ , Has OrdByCtx r
+ , Has SQLGenCtx r
+ )
+ => SelOpCtx
+ -> Map.HashMap PGCol J.Value
+ -> Field
+ -> m (RS.AnnSimpleSelG UnresolvedVal)
+convertNodeSelect selOpCtx pkeyColumnValues field =
+ withPathK "selectionSet" $ fieldAsPath field $ do
+ -- Parse selection set as interface
+ ifaceSelectionSet <- asInterfaceSelectionSet $ _fSelSet field
+ let tableObjectType = mkTableTy table
+ selSet = getMemberSelectionSet tableObjectType ifaceSelectionSet
+ unresolvedPermFilter = fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
+ tablePerm = RS.TablePerm unresolvedPermFilter permLimit
+ -- Resolve the table selection set
+ annFields <- processTableSelectionSet tableObjectType selSet
+ -- Resolve the Node id primary key column values
+ unresolvedPkeyValues <- flip Map.traverseWithKey pkeyColumnValues $
+ \pgColumn jsonValue -> case Map.lookup pgColumn pgColumnMap of
+ Nothing -> throwVE $ "column " <> pgColumn <<> " not found"
+ Just columnInfo -> (,columnInfo) . UVPG . AnnPGVal Nothing False <$>
+ parsePGScalarValue (pgiType columnInfo) jsonValue
+ -- Generate the bool expression from the primary key column values
+ let pkeyBoolExp = BoolAnd $ flip map (Map.elems unresolvedPkeyValues) $
+ \(unresolvedValue, columnInfo) -> (BoolFld . AVCol columnInfo) [AEQ True unresolvedValue]
+ selectArgs = RS.noSelectArgs{RS._saWhere = Just pkeyBoolExp}
+ strfyNum <- stringifyNum <$> asks getter
+ pure $ RS.AnnSelectG annFields (RS.FromTable table) tablePerm selectArgs strfyNum
+ where
+ SelOpCtx table _ allColumns permFilter permLimit = selOpCtx
+ pgColumnMap = mapFromL pgiColumn $ Map.elems allColumns
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs
index 64c31e88fd5..4b09aa89400 100644
--- a/server/src-lib/Hasura/GraphQL/Resolve/Types.hs
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Types.hs
@@ -7,6 +7,9 @@ module Hasura.GraphQL.Resolve.Types
import Control.Lens.TH
import Hasura.Prelude
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Text as T
@@ -27,12 +30,17 @@ import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
+type NodeSelectMap = Map.HashMap G.NamedType SelOpCtx
+
data QueryCtx
- = QCSelect !SelOpCtx
+ = QCNodeSelect !NodeSelectMap
+ | QCSelect !SelOpCtx
+ | QCSelectConnection !(NonEmpty PGColumnInfo) !SelOpCtx
| QCSelectPkey !SelPkOpCtx
| QCSelectAgg !SelOpCtx
| QCFuncQuery !FuncQOpCtx
| QCFuncAggQuery !FuncQOpCtx
+ | QCFuncConnection !(NonEmpty PGColumnInfo) !FuncQOpCtx
| QCAsyncActionFetch !ActionSelectOpContext
| QCAction !ActionExecutionContext
deriving (Show, Eq)
@@ -130,10 +138,16 @@ data ActionSelectOpContext
-- used in resolvers
type PGColGNameMap = Map.HashMap G.Name PGColumnInfo
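+
+-- | How a relationship field is being queried: as an aggregate, as a plain
+-- relation, or as a Relay connection (which carries the remote table's
+-- primary-key columns).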
+data RelationshipFieldKind
+ = RFKAggregate
+ | RFKSimple
+ | RFKConnection !(NonEmpty PGColumnInfo)
+ deriving (Show, Eq)
+
data RelationshipField
= RelationshipField
{ _rfInfo :: !RelInfo
- , _rfIsAgg :: !Bool
+ , _rfIsAgg :: !RelationshipFieldKind
, _rfCols :: !PGColGNameMap
, _rfPermFilter :: !AnnBoolExpPartialSQL
, _rfPermLimit :: !(Maybe Int)
@@ -166,6 +180,8 @@ data ResolveField
= RFPGColumn !PGColumnInfo
| RFRelationship !RelationshipField
| RFComputedField !ComputedField
+ | RFRemoteRelationship !RemoteFieldInfo
+ | RFNodeId !QualifiedTable !(NonEmpty PGColumnInfo)
deriving (Show, Eq)
type FieldMap = Map.HashMap (G.NamedType, G.Name) ResolveField
@@ -245,6 +261,13 @@ data InputFunctionArgument
| IFAUnknown !FunctionArgItem -- ^ Unknown value, need to be parsed
deriving (Show, Eq)
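+
+-- | The decoded payload of a Relay node id: the table it refers to and the
+-- values of its primary-key columns. With the instance derived below, the
+-- JSON payload has the shape @{"table": ..., "columns": {...}}@.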
+data NodeIdData
+ = NodeIdData
+ { _nidTable :: !QualifiedTable
+ , _nidColumns :: !(Map.HashMap PGCol J.Value)
+ } deriving (Show, Eq)
+$(J.deriveFromJSON (J.aesonDrop 4 J.snakeCase) ''NodeIdData)
+
-- template haskell related
$(makePrisms ''ResolveField)
$(makeLenses ''ComputedField)
diff --git a/server/src-lib/Hasura/GraphQL/Schema/Action.hs b/server/src-lib/Hasura/GraphQL/Schema/Action.hs
index 1b70cf53feb..7549703bbdd 100644
--- a/server/src-lib/Hasura/GraphQL/Schema/Action.hs
+++ b/server/src-lib/Hasura/GraphQL/Schema/Action.hs
@@ -132,7 +132,7 @@ actionIdParser =
actionOutputFields
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m, Has QueryContext r)
=> AnnotatedObjectType
- -> MaybeT m (Parser 'Output n (RQL.AnnFldsG UnpreparedValue))
+ -> MaybeT m (Parser 'Output n (RQL.AnnFieldsG UnpreparedValue))
actionOutputFields outputObject = do
let scalarOrEnumFields = map scalarOrEnumFieldParser $ toList $ _otdFields outputObject
relationshipFields <- forM (_otdRelationships outputObject) $ traverse relationshipFieldParser
@@ -141,11 +141,11 @@ actionOutputFields outputObject = do
outputTypeName = unObjectTypeName $ _otdName outputObject
outputTypeDescription = _otdDescription outputObject
pure $ P.selectionSet outputTypeName outputTypeDescription allFieldParsers
- <&> parsedSelectionsToFields RQL.FExp
+ <&> parsedSelectionsToFields RQL.AFExpression
where
scalarOrEnumFieldParser
:: ObjectFieldDefinition (G.GType, AnnotatedObjectFieldType)
- -> FieldParser n (RQL.AnnFldG UnpreparedValue)
+ -> FieldParser n (RQL.AnnFieldG UnpreparedValue)
scalarOrEnumFieldParser (ObjectFieldDefinition name _ description ty) =
let (gType, objectFieldType) = ty
fieldName = unObjectFieldName name
@@ -157,11 +157,11 @@ actionOutputFields outputObject = do
AOFTEnum def -> customEnumParser def
in bool P.nonNullableField id (G.isNullable gType) $
P.selection_ (unObjectFieldName name) description fieldParser
- $> RQL.mkAnnColField pgColumnInfo Nothing
+ $> RQL.mkAnnColumnField pgColumnInfo Nothing
relationshipFieldParser
:: TypeRelationship TableInfo PGColumnInfo
- -> MaybeT m (FieldParser n (RQL.AnnFldG UnpreparedValue))
+ -> MaybeT m (FieldParser n (RQL.AnnFieldG UnpreparedValue))
relationshipFieldParser typeRelationship = do
let TypeRelationship relName relType tableInfo fieldMapping = typeRelationship
tableName = _tciName $ _tiCoreInfo tableInfo
@@ -175,10 +175,10 @@ actionOutputFields outputObject = do
[ (unsafePGCol $ G.unName $ unObjectFieldName k, pgiColumn v)
| (k, v) <- Map.toList fieldMapping
]
- annotatedRelationship = RQL.AnnRelG tableRelName columnMapping selectExp
+ annotatedRelationship = RQL.AnnRelationSelectG tableRelName columnMapping selectExp
in case relType of
- ObjRel -> RQL.FObj annotatedRelationship
- ArrRel -> RQL.FArr $ RQL.ASSimple annotatedRelationship
+ ObjRel -> RQL.AFObjectRelation annotatedRelationship
+ ArrRel -> RQL.AFArrayRelation $ RQL.ASSimple annotatedRelationship
mkDefinitionList :: AnnotatedObjectType -> [(PGCol, PGScalarType)]
mkDefinitionList annotatedOutputType =
diff --git a/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs b/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs
index 2d8bcd46e3d..d54af2b859e 100644
--- a/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs
+++ b/server/src-lib/Hasura/GraphQL/Schema/OrderBy.hs
@@ -52,7 +52,7 @@ orderByExp table selectPermissions = memoizeOn 'orderByExp table $ do
FIColumn columnInfo -> do
let fieldName = pgiName columnInfo
pure $ P.fieldOptional fieldName Nothing orderByOperator
- <&> fmap (pure . mkOrderByItemG (RQL.AOCPG $ pgiColumn columnInfo)) . join
+ <&> fmap (pure . mkOrderByItemG (RQL.AOCColumn columnInfo)) . join
FIRelationship relationshipInfo -> do
let remoteTable = riRTable relationshipInfo
fieldName <- MaybeT $ pure $ G.mkName $ relNameToTxt $ riName relationshipInfo
@@ -63,13 +63,13 @@ orderByExp table selectPermissions = memoizeOn 'orderByExp table $ do
otherTableParser <- lift $ orderByExp remoteTable perms
pure $ do
otherTableOrderBy <- join <$> P.fieldOptional fieldName Nothing (P.nullable otherTableParser)
- pure $ fmap (map $ fmap $ RQL.AOCObj relationshipInfo newPerms) otherTableOrderBy
+ pure $ fmap (map $ fmap $ RQL.AOCObjectRelation relationshipInfo newPerms) otherTableOrderBy
ArrRel -> do
let aggregateFieldName = fieldName <> $$(G.litName "_aggregate")
aggregationParser <- lift $ orderByAggregation remoteTable perms
pure $ do
aggregationOrderBy <- join <$> P.fieldOptional aggregateFieldName Nothing (P.nullable aggregationParser)
- pure $ fmap (map $ fmap $ RQL.AOCAgg relationshipInfo newPerms) aggregationOrderBy
+ pure $ fmap (map $ fmap $ RQL.AOCArrayAggregation relationshipInfo newPerms) aggregationOrderBy
FIComputedField _ -> empty
FIRemoteRelationship _ -> empty
@@ -84,7 +84,7 @@ orderByAggregation
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo
- -> m (Parser 'Input n [OrderByItemG RQL.AnnAggOrdBy])
+ -> m (Parser 'Input n [OrderByItemG RQL.AnnAggregateOrderBy])
orderByAggregation table selectPermissions = do
-- WIP NOTE
-- there is heavy duplication between this and Select.tableAggregationFields
@@ -113,16 +113,16 @@ orderByAggregation table selectPermissions = do
description = G.Description $ "order by aggregate values of table \"" <> table <<> "\""
pure $ P.object objectName (Just description) aggFields
where
- mkField :: PGColumnInfo -> InputFieldsParser n (Maybe (PGCol, OrderInfo))
+ mkField :: PGColumnInfo -> InputFieldsParser n (Maybe (PGColumnInfo, OrderInfo))
mkField columnInfo =
P.fieldOptional (pgiName columnInfo) (pgiDescription columnInfo) orderByOperator
- <&> fmap (pgiColumn columnInfo,) . join
+ <&> fmap (columnInfo,) . join
parseOperator
:: G.Name
-> G.Name
- -> InputFieldsParser n [(PGCol, OrderInfo)]
- -> InputFieldsParser n (Maybe [OrderByItemG RQL.AnnAggOrdBy])
+ -> InputFieldsParser n [(PGColumnInfo, OrderInfo)]
+ -> InputFieldsParser n (Maybe [OrderByItemG RQL.AnnAggregateOrderBy])
parseOperator operator tableName columns =
let opText = G.unName operator
-- FIXME: isn't G.Name a Monoid?
diff --git a/server/src-lib/Hasura/GraphQL/Schema/Select.hs b/server/src-lib/Hasura/GraphQL/Schema/Select.hs
index f318398518e..951d5d92ef5 100644
--- a/server/src-lib/Hasura/GraphQL/Schema/Select.hs
+++ b/server/src-lib/Hasura/GraphQL/Schema/Select.hs
@@ -51,11 +51,11 @@ import Hasura.SQL.Value
type SelectExp = RQL.AnnSimpleSelG UnpreparedValue
-type AggSelectExp = RQL.AnnAggSelG UnpreparedValue
-type TableArgs = RQL.TableArgsG UnpreparedValue
+type AggSelectExp = RQL.AnnAggregateSelectG UnpreparedValue
+type SelectArgs = RQL.SelectArgsG UnpreparedValue
type TablePerms = RQL.TablePermG UnpreparedValue
-type AnnotatedFields = RQL.AnnFldsG UnpreparedValue
-type AnnotatedField = RQL.AnnFldG UnpreparedValue
+type AnnotatedFields = RQL.AnnFieldsG UnpreparedValue
+type AnnotatedField = RQL.AnnFieldG UnpreparedValue
@@ -83,7 +83,7 @@ selectTable table fieldName description selectPermissions = do
tableArgsParser <- tableArgs table selectPermissions
selectionSetParser <- tableSelectionSet table selectPermissions Nothing
pure $ P.subselection fieldName description tableArgsParser selectionSetParser
- <&> \(args, fields) -> RQL.AnnSelG
+ <&> \(args, fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo selectPermissions
@@ -121,13 +121,13 @@ selectTableByPk table fieldName description selectPermissions = runMaybeT do
<&> \(boolExpr, fields) ->
let defaultPerms = tablePermissionsInfo selectPermissions
whereExpr = Just $ BoolAnd $ toList boolExpr
- in RQL.AnnSelG
+ in RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = defaultPerms { RQL._tpLimit = Nothing }
-- TODO: check whether this is necessary: ^^^^^^^
-- This is how it was in legacy code.
- , RQL._asnArgs = RQL.noTableArgs { RQL._taWhere = whereExpr }
+ , RQL._asnArgs = RQL.noSelectArgs { RQL._saWhere = whereExpr }
, RQL._asnStrfyNum = stringifyNum
}
@@ -161,7 +161,7 @@ selectTableAggregate table fieldName description selectPermissions = runMaybeT d
, RQL.TAFAgg <$> P.subselection_ $$(G.litName "aggregate") Nothing aggregateParser
]
pure $ P.subselection fieldName description tableArgsParser aggregationParser
- <&> \(args, fields) -> RQL.AnnSelG
+ <&> \(args, fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo selectPermissions
@@ -249,7 +249,7 @@ tableSelectionSet table selectPermissions interfaceM = memoizeOn 'tableSelection
let description = G.Description . getPGDescription <$> _tciDescription tableInfo
pure $ P.selectionSetObject tableName description fieldParsers (toList interfaceM)
- <&> parsedSelectionsToFields RQL.FExp
+ <&> parsedSelectionsToFields RQL.AFExpression
-- | User-defined function (AKA custom function)
@@ -268,7 +268,7 @@ selectFunction function fieldName description selectPermissions = do
selectionSetParser <- tableSelectionSet table selectPermissions Nothing
let argsParser = liftA2 (,) functionArgsParser tableArgsParser
pure $ P.subselection fieldName description argsParser selectionSetParser
- <&> \((funcArgs, tableArgs'), fields) -> RQL.AnnSelG
+ <&> \((funcArgs, tableArgs'), fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (fiName function) funcArgs Nothing
, RQL._asnPerm = tablePermissionsInfo selectPermissions
@@ -299,7 +299,7 @@ selectFunctionAggregate function fieldName description selectPermissions = runMa
, RQL.TAFAgg <$> P.subselection_ $$(G.litName "aggregate") Nothing aggregateParser
]
pure $ P.subselection fieldName description argsParser aggregationParser
- <&> \((funcArgs, tableArgs'), fields) -> RQL.AnnSelG
+ <&> \((funcArgs, tableArgs'), fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (fiName function) funcArgs Nothing
, RQL._asnPerm = tablePermissionsInfo selectPermissions
@@ -323,7 +323,7 @@ tableArgs
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo
- -> m (InputFieldsParser n TableArgs)
+ -> m (InputFieldsParser n SelectArgs)
tableArgs table selectPermissions = do
boolExpParser <- boolExp table (Just selectPermissions)
orderByParser <- orderByExp table selectPermissions
@@ -342,7 +342,7 @@ tableArgs table selectPermissions = do
-- despite the schema explicitly declaring it as an int; the suspected goal
-- of this was to allow for bigint offsets, but there's no surviving commit
      -- message or documentation that states it explicitly. A visible artefact of
- -- this is the fact that in the TableArgs we store a SQL expression for the
+ -- this is the fact that in the SelectArgs we store a SQL expression for the
      -- offset while the limit is stored as a normal int.
--
-- While it would be possible to write a custom parser that advertises
@@ -352,12 +352,12 @@ tableArgs table selectPermissions = do
   -- TODO: distinct_on must be validated against order_by
-- the check at Resolve/Select.hs:152 must be ported here
- pure $ RQL.TableArgs
- { RQL._taWhere = whereF
- , RQL._taOrderBy = nonEmpty . concat =<< orderBy
- , RQL._taLimit = fromIntegral <$> limit
- , RQL._taOffset = txtEncoder . PGValInteger <$> offset
- , RQL._taDistCols = nonEmpty =<< distinct
+ pure $ RQL.SelectArgs
+ { RQL._saWhere = whereF
+ , RQL._saOrderBy = nonEmpty . concat =<< orderBy
+ , RQL._saLimit = fromIntegral <$> limit
+ , RQL._saOffset = txtEncoder . PGValInteger <$> offset
+ , RQL._saDistinct = nonEmpty =<< distinct
}
where
-- TH splices mess up ApplicativeDo
@@ -398,7 +398,7 @@ tableAggregationFields
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo
- -> m (Parser 'Output n RQL.AggFlds)
+ -> m (Parser 'Output n RQL.AggregateFields)
tableAggregationFields table selectPermissions = do
tableName <- qualifiedObjectToName table
allColumns <- tableSelectColumns table selectPermissions
@@ -447,7 +447,7 @@ tableAggregationFields table selectPermissions = do
:: G.Name
-> G.Name
-> [FieldParser n RQL.PGColFld]
- -> FieldParser n RQL.AggFld
+ -> FieldParser n RQL.AggregateField
parseOperator operator tableName columns =
let opText = G.unName operator
setName = tableName <> $$(G.litName "_") <> operator <> $$(G.litName "_fields")
@@ -455,7 +455,7 @@ tableAggregationFields table selectPermissions = do
subselectionParser = P.selectionSet setName setDesc columns
<&> parsedSelectionsToFields RQL.PCFExp
in P.subselection_ operator Nothing subselectionParser
- <&> (RQL.AFOp . RQL.AggOp opText)
+ <&> (RQL.AFOp . RQL.AggregateOp opText)
lookupRemoteField'
:: (MonadSchema n m, MonadTableInfo r m)
@@ -499,7 +499,7 @@ fieldSelection fieldInfo selectPermissions = do
pathArg = jsonPathArg $ pgiType columnInfo
field <- lift $ P.column (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
pure $ P.selection fieldName (pgiDescription columnInfo) pathArg field
- <&> RQL.mkAnnColField columnInfo
+ <&> RQL.mkAnnColumnField columnInfo
FIRelationship relationshipInfo -> concat . maybeToList <$> runMaybeT do
-- TODO: move this to a separate function?
@@ -514,10 +514,10 @@ fieldSelection fieldInfo selectPermissions = do
relFieldName <- lift $ textToName $ relNameToTxt relName
otherTableParser <- lift $ selectTable otherTable relFieldName desc remotePerms
let field = otherTableParser <&> \selectExp ->
- let annotatedRelationship = RQL.AnnRelG relName colMapping selectExp
+ let annotatedRelationship = RQL.AnnRelationSelectG relName colMapping selectExp
in case riType relationshipInfo of
- ObjRel -> RQL.FObj annotatedRelationship
- ArrRel -> RQL.FArr $ RQL.ASSimple annotatedRelationship
+ ObjRel -> RQL.AFObjectRelation annotatedRelationship
+ ArrRel -> RQL.AFArrayRelation $ RQL.ASSimple annotatedRelationship
case riType relationshipInfo of
ObjRel -> pure [field]
ArrRel -> do
@@ -525,7 +525,7 @@ fieldSelection fieldInfo selectPermissions = do
relAggDesc = Just $ G.Description "An aggregate relationship"
remoteAggField <- lift $ selectTableAggregate otherTable relAggFieldName relAggDesc remotePerms
pure $ catMaybes [ Just field
- , fmap (RQL.FArr . RQL.ASAgg . RQL.AnnRelG relName colMapping) <$> remoteAggField
+ , fmap (RQL.AFArrayRelation . RQL.ASAggregate . RQL.AnnRelationSelectG relName colMapping) <$> remoteAggField
]
FIComputedField computedFieldInfo ->
@@ -562,7 +562,7 @@ fieldSelection fieldInfo selectPermissions = do
pure $ pure $ P.unsafeRawField (P.mkDefinition fieldName Nothing fieldInfo')
`P.bindField` \G.Field{ G._fArguments = args, G._fSelectionSet = selSet } -> do
remoteArgs <- P.ifParser remoteFieldsArgumentsParser args
- pure $ RQL.FRemote $ RQL.RemoteSelect
+ pure $ RQL.AFRemote $ RQL.RemoteSelect
{ _rselArgs = remoteArgs
, _rselSelection = selSet
, _rselHasuraColumns = _rfiHasuraFields remoteFieldInfo
@@ -697,7 +697,7 @@ computedFieldFunctionArgs ComputedFieldFunction{..} =
-- FIXME: move to common?
-jsonPathArg :: MonadParse n => PGColumnType -> InputFieldsParser n (Maybe RQL.ColOp)
+jsonPathArg :: MonadParse n => PGColumnType -> InputFieldsParser n (Maybe RQL.ColumnOp)
jsonPathArg columnType
| isScalarColumnWhere isJSONType columnType =
P.fieldOptional fieldName description P.string `P.bindFields` traverse toColExp
@@ -707,7 +707,7 @@ jsonPathArg columnType
description = Just "JSON select path"
toColExp textValue = case parseJSONPath textValue of
Left err -> parseError $ T.pack $ "parse json path error: " ++ err
- Right jPaths -> return $ RQL.ColOp SQL.jsonbPathOp $ SQL.SEArray $ map elToColExp jPaths
+ Right jPaths -> return $ RQL.ColumnOp SQL.jsonbPathOp $ SQL.SEArray $ map elToColExp jPaths
elToColExp (Key k) = SQL.SELit k
elToColExp (Index i) = SQL.SELit $ T.pack (show i)
@@ -726,7 +726,7 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
let fieldArgsParser = do
args <- functionArgsParser
colOp <- jsonPathArg $ PGColumnScalar scalarReturnType
- pure $ RQL.FComputedField $ RQL.CFSScalar $ RQL.ComputedFieldScalarSel
+ pure $ RQL.AFComputedField $ RQL.CFSScalar $ RQL.ComputedFieldScalarSelect
{ RQL._cfssFunction = _cffName _cfiFunction
, RQL._cfssType = scalarReturnType
, RQL._cfssColumnOp = colOp
@@ -741,7 +741,7 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
let fieldArgsParser = liftA2 (,) functionArgsParser selectArgsParser
pure $ P.subselection fieldName Nothing fieldArgsParser selectionSetParser <&>
\((functionArgs', args), fields) ->
- RQL.FComputedField $ RQL.CFSTable RQL.JASMultipleRows $ RQL.AnnSelG
+ RQL.AFComputedField $ RQL.CFSTable RQL.JASMultipleRows $ RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (_cffName _cfiFunction) functionArgs' Nothing
, RQL._asnPerm = tablePermissionsInfo remotePerms
@@ -891,16 +891,16 @@ nodeField allTables = do
(perms, pkeyColumns, fields) <-
onNothing (Map.lookup table parseds) $ throwInvalidNodeId $ "the table " <>> ident
whereExp <- buildNodeIdBoolExp columnValues pkeyColumns
- return $ RQL.AnnSelG
+ return $ RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo perms
- , RQL._asnArgs = RQL.TableArgs
- { RQL._taWhere = Just whereExp
- , RQL._taOrderBy = Nothing
- , RQL._taLimit = Nothing
- , RQL._taOffset = Nothing
- , RQL._taDistCols = Nothing
+ , RQL._asnArgs = RQL.SelectArgs
+ { RQL._saWhere = Just whereExp
+ , RQL._saOrderBy = Nothing
+ , RQL._saLimit = Nothing
+ , RQL._saOffset = Nothing
+ , RQL._saDistinct = Nothing
}
, RQL._asnStrfyNum = stringifyNum
}
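
The `tableArgs` hunk above keeps the comment on the limit/offset asymmetry: the limit is parsed and stored as a plain int, while the offset is text-encoded into a SQL expression (`txtEncoder . PGValInteger`) so that values outside the 32-bit range survive. A minimal, base-only sketch of that shape, using hypothetical `SelectArgsSketch`/`mkSelectArgsSketch` names rather than the real `RQL.SelectArgsG` machinery:

```
-- Hypothetical stand-ins for the real SelectArgs fields: the limit stays an
-- Int, while the offset is rendered to a SQL literal up front so it is never
-- narrowed to Int32.
data SelectArgsSketch = SelectArgsSketch
  { sasLimit  :: Maybe Int
  , sasOffset :: Maybe String
  } deriving Show

mkSelectArgsSketch :: Maybe Int -> Maybe Integer -> SelectArgsSketch
mkSelectArgsSketch limit offset = SelectArgsSketch
  { sasLimit  = limit
  , sasOffset = show <$> offset  -- text-encode instead of truncating
  }

main :: IO ()
main = print (mkSelectArgsSketch (Just 10) (Just 9223372036854775807))
```
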
diff --git a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
index b6e49a59071..093df67d6a8 100644
--- a/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
+++ b/server/src-lib/Hasura/GraphQL/Transport/HTTP.hs
@@ -44,7 +44,7 @@ runGQ reqId userInfo reqHdrs queryType req = do
(telemTimeTot_DT, (telemCacheHit, telemLocality, (telemTimeIO_DT, telemQueryType, !resp))) <- withElapsedTime $ do
E.ExecutionCtx _ sqlGenCtx pgExecCtx planCache sc scVer httpManager enableAL <- ask
(telemCacheHit, execPlan) <- E.getResolvedExecPlan pgExecCtx planCache
- userInfo sqlGenCtx enableAL sc scVer queryType httpManager reqHdrs req
+ userInfo sqlGenCtx enableAL sc scVer queryType httpManager reqHdrs req
case execPlan of
E.QueryExecutionPlan queryPlan -> do
case queryPlan of
@@ -76,8 +76,8 @@ runGQ reqId userInfo reqHdrs queryType req = do
return (telemCacheHit, Telem.Local, (telemTimeIO, telemQueryType, HttpResponse resp respHdrs))
E.GExPRemote rsi opDef -> do
let telemQueryType | G._todType opDef == G.OperationTypeMutation = Telem.Mutation
- | otherwise = Telem.Query
- (telemTimeIO, resp) <- E.execRemoteGQ reqId userInfo reqHdrs req rsi opDef
+ | otherwise = Telem.Query
+ (telemTimeIO, resp) <- E.execRemoteGQ reqId userInfo reqHdrs req rsi $ G._todType opDef
return (telemCacheHit, Telem.Remote, (telemTimeIO, telemQueryType, resp))
-}
let telemTimeIO = fromUnits telemTimeIO_DT
diff --git a/server/src-lib/Hasura/GraphQL/Validate.hs b/server/src-lib/Hasura/GraphQL/Validate.hs
index d39f4c5bbc1..5340808b820 100644
--- a/server/src-lib/Hasura/GraphQL/Validate.hs
+++ b/server/src-lib/Hasura/GraphQL/Validate.hs
@@ -1,8 +1,8 @@
module Hasura.GraphQL.Validate
( validateGQ
, showVars
- , RootSelSet(..)
- , SelSet
+ , RootSelectionSet(..)
+ , SelectionSet(..)
, Field(..)
, getTypedOp
, QueryParts(..)
@@ -13,6 +13,9 @@ module Hasura.GraphQL.Validate
, validateVariablesForReuse
, isQueryInAllowlist
+ , unValidateArgsMap
+ , unValidateSelectionSet
+ , unValidateField
) where
import Hasura.Prelude
@@ -22,16 +25,24 @@ import Data.Has
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as HS
import qualified Data.Sequence as Seq
+import qualified Data.Text as T
+import qualified Data.UUID as UUID
+import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
+import Hasura.GraphQL.NormalForm
+import Hasura.GraphQL.Resolve.InputValue (annInpValueToJson)
import Hasura.GraphQL.Schema
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Validate.Context
-import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.InputValue
+import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.DML.Select.Types
import Hasura.RQL.Types
import Hasura.RQL.Types.QueryCollection
+import Hasura.SQL.Time
+import Hasura.SQL.Value
data QueryParts
= QueryParts
@@ -137,19 +148,12 @@ validateFrag
validateFrag (G.FragmentDefinition n onTy dirs selSet) = do
unless (null dirs) $ throwVE
"unexpected directives at fragment definition"
- tyInfo <- getTyInfoVE onTy
- objTyInfo <- onNothing (getObjTyM tyInfo) $ throwVE
- "fragments can only be defined on object types"
- return $ FragDef n objTyInfo selSet
-
-data RootSelSet
- = RQuery !SelSet
- | RMutation !SelSet
- | RSubscription !Field
- deriving (Show, Eq)
+ fragmentTypeInfo <- getFragmentTyInfo onTy
+ return $ FragDef n fragmentTypeInfo selSet
validateGQ
- :: (MonadError QErr m, MonadReader GCtx m, MonadReusability m) => QueryParts -> m RootSelSet
+ :: (MonadError QErr m, MonadReader GCtx m, MonadReusability m)
+ => QueryParts -> m RootSelectionSet
validateGQ (QueryParts opDef opRoot fragDefsL varValsM) = do
ctx <- ask
@@ -165,19 +169,22 @@ validateGQ (QueryParts opDef opRoot fragDefsL varValsM) = do
-- build a validation ctx
let valCtx = ValidationCtx (_gTypes ctx) annVarVals annFragDefs
- selSet <- flip runReaderT valCtx $ denormSelSet [] opRoot $
+ selSet <- flip runReaderT valCtx $ parseObjectSelectionSet valCtx opRoot $
G._todSelectionSet opDef
case G._todType opDef of
G.OperationTypeQuery -> return $ RQuery selSet
G.OperationTypeMutation -> return $ RMutation selSet
G.OperationTypeSubscription ->
- case Seq.viewl selSet of
- Seq.EmptyL -> throw500 "empty selset for subscription"
- fld Seq.:< rst -> do
- unless (null rst) $
- throwVE "subscription must select only one top level field"
- return $ RSubscription fld
+ case OMap.toList $ unAliasedFields $ unObjectSelectionSet selSet of
+ [] -> throw500 "empty selset for subscription"
+ (_:rst) -> do
+ -- As an internal testing feature, we support subscribing to multiple
+ -- selection sets. First check if the corresponding directive is set.
+ let multipleAllowed = G.Directive "_multiple_top_level_fields" [] `elem` G._todDirectives opDef
+ unless (multipleAllowed || null rst) $
+ throwVE "subscriptions must select one top level field"
+ return $ RSubscription selSet
isQueryInAllowlist :: GQLExecDoc -> HS.HashSet GQLQuery -> Bool
isQueryInAllowlist q = HS.member gqlQuery
@@ -204,3 +211,119 @@ getQueryParts (GQLReq opNameM q varValsM) = do
return $ QueryParts opDef opRoot fragDefsL varValsM
where
(selSets, opDefs, fragDefsL) = G.partitionExDefs $ unGQLExecDoc q
+
+-- | Convert the validated arguments to GraphQL parser AST arguments
+unValidateArgsMap :: ArgsMap -> [RemoteFieldArgument]
+unValidateArgsMap argsMap =
+ map (\(n, inpVal) ->
+ let _rfaArgument = G.Argument n $ unValidateInpVal inpVal
+ _rfaVariable = unValidateInpVariable inpVal
+ in RemoteFieldArgument {..})
+ . Map.toList $ argsMap
+
+-- | Convert the validated field to GraphQL parser AST field
+unValidateField :: G.Alias -> Field -> G.Field
+unValidateField alias (Field name _ argsMap selSet) =
+ let args = map (\(n, inpVal) -> G.Argument n $ unValidateInpVal inpVal) $
+ Map.toList argsMap
+ in G.Field (Just alias) name args [] $ unValidateSelectionSet selSet
+
+-- | Convert the validated selection set to GraphQL parser AST selection set
+unValidateSelectionSet :: SelectionSet -> G.SelectionSet
+unValidateSelectionSet = \case
+ SelectionSetObject selectionSet -> fromSelectionSet selectionSet
+ SelectionSetInterface selectionSet -> fromScopedSelectionSet selectionSet
+ SelectionSetUnion selectionSet -> fromScopedSelectionSet selectionSet
+ SelectionSetNone -> mempty
+ where
+ fromAliasedFields :: (IsField f) => AliasedFields f -> G.SelectionSet
+ fromAliasedFields =
+ map (G.SelectionField . uncurry unValidateField) .
+ OMap.toList . fmap toField . unAliasedFields
+ fromSelectionSet =
+ fromAliasedFields . unObjectSelectionSet
+ toInlineSelection typeName =
+ G.SelectionInlineFragment . G.InlineFragment (Just typeName) mempty .
+ fromSelectionSet
+ fromScopedSelectionSet (ScopedSelectionSet base specific) =
+ map (uncurry toInlineSelection) (Map.toList specific) <> fromAliasedFields base
+
+-- | Get the variable definition and its value (if it exists)
+unValidateInpVariable :: AnnInpVal -> Maybe [(G.VariableDefinition,A.Value)]
+unValidateInpVariable inputValue =
+ case (_aivValue inputValue) of
+ AGScalar _ _ -> mkVariableDefnValueTuple inputValue
+ AGEnum _ _ -> mkVariableDefnValueTuple inputValue
+ AGObject _ o ->
+ (\obj ->
+ let listObjects = OMap.toList obj
+ in concat $
+ mapMaybe (\(_, inpVal) -> unValidateInpVariable inpVal) listObjects)
+ <$> o
+ AGArray _ _ -> mkVariableDefnValueTuple inputValue
+ where
+ mkVariableDefnValueTuple val = maybe Nothing (\vars -> Just [vars]) $
+ variableDefnValueTuple val
+
+ variableDefnValueTuple :: AnnInpVal -> Maybe (G.VariableDefinition,A.Value)
+ variableDefnValueTuple inpVal@AnnInpVal {..} =
+ let varDefn = G.VariableDefinition <$> _aivVariable <*> Just _aivType <*> Just Nothing
+ in (,) <$> varDefn <*> Just (annInpValueToJson inpVal)
+
+-- | Convert the validated input value to a GraphQL value. If the input value
+-- is a variable, it is returned as-is without being resolved; otherwise it
+-- is resolved
+unValidateInpVal :: AnnInpVal -> G.Value
+unValidateInpVal (AnnInpVal _ var val) = fromMaybe G.VNull $
+  -- if a variable is found, return it directly; otherwise convert the
+  -- value into a G.Value and return that
+ case var of
+ Just var' -> Just $ G.VVariable var'
+ Nothing ->
+ case val of
+ AGScalar _ v -> pgScalarToGValue <$> v
+ AGEnum _ v -> pgEnumToGEnum v
+ AGObject _ o ->
+ (G.VObject . G.ObjectValueG
+ . map (uncurry G.ObjectFieldG . (second unValidateInpVal))
+ . OMap.toList
+ ) <$> o
+ AGArray _ vs -> (G.VList . G.ListValueG . map unValidateInpVal) <$> vs
+
+ where
+ pgEnumToGEnum :: AnnGEnumValue -> Maybe G.Value
+ pgEnumToGEnum = \case
+ AGESynthetic v -> G.VEnum <$> v
+ AGEReference _ v -> (G.VEnum . G.EnumValue . G.Name . getEnumValue) <$> v
+
+ pgScalarToGValue :: PGScalarValue -> G.Value
+ pgScalarToGValue = \case
+ PGValInteger i -> G.VInt $ fromIntegral i
+ PGValSmallInt i -> G.VInt $ fromIntegral i
+ PGValBigInt i -> G.VInt $ fromIntegral i
+ PGValFloat f -> G.VFloat $ realToFrac f
+ PGValDouble d -> G.VFloat $ realToFrac d
+ -- TODO: Scientific is a danger zone; use its safe conv function.
+ PGValNumeric sc -> G.VFloat $ realToFrac sc
+ PGValMoney m -> G.VFloat $ realToFrac m
+ PGValBoolean b -> G.VBoolean b
+ PGValChar t -> toStringValue $ T.singleton t
+ PGValVarchar t -> toStringValue t
+ PGValText t -> toStringValue t
+ PGValCitext t -> toStringValue t
+ PGValDate d -> toStringValue $ T.pack $ showGregorian d
+ PGValTimeStampTZ u -> toStringValue $ T.pack $
+ formatTime defaultTimeLocale "%FT%T%QZ" u
+ PGValTimeStamp u -> toStringValue $ T.pack $
+ formatTime defaultTimeLocale "%FT%T%QZ" u
+ PGValTimeTZ (ZonedTimeOfDay tod tz) ->
+ toStringValue $ T.pack (show tod ++ timeZoneOffsetString tz)
+ PGNull _ -> G.VNull
+ PGValJSON (Q.JSON v) -> jsonValueToGValue v
+ PGValJSONB (Q.JSONB v) -> jsonValueToGValue v
+ PGValGeo v -> jsonValueToGValue $ A.toJSON v
+ PGValRaster v -> jsonValueToGValue $ A.toJSON v
+ PGValUUID u -> toStringValue $ UUID.toText u
+ PGValUnknown t -> toStringValue t
+ where
+ toStringValue = G.VString . G.StringValue
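
The rewritten subscription branch of `validateGQ` above allows, as an internal testing feature, more than one top-level field when the operation carries the `_multiple_top_level_fields` directive; otherwise exactly one field is required. A self-contained sketch of that check, with plain strings standing in for the real directive and selection-set types:

```
-- Hypothetical, simplified check: directive names come from the operation
-- definition, fields are the aliased top-level selections.
checkSubscriptionRoot
  :: [String]           -- directive names on the operation
  -> [(String, field)]  -- top-level (alias, field) pairs
  -> Either String [(String, field)]
checkSubscriptionRoot directives fields =
  case fields of
    []  -> Left "empty selection set for subscription"
    [_] -> Right fields
    _ | multipleAllowed -> Right fields
      | otherwise       -> Left "subscriptions must select one top level field"
  where
    -- internal testing escape hatch, as in the diff above
    multipleAllowed = "_multiple_top_level_fields" `elem` directives

-- e.g. checkSubscriptionRoot [] [("a", ()), ("b", ())]
--   == Left "subscriptions must select one top level field"
```
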
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Context.hs b/server/src-lib/Hasura/GraphQL/Validate/Context.hs
index b82c133812f..a21d8e84d99 100644
--- a/server/src-lib/Hasura/GraphQL/Validate/Context.hs
+++ b/server/src-lib/Hasura/GraphQL/Validate/Context.hs
@@ -4,6 +4,7 @@ module Hasura.GraphQL.Validate.Context
, getInpFieldInfo
, getTyInfo
, getTyInfoVE
+ , getFragmentTyInfo
, module Hasura.GraphQL.Utils
) where
@@ -19,11 +20,11 @@ import Hasura.RQL.Types
getFieldInfo
:: ( MonadError QErr m)
- => ObjTyInfo -> G.Name -> m ObjFldInfo
-getFieldInfo oti fldName =
- onNothing (Map.lookup fldName $ _otiFields oti) $ throwVE $
+ => G.NamedType -> ObjFieldMap -> G.Name -> m ObjFldInfo
+getFieldInfo typeName fieldMap fldName =
+ onNothing (Map.lookup fldName fieldMap) $ throwVE $
"field " <> showName fldName <>
- " not found in type: " <> showNamedTy (_otiName oti)
+ " not found in type: " <> showNamedTy typeName
getInpFieldInfo
:: ( MonadError QErr m)
@@ -65,3 +66,13 @@ getTyInfoVE namedTy = do
tyMap <- asks getter
onNothing (Map.lookup namedTy tyMap) $
throwVE $ "no such type exists in the schema: " <> showNamedTy namedTy
+
+getFragmentTyInfo
+ :: (MonadReader r m, Has TypeMap r, MonadError QErr m)
+ => G.NamedType -> m FragmentTypeInfo
+getFragmentTyInfo onType =
+ getTyInfoVE onType >>= \case
+ TIObj tyInfo -> pure $ FragmentTyObject tyInfo
+ TIIFace tyInfo -> pure $ FragmentTyInterface tyInfo
+ TIUnion tyInfo -> pure $ FragmentTyUnion tyInfo
+ _ -> throwVE "fragments can only be defined on object/interface/union types"
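
`getFragmentTyInfo` above widens fragment type conditions from objects only to objects, interfaces, and unions, rejecting everything else. A tiny sketch of that classification, with hypothetical sum types in place of the real `TypeInfo`/`FragmentTypeInfo`:

```
{-# LANGUAGE LambdaCase #-}

-- Hypothetical stand-ins for the real type-info and fragment-type-info sums.
data TypeKind
  = KindObject | KindInterface | KindUnion | KindScalar | KindEnum | KindInputObject
  deriving (Show, Eq)

data FragmentKind = FragOnObject | FragOnInterface | FragOnUnion
  deriving (Show, Eq)

classifyFragmentType :: TypeKind -> Either String FragmentKind
classifyFragmentType = \case
  KindObject    -> Right FragOnObject
  KindInterface -> Right FragOnInterface
  KindUnion     -> Right FragOnUnion
  _             -> Left "fragments can only be defined on object/interface/union types"
```
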
diff --git a/server/src-lib/Hasura/GraphQL/Validate/SelectionSet.hs b/server/src-lib/Hasura/GraphQL/Validate/SelectionSet.hs
new file mode 100644
index 00000000000..64b3972cd7c
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate/SelectionSet.hs
@@ -0,0 +1,550 @@
+{-# LANGUAGE RecordWildCards #-}
+{-# LANGUAGE TypeFamilyDependencies #-}
+module Hasura.GraphQL.Validate.SelectionSet
+ ( ArgsMap
+ , Field(..)
+ , AliasedFields(..)
+ , SelectionSet(..)
+ , ObjectSelectionSet(..)
+ , traverseObjectSelectionSet
+ , InterfaceSelectionSet
+ , UnionSelectionSet
+ , RootSelectionSet(..)
+ , parseObjectSelectionSet
+ , asObjectSelectionSet
+ , asInterfaceSelectionSet
+ , getMemberSelectionSet
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
+import qualified Data.HashSet as Set
+import qualified Data.List as L
+import qualified Data.Sequence.NonEmpty as NE
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.NormalForm
+import Hasura.GraphQL.Validate.Context
+import Hasura.GraphQL.Validate.InputValue
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Value
+
+class HasSelectionSet a where
+
+ getTypename :: a -> G.NamedType
+ getMemberTypes :: a -> Set.HashSet G.NamedType
+
+ fieldToSelectionSet
+ :: G.Alias -> NormalizedField a -> NormalizedSelectionSet a
+
+ parseField_
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ , MonadState [G.Name] m
+ )
+ => a
+ -> G.Field
+ -> m (Maybe (NormalizedField a))
+
+ mergeNormalizedSelectionSets
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ )
+ => [NormalizedSelectionSet a]
+ -> m (NormalizedSelectionSet a)
+
+ fromObjectSelectionSet
+ :: G.NamedType
+ -- ^ parent typename
+ -> G.NamedType
+ -- ^ fragment typename
+ -> Set.HashSet G.NamedType
+ -- ^ common types
+ -> NormalizedSelectionSet ObjTyInfo
+ -> NormalizedSelectionSet a
+
+ fromInterfaceSelectionSet
+ :: G.NamedType
+ -- ^ parent typename
+ -> G.NamedType
+ -- ^ fragment typename
+ -> Set.HashSet G.NamedType
+ -> NormalizedSelectionSet IFaceTyInfo
+ -> NormalizedSelectionSet a
+
+ fromUnionSelectionSet
+ :: G.NamedType
+ -- ^ parent typename
+ -> G.NamedType
+ -- ^ fragment typename
+ -> Set.HashSet G.NamedType
+ -- ^ common types
+ -> NormalizedSelectionSet UnionTyInfo
+ -> NormalizedSelectionSet a
+
+parseObjectSelectionSet
+ :: ( MonadError QErr m
+ , MonadReusability m
+ )
+ => ValidationCtx
+ -> ObjTyInfo
+ -> G.SelectionSet
+ -> m ObjectSelectionSet
+parseObjectSelectionSet validationCtx objectTypeInfo selectionSet =
+ flip evalStateT [] $ flip runReaderT validationCtx $
+ parseSelectionSet objectTypeInfo selectionSet
+
+selectionToSelectionSet
+ :: HasSelectionSet a
+ => NormalizedSelection a -> NormalizedSelectionSet a
+selectionToSelectionSet = \case
+ SelectionField alias fld -> fieldToSelectionSet alias fld
+ SelectionInlineFragmentSpread selectionSet -> selectionSet
+ SelectionFragmentSpread _ selectionSet -> selectionSet
+
+parseSelectionSet
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ , HasSelectionSet a
+ , MonadState [G.Name] m
+ )
+ => a
+ -> G.SelectionSet
+ -> m (NormalizedSelectionSet a)
+parseSelectionSet fieldTypeInfo selectionSet = do
+ visitedFragments <- get
+ withPathK "selectionSet" $ do
+ -- The visited fragments state shouldn't accumulate over a selection set.
+ normalizedSelections <-
+ catMaybes <$> mapM (parseSelection visitedFragments fieldTypeInfo) selectionSet
+ mergeNormalizedSelections normalizedSelections
+ where
+ mergeNormalizedSelections = mergeNormalizedSelectionSets . map selectionToSelectionSet
+
+-- | While interfaces and objects have fields, unions do not, so field
+-- parsing is specialized per output type (via 'parseField_')
+parseSelection
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ , HasSelectionSet a
+ )
+ => [G.Name]
+ -> a -- parent type info
+ -> G.Selection
+ -> m (Maybe (NormalizedSelection a))
+parseSelection visitedFragments parentTypeInfo =
+ flip evalStateT visitedFragments . \case
+ G.SelectionField fld -> withPathK (G.unName $ G._fName fld) $ do
+ let fieldName = G._fName fld
+ fieldAlias = fromMaybe (G.Alias fieldName) $ G._fAlias fld
+ fmap (SelectionField fieldAlias) <$> parseField_ parentTypeInfo fld
+ G.SelectionFragmentSpread (G.FragmentSpread name directives) -> do
+ FragDef _ fragmentTyInfo fragmentSelectionSet <- getFragmentInfo name
+ withPathK (G.unName name) $
+ fmap (SelectionFragmentSpread name) <$>
+ parseFragment parentTypeInfo fragmentTyInfo directives fragmentSelectionSet
+ G.SelectionInlineFragment G.InlineFragment{..} -> do
+ let fragmentType = fromMaybe (getTypename parentTypeInfo) _ifTypeCondition
+ fragmentTyInfo <- getFragmentTyInfo fragmentType
+ withPathK "inlineFragment" $ fmap SelectionInlineFragmentSpread <$>
+ parseFragment parentTypeInfo fragmentTyInfo _ifDirectives _ifSelectionSet
+
+parseFragment
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ , MonadState [G.Name] m
+ , HasSelectionSet a
+ )
+ => a
+ -> FragmentTypeInfo
+ -> [G.Directive]
+ -> G.SelectionSet
+ -> m (Maybe (NormalizedSelectionSet a))
+parseFragment parentTyInfo fragmentTyInfo directives fragmentSelectionSet = do
+ commonTypes <- validateSpread
+ case fragmentTyInfo of
+ FragmentTyObject objTyInfo ->
+ withDirectives directives $
+ fmap (fromObjectSelectionSet parentType fragmentType commonTypes) $
+ parseSelectionSet objTyInfo fragmentSelectionSet
+ FragmentTyInterface interfaceTyInfo ->
+ withDirectives directives $
+ fmap (fromInterfaceSelectionSet parentType fragmentType commonTypes) $
+ parseSelectionSet interfaceTyInfo fragmentSelectionSet
+ FragmentTyUnion unionTyInfo ->
+ withDirectives directives $
+ fmap (fromUnionSelectionSet parentType fragmentType commonTypes) $
+ parseSelectionSet unionTyInfo fragmentSelectionSet
+ where
+ validateSpread = do
+ let commonTypes = parentTypeMembers `Set.intersection` fragmentTypeMembers
+ if null commonTypes then
+ -- TODO: better error location by capturing the fragment source -
+ -- named or otherwise
+ -- throwVE $ "cannot spread fragment " <> showName name <> " defined on " <>
+ throwVE $ "cannot spread fragment defined on " <> showNamedTy fragmentType
+ <> " when selecting fields of type " <> showNamedTy parentType
+ else pure commonTypes
+
+ parentType = getTypename parentTyInfo
+ parentTypeMembers = getMemberTypes parentTyInfo
+
+ fragmentType = case fragmentTyInfo of
+ FragmentTyObject tyInfo -> getTypename tyInfo
+ FragmentTyInterface tyInfo -> getTypename tyInfo
+ FragmentTyUnion tyInfo -> getTypename tyInfo
+ fragmentTypeMembers = case fragmentTyInfo of
+ FragmentTyObject tyInfo -> getMemberTypes tyInfo
+ FragmentTyInterface tyInfo -> getMemberTypes tyInfo
+ FragmentTyUnion tyInfo -> getMemberTypes tyInfo
+
+class IsField f => MergeableField f where
+
+ checkFieldMergeability
+ :: (MonadError QErr m) => G.Alias -> NE.NESeq f -> m f
+
+instance MergeableField Field where
+
+ checkFieldMergeability alias fields = do
+ let groupedFlds = toList $ NE.toSeq fields
+ fldNames = L.nub $ map getFieldName groupedFlds
+ args = L.nub $ map getFieldArguments groupedFlds
+ when (length fldNames > 1) $
+ throwVE $ "cannot merge different fields under the same alias ("
+ <> showName (G.unAlias alias) <> "): "
+ <> showNames fldNames
+ when (length args > 1) $
+ throwVE $ "cannot merge fields with different arguments"
+ <> " under the same alias: "
+ <> showName (G.unAlias alias)
+ let fld = NE.head fields
+ mergedGroupSelectionSet <- mergeSelectionSets $ fmap _fSelSet fields
+ return $ fld { _fSelSet = mergedGroupSelectionSet }
+
+instance MergeableField Typename where
+
+ checkFieldMergeability _ fields = pure $ NE.head fields
+
+parseArguments
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ )
+ => ParamMap
+ -> [G.Argument]
+ -> m ArgsMap
+parseArguments fldParams argsL = do
+
+ args <- onLeft (mkMapWith G._aName argsL) $ \dups ->
+ throwVE $ "the following arguments are defined more than once: " <>
+ showNames dups
+
+ let requiredParams = Map.filter (G.isNotNull . _iviType) fldParams
+
+ inpArgs <- forM args $ \(G.Argument argName argVal) ->
+ withPathK (G.unName argName) $ do
+ argTy <- getArgTy argName
+ validateInputValue valueParser argTy argVal
+
+ forM_ requiredParams $ \argDef -> do
+ let param = _iviName argDef
+ onNothing (Map.lookup param inpArgs) $ throwVE $ mconcat
+ [ "the required argument ", showName param, " is missing"]
+
+ return inpArgs
+
+ where
+ getArgTy argName =
+ onNothing (_iviType <$> Map.lookup argName fldParams) $ throwVE $
+ "no such argument " <> showName argName <> " is expected"
+
+mergeFields
+ :: ( MonadError QErr m
+ , MergeableField f
+ )
+ -- => Seq.Seq Field
+ => [AliasedFields f]
+ -> m (AliasedFields f)
+mergeFields flds =
+ AliasedFields <$> OMap.traverseWithKey checkFieldMergeability groups
+ where
+ groups = foldr (OMap.unionWith (<>)) mempty $
+ map (fmap NE.init . unAliasedFields) flds
+
+appendSelectionSets
+ :: (MonadError QErr m) => SelectionSet -> SelectionSet -> m SelectionSet
+appendSelectionSets = curry \case
+ (SelectionSetObject s1, SelectionSetObject s2) ->
+ SelectionSetObject <$> mergeObjectSelectionSets [s1, s2]
+ (SelectionSetInterface s1, SelectionSetInterface s2) ->
+ SelectionSetInterface <$> appendScopedSelectionSet s1 s2
+ (SelectionSetUnion s1, SelectionSetUnion s2) ->
+ SelectionSetUnion <$> appendScopedSelectionSet s1 s2
+ (SelectionSetNone, SelectionSetNone) -> pure SelectionSetNone
+ (_, _) -> throw500 $ "mergeSelectionSets: 'same kind' assertion failed"
+
+
+-- query q {
+-- author {
+-- id
+-- }
+-- author {
+-- name
+-- }
+-- }
+--
+-- | When we merge two selection sets coming down two different trees, they
+-- should be of the same kind; as this is not enforced by the type system,
+-- an internal error is thrown when the assumption is violated
+mergeSelectionSets
+ :: (MonadError QErr m) => NE.NESeq SelectionSet -> m SelectionSet
+-- mergeSelectionSets = curry $ \case
+mergeSelectionSets selectionSets =
+ foldM appendSelectionSets (NE.head selectionSets) $ NE.tail selectionSets
+
+mergeObjectSelectionSets
+ :: (MonadError QErr m) => [ObjectSelectionSet] -> m ObjectSelectionSet
+mergeObjectSelectionSets =
+ fmap ObjectSelectionSet . mergeFields . map unObjectSelectionSet
+
+mergeObjectSelectionSetMaps
+ :: (MonadError QErr m) => [ObjectSelectionSetMap] -> m ObjectSelectionSetMap
+mergeObjectSelectionSetMaps selectionSetMaps =
+ traverse mergeObjectSelectionSets $
+ foldr (Map.unionWith (<>)) mempty $ map (fmap (:[])) selectionSetMaps
+
+appendScopedSelectionSet
+ :: (MonadError QErr m, MergeableField f)
+ => ScopedSelectionSet f -> ScopedSelectionSet f -> m (ScopedSelectionSet f)
+appendScopedSelectionSet s1 s2 =
+ ScopedSelectionSet
+ <$> mergeFields [_sssBaseSelectionSet s1, _sssBaseSelectionSet s2]
+ <*> mergeObjectSelectionSetMaps [s1MembersUnified, s2MembersUnified]
+
+ where
+ s1Base = fmap toField $ _sssBaseSelectionSet s1
+ s2Base = fmap toField $ _sssBaseSelectionSet s2
+
+ s1MembersUnified =
+ (_sssMemberSelectionSets s1)
+ <> fmap (const (ObjectSelectionSet s1Base)) (_sssMemberSelectionSets s2)
+
+ s2MembersUnified =
+ (_sssMemberSelectionSets s2)
+ <> fmap (const (ObjectSelectionSet s2Base)) (_sssMemberSelectionSets s1)
+
+mergeScopedSelectionSets
+ :: (MonadError QErr m, MergeableField f)
+ => [ScopedSelectionSet f] -> m (ScopedSelectionSet f)
+mergeScopedSelectionSets selectionSets =
+ foldM appendScopedSelectionSet emptyScopedSelectionSet selectionSets
+
+withDirectives
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ )
+ => [G.Directive]
+ -> m a
+ -> m (Maybe a)
+withDirectives dirs act = do
+ procDirs <- withPathK "directives" $ do
+ dirDefs <- onLeft (mkMapWith G._dName dirs) $ \dups ->
+ throwVE $ "the following directives are used more than once: " <>
+ showNames dups
+
+ flip Map.traverseWithKey dirDefs $ \name dir ->
+ withPathK (G.unName name) $ do
+ dirInfo <- onNothing (Map.lookup (G._dName dir) defDirectivesMap) $
+ throwVE $ "unexpected directive: " <> showName name
+ procArgs <- withPathK "args" $ parseArguments (_diParams dirInfo)
+ (G._dArguments dir)
+ getIfArg procArgs
+
+ let shouldSkip = fromMaybe False $ Map.lookup "skip" procDirs
+ shouldInclude = fromMaybe True $ Map.lookup "include" procDirs
+
+ if not shouldSkip && shouldInclude
+ then Just <$> act
+ else return Nothing
+
+ where
+ getIfArg m = do
+ val <- onNothing (Map.lookup "if" m) $ throw500
+ "missing if argument in the directive"
+ when (isJust $ _aivVariable val) markNotReusable
+ case _aivValue val of
+ AGScalar _ (Just (PGValBoolean v)) -> return v
+ _ -> throw500 "did not find boolean scalar for if argument"
+
+getFragmentInfo
+ :: (MonadReader ValidationCtx m, MonadError QErr m, MonadState [G.Name] m)
+ => G.Name
+ -- ^ fragment name
+ -> m FragDef
+getFragmentInfo name = do
+ -- check for cycles
+ visitedFragments <- get
+ if name `elem` visitedFragments
+ then throwVE $ "cannot spread fragment " <> showName name
+ <> " within itself via "
+ <> T.intercalate "," (map G.unName visitedFragments)
+ else put $ name:visitedFragments
+ fragInfo <- Map.lookup name <$> asks _vcFragDefMap
+ onNothing fragInfo $ throwVE $ "fragment '" <> G.unName name <> "' not found"
+
+denormalizeField
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m
+ , MonadReusability m
+ , MonadState [G.Name] m
+ )
+ => ObjFldInfo
+ -> G.Field
+ -> m (Maybe Field)
+denormalizeField fldInfo (G.Field _ name args dirs selSet) = do
+
+ let fldTy = _fiTy fldInfo
+ fldBaseTy = getBaseTy fldTy
+
+ fldTyInfo <- getTyInfo fldBaseTy
+
+ argMap <- withPathK "args" $ parseArguments (_fiParams fldInfo) args
+
+ fields <- case (fldTyInfo, selSet) of
+
+ (TIObj _, []) ->
+ throwVE $ "field " <> showName name <> " of type "
+ <> G.showGT fldTy <> " must have a selection of subfields"
+
+ (TIObj objTyInfo, _) ->
+ SelectionSetObject <$> parseSelectionSet objTyInfo selSet
+
+ (TIIFace _, []) ->
+ throwVE $ "field " <> showName name <> " of type "
+ <> G.showGT fldTy <> " must have a selection of subfields"
+
+ (TIIFace interfaceTyInfo, _) ->
+ SelectionSetInterface <$> parseSelectionSet interfaceTyInfo selSet
+
+ (TIUnion _, []) ->
+ throwVE $ "field " <> showName name <> " of type "
+ <> G.showGT fldTy <> " must have a selection of subfields"
+
+ (TIUnion unionTyInfo, _) ->
+ SelectionSetUnion <$> parseSelectionSet unionTyInfo selSet
+
+ (TIScalar _, []) -> return SelectionSetNone
+ -- when scalar/enum and no empty set
+ (TIScalar _, _) ->
+ throwVE $ "field " <> showName name <> " must not have a "
+ <> "selection since type " <> G.showGT fldTy <> " has no subfields"
+
+ (TIEnum _, []) -> return SelectionSetNone
+ (TIEnum _, _) ->
+ throwVE $ "field " <> showName name <> " must not have a "
+ <> "selection since type " <> G.showGT fldTy <> " has no subfields"
+
+ (TIInpObj _, _) ->
+ throwVE $ "internal error: unexpected input type for field: "
+ <> showName name
+
+ withDirectives dirs $ pure $ Field name fldBaseTy argMap fields
+
+type instance NormalizedSelectionSet ObjTyInfo = ObjectSelectionSet
+type instance NormalizedField ObjTyInfo = Field
+
+instance HasSelectionSet ObjTyInfo where
+
+ getTypename = _otiName
+ getMemberTypes = Set.singleton . _otiName
+
+ parseField_ objTyInfo field = do
+ fieldInfo <- getFieldInfo (_otiName objTyInfo) (_otiFields objTyInfo) $ G._fName field
+ denormalizeField fieldInfo field
+
+ fieldToSelectionSet alias fld =
+ ObjectSelectionSet $ AliasedFields $ OMap.singleton alias fld
+
+ mergeNormalizedSelectionSets = mergeObjectSelectionSets
+
+ fromObjectSelectionSet _ _ _ objectSelectionSet =
+ objectSelectionSet
+
+ fromInterfaceSelectionSet parentType _ _ interfaceSelectionSet =
+ getMemberSelectionSet parentType interfaceSelectionSet
+
+ fromUnionSelectionSet parentType _ _ unionSelectionSet =
+ getMemberSelectionSet parentType unionSelectionSet
+
+type instance NormalizedSelectionSet IFaceTyInfo = InterfaceSelectionSet
+type instance NormalizedField IFaceTyInfo = Field
+
+instance HasSelectionSet IFaceTyInfo where
+
+ getTypename = _ifName
+ getMemberTypes = _ifMemberTypes
+
+ parseField_ interfaceTyInfo field = do
+ fieldInfo <- getFieldInfo (_ifName interfaceTyInfo) (_ifFields interfaceTyInfo)
+ $ G._fName field
+ denormalizeField fieldInfo field
+
+ fieldToSelectionSet alias field =
+ ScopedSelectionSet (AliasedFields $ OMap.singleton alias field) mempty
+
+ mergeNormalizedSelectionSets = mergeScopedSelectionSets
+
+ fromObjectSelectionSet _ fragmentType _ objectSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) $
+ Map.singleton fragmentType objectSelectionSet
+
+ fromInterfaceSelectionSet _ _ commonTypes interfaceSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) $
+ Map.fromList $ flip map (toList commonTypes) $
+ \commonType -> (commonType, getMemberSelectionSet commonType interfaceSelectionSet)
+
+ fromUnionSelectionSet _ _ commonTypes unionSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) $
+ Map.fromList $ flip map (toList commonTypes) $
+ \commonType -> (commonType, getMemberSelectionSet commonType unionSelectionSet)
+
+type instance NormalizedSelectionSet UnionTyInfo = UnionSelectionSet
+type instance NormalizedField UnionTyInfo = Typename
+
+instance HasSelectionSet UnionTyInfo where
+
+ getTypename = _utiName
+ getMemberTypes = _utiMemberTypes
+
+ parseField_ unionTyInfo field = do
+ let fieldMap = Map.singleton (_fiName typenameFld) typenameFld
+ fieldInfo <- getFieldInfo (_utiName unionTyInfo) fieldMap $ G._fName field
+ fmap (const Typename) <$> denormalizeField fieldInfo field
+
+ fieldToSelectionSet alias field =
+ ScopedSelectionSet (AliasedFields $ OMap.singleton alias field) mempty
+
+ mergeNormalizedSelectionSets = mergeScopedSelectionSets
+
+ fromObjectSelectionSet _ fragmentType _ objectSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) $
+ Map.singleton fragmentType objectSelectionSet
+
+ fromInterfaceSelectionSet _ _ commonTypes interfaceSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) $
+ Map.fromList $ flip map (toList commonTypes) $
+ \commonType -> (commonType, getMemberSelectionSet commonType interfaceSelectionSet)
+
+ fromUnionSelectionSet _ _ commonTypes unionSelectionSet =
+ ScopedSelectionSet (AliasedFields mempty) $
+ Map.fromList $ flip map (toList commonTypes) $
+ \commonType -> (commonType, getMemberSelectionSet commonType unionSelectionSet)
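
The core of the new `SelectionSet` module is the alias-based merge (`mergeFields` with `checkFieldMergeability`): fields grouped under one alias must agree on the field name and on their arguments, and their sub-selections are merged, which is what collapses the commented `query q { author { id } author { name } }` example into a single `author` selection. A minimal sketch of that rule over a hypothetical `FieldSketch` type (base and containers only); the real code works over insertion-ordered maps and merges sub-selections recursively:

```
import qualified Data.Map.Strict as M
import Data.List (nub)

-- Hypothetical, flattened stand-in for a validated field.
data FieldSketch = FieldSketch
  { fName   :: String
  , fArgs   :: [(String, String)]
  , fSelSet :: [String]           -- stand-in for a nested selection set
  } deriving (Show, Eq)

-- Group (alias, field) pairs by alias and merge each group, failing on
-- conflicting field names or arguments.
mergeAliased :: [(String, FieldSketch)] -> Either String (M.Map String FieldSketch)
mergeAliased =
  M.traverseWithKey checkGroup . M.fromListWith (flip (++)) . map (fmap (:[]))
  where
    checkGroup alias flds
      | length (nub (map fName flds)) > 1 =
          Left ("cannot merge different fields under the same alias " ++ alias)
      | length (nub (map fArgs flds)) > 1 =
          Left ("cannot merge fields with different arguments under alias " ++ alias)
      | otherwise =
          Right $ (head flds) { fSelSet = concatMap fSelSet flds }

-- e.g. mergeAliased [("a", FieldSketch "author" [] ["id"]), ("a", FieldSketch "author" [] ["name"])]
--   == Right (M.fromList [("a", FieldSketch "author" [] ["id", "name"])])
```
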
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs
new file mode 100644
index 00000000000..9cac61a359a
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs
@@ -0,0 +1,812 @@
+{-# LANGUAGE GADTs #-}
+module Hasura.GraphQL.Validate.Types
+ ( InpValInfo(..)
+ , ParamMap
+
+ , typenameFld
+ , ObjFldInfo(..)
+ , mkHsraObjFldInfo
+ , ObjFieldMap
+
+ -- Don't expose 'ObjTyInfo' constructor. Instead use 'mkObjTyInfo' or 'mkHsraObjTyInfo'
+ -- which will auto-insert the compulsory '__typename' field.
+ , ObjTyInfo
+ , _otiDesc
+ , _otiName
+ , _otiImplIFaces
+ , _otiFields
+ , mkObjTyInfo
+ , mkHsraObjTyInfo
+
+ -- Don't expose 'IFaceTyInfo' constructor. Instead use 'mkIFaceTyInfo'
+ -- which will auto-insert the compulsory '__typename' field.
+ , IFaceTyInfo
+ , _ifDesc
+ , _ifName
+ , _ifFields
+ , _ifMemberTypes
+ , mkIFaceTyInfo
+
+ , IFacesSet
+ , UnionTyInfo(..)
+ , FragDef(..)
+ , FragmentTypeInfo(..)
+ , FragDefMap
+ , AnnVarVals
+ , AnnInpVal(..)
+
+ , EnumTyInfo(..)
+ , mkHsraEnumTyInfo
+
+ , EnumValuesInfo(..)
+ , normalizeEnumValues
+ , EnumValInfo(..)
+ , InpObjFldMap
+ , InpObjTyInfo(..)
+ , mkHsraInpTyInfo
+
+ , ScalarTyInfo(..)
+ , fromScalarTyDef
+ , mkHsraScalarTyInfo
+
+ , DirectiveInfo(..)
+ , AsObjType(..)
+ , defaultDirectives
+ , defDirectivesMap
+ , defaultSchema
+ , TypeInfo(..)
+ , isObjTy
+ , isIFaceTy
+ , getPossibleObjTypes
+ , getObjTyM
+ , getUnionTyM
+ , mkScalarTy
+ , pgColTyToScalar
+ , getNamedTy
+ , mkTyInfoMap
+ , fromTyDef
+ , fromSchemaDoc
+ , fromSchemaDocQ
+ , TypeMap
+ , TypeLoc (..)
+ , typeEq
+ , AnnGValue(..)
+ , AnnGEnumValue(..)
+ , AnnGObject
+ , hasNullVal
+ , getAnnInpValKind
+ , stripTypenames
+
+ , ReusableVariableTypes(..)
+ , ReusableVariableValues
+
+ , QueryReusability(..)
+ , _Reusable
+ , _NotReusable
+ , MonadReusability(..)
+ , ReusabilityT
+ , runReusabilityT
+ , runReusabilityTWith
+ , evalReusabilityT
+
+ , module Hasura.GraphQL.Utils
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashSet as Set
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Language.GraphQL.Draft.TH as G
+import qualified Language.Haskell.TH.Syntax as TH
+
+import Control.Lens (makePrisms)
+
+import qualified Hasura.RQL.Types.Column as RQL
+
+import Hasura.GraphQL.NormalForm
+import Hasura.GraphQL.Utils
+import Hasura.RQL.Instances ()
+import Hasura.RQL.Types.Common
+import Hasura.RQL.Types.RemoteSchema (RemoteSchemaInfo, RemoteSchemaName)
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+
+typeEq :: (EquatableGType a, Eq (EqProps a)) => a -> a -> Bool
+typeEq a b = getEqProps a == getEqProps b
+
+data EnumValInfo
+ = EnumValInfo
+ { _eviDesc :: !(Maybe G.Description)
+ , _eviVal :: !G.EnumValue
+ , _eviIsDeprecated :: !Bool
+ } deriving (Show, Eq, TH.Lift)
+
+fromEnumValDef :: G.EnumValueDefinition -> EnumValInfo
+fromEnumValDef (G.EnumValueDefinition descM val _) =
+ EnumValInfo descM val False
+
+data EnumValuesInfo
+ = EnumValuesSynthetic !(Map.HashMap G.EnumValue EnumValInfo)
+ -- ^ Values for an enum that exists only in the GraphQL schema and does not
+ -- have any external source of truth.
+ | EnumValuesReference !RQL.EnumReference
+ -- ^ Values for an enum that is backed by an enum table reference (see
+ -- "Hasura.RQL.Schema.Enum").
+ deriving (Show, Eq, TH.Lift)
+
+normalizeEnumValues :: EnumValuesInfo -> Map.HashMap G.EnumValue EnumValInfo
+normalizeEnumValues = \case
+ EnumValuesSynthetic values -> values
+ EnumValuesReference (RQL.EnumReference _ values) ->
+ mapFromL _eviVal . flip map (Map.toList values) $
+ \(RQL.EnumValue name, RQL.EnumValueInfo maybeDescription) -> EnumValInfo
+ { _eviVal = G.EnumValue $ G.Name name
+ , _eviDesc = G.Description <$> maybeDescription
+ , _eviIsDeprecated = False }
+
+data EnumTyInfo
+ = EnumTyInfo
+ { _etiDesc :: !(Maybe G.Description)
+ , _etiName :: !G.NamedType
+ , _etiValues :: !EnumValuesInfo
+ , _etiLoc :: !TypeLoc
+ } deriving (Show, Eq, TH.Lift)
+
+instance EquatableGType EnumTyInfo where
+ type EqProps EnumTyInfo = (G.NamedType, Map.HashMap G.EnumValue EnumValInfo)
+ getEqProps ety = (,) (_etiName ety) (normalizeEnumValues $ _etiValues ety)
+
+fromEnumTyDef :: G.EnumTypeDefinition -> TypeLoc -> EnumTyInfo
+fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc =
+ EnumTyInfo descM (G.NamedType n) (EnumValuesSynthetic enumVals) loc
+ where
+ enumVals = Map.fromList
+ [(G._evdName valDef, fromEnumValDef valDef) | valDef <- valDefs]
+
+mkHsraEnumTyInfo
+ :: Maybe G.Description
+ -> G.NamedType
+ -> EnumValuesInfo
+ -> EnumTyInfo
+mkHsraEnumTyInfo descM ty enumVals =
+ EnumTyInfo descM ty enumVals TLHasuraType
+
+fromInpValDef :: G.InputValueDefinition -> InpValInfo
+fromInpValDef (G.InputValueDefinition descM n ty defM) =
+ InpValInfo descM n defM ty
+
+type ParamMap = Map.HashMap G.Name InpValInfo
+
+-- | location of the type: a hasura type or a remote type
+data TypeLoc
+ = TLHasuraType
+ | TLRemoteType !RemoteSchemaName !RemoteSchemaInfo
+ | TLCustom
+ deriving (Show, Eq, TH.Lift, Generic)
+
+$(J.deriveJSON
+ J.defaultOptions { J.constructorTagModifier = J.snakeCase . drop 2
+ , J.sumEncoding = J.TaggedObject "type" "detail"
+ }
+ ''TypeLoc)
+
+instance Hashable TypeLoc
+
+data ObjFldInfo
+ = ObjFldInfo
+ { _fiDesc :: !(Maybe G.Description)
+ , _fiName :: !G.Name
+ , _fiParams :: !ParamMap
+ , _fiTy :: !G.GType
+ , _fiLoc :: !TypeLoc
+ } deriving (Show, Eq, TH.Lift)
+
+instance EquatableGType ObjFldInfo where
+ type EqProps ObjFldInfo = (G.Name, G.GType, ParamMap)
+ getEqProps o = (,,) (_fiName o) (_fiTy o) (_fiParams o)
+
+fromFldDef :: G.FieldDefinition -> TypeLoc -> ObjFldInfo
+fromFldDef (G.FieldDefinition descM n args ty _) loc =
+ ObjFldInfo descM n params ty loc
+ where
+ params = Map.fromList [(G._ivdName arg, fromInpValDef arg) | arg <- args]
+
+mkHsraObjFldInfo
+ :: Maybe G.Description
+ -> G.Name
+ -> ParamMap
+ -> G.GType
+ -> ObjFldInfo
+mkHsraObjFldInfo descM name params ty =
+ ObjFldInfo descM name params ty TLHasuraType
+
+type ObjFieldMap = Map.HashMap G.Name ObjFldInfo
+
+type IFacesSet = Set.HashSet G.NamedType
+
+data ObjTyInfo
+ = ObjTyInfo
+ { _otiDesc :: !(Maybe G.Description)
+ , _otiName :: !G.NamedType
+ , _otiImplIFaces :: !IFacesSet
+ , _otiFields :: !ObjFieldMap
+ } deriving (Show, Eq, TH.Lift)
+
+instance EquatableGType ObjTyInfo where
+ type EqProps ObjTyInfo =
+ (G.NamedType, Set.HashSet G.NamedType, Map.HashMap G.Name (G.Name, G.GType, ParamMap))
+ getEqProps a = (,,) (_otiName a) (_otiImplIFaces a) (Map.map getEqProps (_otiFields a))
+
+instance Monoid ObjTyInfo where
+ mempty = ObjTyInfo Nothing (G.NamedType "") Set.empty Map.empty
+
+instance Semigroup ObjTyInfo where
+ objA <> objB =
+ objA { _otiFields = Map.union (_otiFields objA) (_otiFields objB)
+ , _otiImplIFaces = _otiImplIFaces objA `Set.union` _otiImplIFaces objB
+ }
+
+mkObjTyInfo
+ :: Maybe G.Description -> G.NamedType
+ -> IFacesSet -> ObjFieldMap -> TypeLoc -> ObjTyInfo
+mkObjTyInfo descM ty iFaces flds _ =
+ ObjTyInfo descM ty iFaces $ Map.insert (_fiName newFld) newFld flds
+ where newFld = typenameFld
+
+mkHsraObjTyInfo
+ :: Maybe G.Description
+ -> G.NamedType
+ -> IFacesSet
+ -> ObjFieldMap
+ -> ObjTyInfo
+mkHsraObjTyInfo descM ty implIFaces flds =
+ mkObjTyInfo descM ty implIFaces flds TLHasuraType
+
+mkIFaceTyInfo
+ :: Maybe G.Description -> G.NamedType
+ -> Map.HashMap G.Name ObjFldInfo -> MemberTypes -> IFaceTyInfo
+mkIFaceTyInfo descM ty flds =
+ IFaceTyInfo descM ty $ Map.insert (_fiName newFld) newFld flds
+ where
+ newFld = typenameFld
+
+typenameFld :: ObjFldInfo
+typenameFld =
+ ObjFldInfo (Just desc) "__typename" Map.empty
+ (G.toGT $ G.toNT $ G.NamedType "String") TLHasuraType
+ where
+ desc = "The name of the current Object type at runtime"
+
+fromObjTyDef :: G.ObjectTypeDefinition -> TypeLoc -> ObjTyInfo
+fromObjTyDef (G.ObjectTypeDefinition descM n ifaces _ flds) loc =
+ mkObjTyInfo descM (G.NamedType n) (Set.fromList ifaces) fldMap loc
+ where
+ fldMap = Map.fromList [(G._fldName fld, fromFldDef fld loc) | fld <- flds]
+
+data IFaceTyInfo
+ = IFaceTyInfo
+ { _ifDesc :: !(Maybe G.Description)
+ , _ifName :: !G.NamedType
+ , _ifFields :: !ObjFieldMap
+ , _ifMemberTypes :: !MemberTypes
+ } deriving (Show, Eq, TH.Lift)
+
+instance EquatableGType IFaceTyInfo where
+ type EqProps IFaceTyInfo =
+ (G.NamedType, Map.HashMap G.Name (G.Name, G.GType, ParamMap))
+ getEqProps a = (,) (_ifName a) (Map.map getEqProps (_ifFields a))
+
+instance Semigroup IFaceTyInfo where
+ objA <> objB =
+ objA { _ifFields = Map.union (_ifFields objA) (_ifFields objB)
+ }
+
+fromIFaceDef
+ :: InterfaceImplementations -> G.InterfaceTypeDefinition -> TypeLoc -> IFaceTyInfo
+fromIFaceDef interfaceImplementations (G.InterfaceTypeDefinition descM n _ flds) loc =
+ mkIFaceTyInfo descM (G.NamedType n) fldMap implementations
+ where
+ fldMap = Map.fromList [(G._fldName fld, fromFldDef fld loc) | fld <- flds]
+ implementations = fromMaybe mempty $ Map.lookup (G.NamedType n) interfaceImplementations
+
+type MemberTypes = Set.HashSet G.NamedType
+
+data UnionTyInfo
+ = UnionTyInfo
+ { _utiDesc :: !(Maybe G.Description)
+ , _utiName :: !G.NamedType
+ , _utiMemberTypes :: !MemberTypes
+ } deriving (Show, Eq, TH.Lift)
+
+instance EquatableGType UnionTyInfo where
+ type EqProps UnionTyInfo =
+ (G.NamedType, Set.HashSet G.NamedType)
+ getEqProps a = (,) (_utiName a) (_utiMemberTypes a)
+
+instance Monoid UnionTyInfo where
+ mempty = UnionTyInfo Nothing (G.NamedType "") Set.empty
+
+instance Semigroup UnionTyInfo where
+ objA <> objB =
+ objA { _utiMemberTypes = Set.union (_utiMemberTypes objA) (_utiMemberTypes objB)
+ }
+
+fromUnionTyDef :: G.UnionTypeDefinition -> UnionTyInfo
+fromUnionTyDef (G.UnionTypeDefinition descM n _ mt) = UnionTyInfo descM (G.NamedType n) $ Set.fromList mt
+
+type InpObjFldMap = Map.HashMap G.Name InpValInfo
+
+data InpObjTyInfo
+ = InpObjTyInfo
+ { _iotiDesc :: !(Maybe G.Description)
+ , _iotiName :: !G.NamedType
+ , _iotiFields :: !InpObjFldMap
+ , _iotiLoc :: !TypeLoc
+ } deriving (Show, Eq, TH.Lift)
+
+instance EquatableGType InpObjTyInfo where
+ type EqProps InpObjTyInfo = (G.NamedType, Map.HashMap G.Name (G.Name, G.GType))
+ getEqProps a = (,) (_iotiName a) (Map.map getEqProps $ _iotiFields a)
+
+fromInpObjTyDef :: G.InputObjectTypeDefinition -> TypeLoc -> InpObjTyInfo
+fromInpObjTyDef (G.InputObjectTypeDefinition descM n _ inpFlds) loc =
+ InpObjTyInfo descM (G.NamedType n) fldMap loc
+ where
+ fldMap = Map.fromList
+ [(G._ivdName inpFld, fromInpValDef inpFld) | inpFld <- inpFlds]
+
+mkHsraInpTyInfo
+ :: Maybe G.Description
+ -> G.NamedType
+ -> InpObjFldMap
+ -> InpObjTyInfo
+mkHsraInpTyInfo descM ty flds =
+ InpObjTyInfo descM ty flds TLHasuraType
+
+data ScalarTyInfo
+ = ScalarTyInfo
+ { _stiDesc :: !(Maybe G.Description)
+ , _stiName :: !G.Name
+ , _stiType :: !PGScalarType
+ , _stiLoc :: !TypeLoc
+ } deriving (Show, Eq, TH.Lift)
+
+mkHsraScalarTyInfo :: PGScalarType -> ScalarTyInfo
+mkHsraScalarTyInfo ty =
+ ScalarTyInfo Nothing (G.Name $ pgColTyToScalar ty) ty TLHasuraType
+
+instance EquatableGType ScalarTyInfo where
+ type EqProps ScalarTyInfo = PGScalarType
+ getEqProps = _stiType
+
+fromScalarTyDef
+ :: G.ScalarTypeDefinition
+ -> TypeLoc
+ -> ScalarTyInfo
+fromScalarTyDef (G.ScalarTypeDefinition descM n _) =
+ ScalarTyInfo descM n ty
+ where
+ ty = case n of
+ "Int" -> PGInteger
+ "Float" -> PGFloat
+ "String" -> PGText
+ "Boolean" -> PGBoolean
+ "ID" -> PGText
+ _ -> textToPGScalarType $ G.unName n
+
+data TypeInfo
+ = TIScalar !ScalarTyInfo
+ | TIObj !ObjTyInfo
+ | TIEnum !EnumTyInfo
+ | TIInpObj !InpObjTyInfo
+ | TIIFace !IFaceTyInfo
+ | TIUnion !UnionTyInfo
+ deriving (Show, Eq, TH.Lift)
+
+instance J.ToJSON TypeInfo where
+ toJSON _ = J.String "toJSON not implemented for TypeInfo"
+
+data AsObjType
+ = AOTIFace IFaceTyInfo
+ | AOTUnion UnionTyInfo
+
+getPossibleObjTypes :: TypeMap -> AsObjType -> Map.HashMap G.NamedType ObjTyInfo
+getPossibleObjTypes tyMap = \case
+ (AOTIFace i) ->
+ toObjMap $ mapMaybe (extrObjTyInfoM tyMap) $ Set.toList $ _ifMemberTypes i
+ (AOTUnion u) ->
+ toObjMap $ mapMaybe (extrObjTyInfoM tyMap) $ Set.toList $ _utiMemberTypes u
+ -- toObjMap $ mapMaybe previewImplTypeM $ Map.elems tyMap
+ -- where
+ -- previewImplTypeM = \case
+ -- TIObj objTyInfo -> bool Nothing (Just objTyInfo) $
+ -- _ifName i `elem` _otiImplIFaces objTyInfo
+ -- _ -> Nothing
+
+
+toObjMap :: [ObjTyInfo] -> Map.HashMap G.NamedType ObjTyInfo
+toObjMap = foldr (\o -> Map.insert (_otiName o) o) Map.empty
+
+
+isObjTy :: TypeInfo -> Bool
+isObjTy = \case
+ (TIObj _) -> True
+ _ -> False
+
+getObjTyM :: TypeInfo -> Maybe ObjTyInfo
+getObjTyM = \case
+ (TIObj t) -> return t
+ _ -> Nothing
+
+getUnionTyM :: TypeInfo -> Maybe UnionTyInfo
+getUnionTyM = \case
+ (TIUnion u) -> return u
+ _ -> Nothing
+
+isIFaceTy :: TypeInfo -> Bool
+isIFaceTy = \case
+ (TIIFace _) -> True
+ _ -> False
+
+data SchemaPath
+ = SchemaPath
+ { _spTypeName :: !(Maybe G.NamedType)
+ , _spFldName :: !(Maybe G.Name)
+ , _spArgName :: !(Maybe G.Name)
+ , _spType :: !(Maybe T.Text)
+ }
+
+setFldNameSP :: SchemaPath -> G.Name -> SchemaPath
+setFldNameSP sp fn = sp { _spFldName = Just fn}
+
+setArgNameSP :: SchemaPath -> G.Name -> SchemaPath
+setArgNameSP sp an = sp { _spArgName = Just an}
+
+showSP :: SchemaPath -> Text
+showSP (SchemaPath t f a _) = maybe "" (\x -> showNamedTy x <> fN) t
+ where
+ fN = maybe "" (\x -> "." <> showName x <> aN) f
+ aN = maybe "" showArg a
+ showArg x = "(" <> showName x <> ":)"
+
+showSPTxt' :: SchemaPath -> Text
+showSPTxt' (SchemaPath _ f a t) = maybe "" (<> " "<> fld) t
+ where
+ fld = maybe "" (const $ "field " <> arg) f
+ arg = maybe "" (const "argument ") a
+
+showSPTxt :: SchemaPath -> Text
+showSPTxt p = showSPTxt' p <> showSP p
+
+validateIFace :: MonadError Text f => IFaceTyInfo -> f ()
+validateIFace (IFaceTyInfo _ n flds _) =
+ when (isFldListEmpty flds) $ throwError $ "List of fields cannot be empty for interface " <> showNamedTy n
+
+validateObj :: TypeMap -> ObjTyInfo -> Either Text ()
+validateObj tyMap objTyInfo@(ObjTyInfo _ n _ flds) = do
+ when (isFldListEmpty flds) $ throwError $ "List of fields cannot be empty for " <> objTxt
+ mapM_ (extrIFaceTyInfo' >=> validateIFaceImpl objTyInfo) $ _otiImplIFaces objTyInfo
+ where
+ extrIFaceTyInfo' t = withObjTxt $ extrIFaceTyInfo tyMap t
+ withObjTxt x = x `catchError` \e -> throwError $ e <> " implemented by " <> objTxt
+ objTxt = "Object type " <> showNamedTy n
+ validateIFaceImpl = implmntsIFace tyMap
+
+isFldListEmpty :: ObjFieldMap -> Bool
+isFldListEmpty = Map.null . Map.delete "__typename"
+
+validateUnion :: MonadError Text m => TypeMap -> UnionTyInfo -> m ()
+validateUnion tyMap (UnionTyInfo _ un mt) = do
+ when (Set.null mt) $ throwError $ "List of member types cannot be empty for union type " <> showNamedTy un
+ mapM_ valIsObjTy $ Set.toList mt
+ where
+ valIsObjTy mn = case Map.lookup mn tyMap of
+ Just (TIObj t) -> return t
+ Nothing -> throwError $ "Could not find type " <> showNamedTy mn <> ", which is defined as a member type of Union " <> showNamedTy un
+ _ -> throwError $ "Union type " <> showNamedTy un <> " can only include object types. It cannot include " <> showNamedTy mn
+
+implmntsIFace :: TypeMap -> ObjTyInfo -> IFaceTyInfo -> Either Text ()
+implmntsIFace tyMap objTyInfo iFaceTyInfo = do
+ let path =
+ ( SchemaPath (Just $ _otiName objTyInfo) Nothing Nothing (Just "Object")
+ , SchemaPath (Just $ _ifName iFaceTyInfo) Nothing Nothing (Just "Interface")
+ )
+ mapM_ (includesIFaceFld path) $ _ifFields iFaceTyInfo
+ where
+ includesIFaceFld (spO,spIF) ifFld = do
+ let pathA@(spOA, spIFA) = (spO, setFldNameSP spIF $ _fiName ifFld)
+ objFld <- sameNameFld pathA ifFld
+ let pathB = (setFldNameSP spOA $ _fiName objFld, spIFA)
+ validateIsSubType' pathB (_fiTy objFld) (_fiTy ifFld)
+ hasAllArgs pathB objFld ifFld
+ isExtraArgsNullable pathB objFld ifFld
+
+ validateIsSubType' (spO,spIF) oFld iFld = validateIsSubType tyMap oFld iFld `catchError` \_ ->
+ throwError $ "The type of " <> showSPTxt spO <> " (" <> G.showGT oFld <>
+ ") is not the same type/sub type of " <> showSPTxt spIF <> " (" <> G.showGT iFld <> ")"
+
+ sameNameFld (spO, spIF) ifFld = do
+ let spIFN = setFldNameSP spIF $ _fiName ifFld
+ onNothing (Map.lookup (_fiName ifFld) objFlds)
+ $ throwError $ showSPTxt spIFN <> " expected, but " <> showSP spO <> " does not provide it"
+
+ hasAllArgs (spO, spIF) objFld ifFld = forM_ (_fiParams ifFld) $ \ifArg -> do
+ objArg <- sameNameArg ifArg
+ let (spON, spIFN) = (setArgNameSP spO $ _iviName objArg, setArgNameSP spIF $ _iviName ifArg)
+ unless (_iviType objArg == _iviType ifArg) $ throwError $
+ showSPTxt spIFN <> " expects type " <> G.showGT (_iviType ifArg) <> ", but " <>
+ showSP spON <> " has type " <> G.showGT (_iviType objArg)
+ where
+ sameNameArg ivi = do
+ let spIFN = setArgNameSP spIF $ _iviName ivi
+ onNothing (Map.lookup (_iviName ivi) objArgs) $ throwError $ showSPTxt spIFN <> " required, but " <>
+ showSPTxt spO <> " does not provide it"
+ objArgs = _fiParams objFld
+
+ isExtraArgsNullable (spO, spIF) objFld ifFld = forM_ extraArgs isInpValNullable
+ where
+ extraArgs = Map.difference (_fiParams objFld) (_fiParams ifFld)
+ isInpValNullable ivi = unless (G.isNullable $ _iviType ivi) $ throwError $
+ showSPTxt (setArgNameSP spO $ _iviName ivi) <> " is of required type "
+ <> G.showGT (_iviType ivi) <> ", but is not provided by " <> showSPTxt spIF
+
+ objFlds = _otiFields objTyInfo
+
+extrTyInfo :: TypeMap -> G.NamedType -> Either Text TypeInfo
+extrTyInfo tyMap tn = maybe
+ (throwError $ "Could not find type with name " <> showNamedTy tn)
+ return
+ $ Map.lookup tn tyMap
+
+extrIFaceTyInfo :: MonadError Text m => Map.HashMap G.NamedType TypeInfo -> G.NamedType -> m IFaceTyInfo
+extrIFaceTyInfo tyMap tn = case Map.lookup tn tyMap of
+ Just (TIIFace i) -> return i
+ _ -> throwError $ "Could not find interface " <> showNamedTy tn
+
+extrObjTyInfoM :: TypeMap -> G.NamedType -> Maybe ObjTyInfo
+extrObjTyInfoM tyMap tn = case Map.lookup tn tyMap of
+ Just (TIObj o) -> return o
+ _ -> Nothing
+
+validateIsSubType :: Map.HashMap G.NamedType TypeInfo -> G.GType -> G.GType -> Either Text ()
+validateIsSubType tyMap subFldTy supFldTy = do
+ checkNullMismatch subFldTy supFldTy
+ case (subFldTy,supFldTy) of
+ (G.TypeNamed _ subTy, G.TypeNamed _ supTy) -> do
+ subTyInfo <- extrTyInfo tyMap subTy
+ supTyInfo <- extrTyInfo tyMap supTy
+ isSubTypeBase subTyInfo supTyInfo
+ (G.TypeList _ (G.ListType sub), G.TypeList _ (G.ListType sup) ) ->
+ validateIsSubType tyMap sub sup
+ _ -> throwError $ showIsListTy subFldTy <> " Type " <> G.showGT subFldTy <>
+ " cannot be a sub-type of " <> showIsListTy supFldTy <> " Type " <> G.showGT supFldTy
+ where
+ checkNullMismatch subTy supTy = when (G.isNotNull supTy && G.isNullable subTy ) $
+ throwError $ "Nullable Type " <> G.showGT subFldTy <> " cannot be a sub-type of Non-Null Type " <> G.showGT supFldTy
+ showIsListTy = \case
+ G.TypeList {} -> "List"
+ G.TypeNamed {} -> "Named"
+
+-- TODO: Should we check the schema location as well?
+isSubTypeBase :: (MonadError Text m) => TypeInfo -> TypeInfo -> m ()
+isSubTypeBase subTyInfo supTyInfo = case (subTyInfo,supTyInfo) of
+ (TIObj obj, TIIFace iFace) -> unless (_ifName iFace `elem` _otiImplIFaces obj) notSubTyErr
+ _ -> unless (subTyInfo == supTyInfo) notSubTyErr
+ where
+ showTy = showNamedTy . getNamedTy
+ notSubTyErr = throwError $ "Type " <> showTy subTyInfo <> " is not a sub type of " <> showTy supTyInfo
+
+-- map postgres types to builtin scalars
+pgColTyToScalar :: PGScalarType -> Text
+pgColTyToScalar = \case
+ PGInteger -> "Int"
+ PGBoolean -> "Boolean"
+ PGFloat -> "Float"
+ PGText -> "String"
+ PGVarchar -> "String"
+ t -> toSQLTxt t
+
+mkScalarTy :: PGScalarType -> G.NamedType
+mkScalarTy =
+ G.NamedType . G.Name . pgColTyToScalar
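+-- Illustrative examples: 'mkScalarTy' PGInteger is the built-in @Int@ scalar,
+-- 'mkScalarTy' PGVarchar is @String@, and any other Postgres type falls back
+-- to its SQL text via 'toSQLTxt' (see 'pgColTyToScalar' above).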
+
+getNamedTy :: TypeInfo -> G.NamedType
+getNamedTy = \case
+ TIScalar t -> G.NamedType $ _stiName t
+ TIObj t -> _otiName t
+ TIIFace i -> _ifName i
+ TIEnum t -> _etiName t
+ TIInpObj t -> _iotiName t
+ TIUnion u -> _utiName u
+
+mkTyInfoMap :: [TypeInfo] -> TypeMap
+mkTyInfoMap tyInfos =
+ Map.fromList [(getNamedTy tyInfo, tyInfo) | tyInfo <- tyInfos]
+
+fromTyDef :: InterfaceImplementations -> TypeLoc -> G.TypeDefinition -> TypeInfo
+fromTyDef interfaceImplementations loc tyDef = case tyDef of
+ G.TypeDefinitionScalar t -> TIScalar $ fromScalarTyDef t loc
+ G.TypeDefinitionObject t -> TIObj $ fromObjTyDef t loc
+ G.TypeDefinitionInterface t -> TIIFace $ fromIFaceDef interfaceImplementations t loc
+ G.TypeDefinitionUnion t -> TIUnion $ fromUnionTyDef t
+ G.TypeDefinitionEnum t -> TIEnum $ fromEnumTyDef t loc
+ G.TypeDefinitionInputObject t -> TIInpObj $ fromInpObjTyDef t loc
+
+type InterfaceImplementations = Map.HashMap G.NamedType MemberTypes
+
+fromSchemaDoc :: G.SchemaDocument -> TypeLoc -> Either Text TypeMap
+fromSchemaDoc (G.SchemaDocument tyDefs) loc = do
+ let tyMap = mkTyInfoMap $ map (fromTyDef interfaceImplementations loc) tyDefs
+ validateTypeMap tyMap
+ return tyMap
+ where
+ interfaceImplementations :: InterfaceImplementations
+ interfaceImplementations =
+ foldr (Map.unionWith (<>)) mempty $ flip mapMaybe tyDefs $ \case
+ G.TypeDefinitionObject objectDefinition ->
+ Just $ Map.fromList $ zip
+ (G._otdImplementsInterfaces objectDefinition)
+ (repeat $ Set.singleton $ G.NamedType $ G._otdName objectDefinition)
+ _ -> Nothing
+
+validateTypeMap :: TypeMap -> Either Text ()
+validateTypeMap tyMap = mapM_ validateTy $ Map.elems tyMap
+ where
+ validateTy (TIObj o) = validateObj tyMap o
+ validateTy (TIUnion u) = validateUnion tyMap u
+ validateTy (TIIFace i) = validateIFace i
+ validateTy _ = return ()
+
+fromSchemaDocQ :: G.SchemaDocument -> TypeLoc -> TH.Q TH.Exp
+fromSchemaDocQ sd loc = case fromSchemaDoc sd loc of
+ Left e -> fail $ T.unpack e
+ Right tyMap -> TH.ListE <$> mapM TH.lift (Map.elems tyMap)
+
+defaultSchema :: G.SchemaDocument
+defaultSchema = $(G.parseSchemaDocQ "src-rsr/schema.graphql")
+
+-- fromBaseSchemaFileQ :: FilePath -> TH.Q TH.Exp
+-- fromBaseSchemaFileQ fp =
+-- fromSchemaDocQ $(G.parseSchemaDocQ fp)
+
+type TypeMap = Map.HashMap G.NamedType TypeInfo
+
+data DirectiveInfo
+ = DirectiveInfo
+ { _diDescription :: !(Maybe G.Description)
+ , _diName :: !G.Name
+ , _diParams :: !ParamMap
+ , _diLocations :: ![G.DirectiveLocation]
+ } deriving (Show, Eq)
+
+-- TODO: generate this from template haskell once we have a parser for directive defs
+-- directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
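+-- directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+-- (per the GraphQL spec both directives take the same @if: Boolean!@ argument,
+-- which is why a single 'args' map below serves both)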
+defaultDirectives :: [DirectiveInfo]
+defaultDirectives =
+ [mkDirective "skip", mkDirective "include"]
+ where
+ mkDirective n = DirectiveInfo Nothing n args dirLocs
+ args = Map.singleton "if" $ InpValInfo Nothing "if" Nothing $
+ G.TypeNamed (G.Nullability False) $ mkScalarTy PGBoolean
+ dirLocs = map G.DLExecutable
+ [G.EDLFIELD, G.EDLFRAGMENT_SPREAD, G.EDLINLINE_FRAGMENT]
+
+defDirectivesMap :: Map.HashMap G.Name DirectiveInfo
+defDirectivesMap = mapFromL _diName defaultDirectives
+
+data FragDef
+ = FragDef
+ { _fdName :: !G.Name
+ , _fdTyInfo :: !FragmentTypeInfo
+ , _fdSelSet :: !G.SelectionSet
+ } deriving (Show, Eq)
+
+data FragmentTypeInfo
+ = FragmentTyObject !ObjTyInfo
+ | FragmentTyInterface !IFaceTyInfo
+ | FragmentTyUnion !UnionTyInfo
+ deriving (Show, Eq)
+
+type FragDefMap = Map.HashMap G.Name FragDef
+
+type AnnVarVals =
+ Map.HashMap G.Variable AnnInpVal
+
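+-- | Strip @__typename@ field selections from operations, fragments and nested
+-- field selection sets; all other selections are left untouched.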
+stripTypenames :: [G.ExecutableDefinition] -> [G.ExecutableDefinition]
+stripTypenames = map filterExecDef
+ where
+ filterExecDef = \case
+ G.ExecutableDefinitionOperation opDef ->
+ G.ExecutableDefinitionOperation $ filterOpDef opDef
+ G.ExecutableDefinitionFragment fragDef ->
+ let newSelset = filterSelSet $ G._fdSelectionSet fragDef
+ in G.ExecutableDefinitionFragment fragDef{G._fdSelectionSet = newSelset}
+
+ filterOpDef = \case
+ G.OperationDefinitionTyped typeOpDef ->
+ let newSelset = filterSelSet $ G._todSelectionSet typeOpDef
+ in G.OperationDefinitionTyped typeOpDef{G._todSelectionSet = newSelset}
+ G.OperationDefinitionUnTyped selset ->
+ G.OperationDefinitionUnTyped $ filterSelSet selset
+
+ filterSelSet = mapMaybe filterSel
+ filterSel s = case s of
+ G.SelectionField f ->
+ if G._fName f == "__typename"
+ then Nothing
+ else
+ let newSelset = filterSelSet $ G._fSelectionSet f
+ in Just $ G.SelectionField f{G._fSelectionSet = newSelset}
+ _ -> Just s
+
+-- | Used by 'Hasura.GraphQL.Validate.validateVariablesForReuse' to parse new sets of variables for
+-- reusable query plans; see also 'QueryReusability'.
+newtype ReusableVariableTypes
+ = ReusableVariableTypes { unReusableVarTypes :: Map.HashMap G.Variable RQL.PGColumnType }
+ deriving (Show, Eq, Semigroup, Monoid, J.ToJSON)
+type ReusableVariableValues = Map.HashMap G.Variable (WithScalarType PGScalarValue)
+
+-- | Tracks whether or not a query is /reusable/. Reusable queries are nice, since we can cache
+-- their resolved ASTs and avoid re-resolving them if we receive an identical query. However, we
+-- can’t always safely reuse queries if they have variables, since some variable values can affect
+-- the generated SQL. For example, consider the following query:
+--
+-- > query users_where($condition: users_bool_exp!) {
+-- > users(where: $condition) {
+-- > id
+-- > }
+-- > }
+--
+-- Different values for @$condition@ will produce completely different queries, so we can’t reuse
+-- its plan (unless the variable values were also all identical, of course, but we don’t bother
+-- caching those).
+--
+-- If a query does turn out to be reusable, we build up a 'ReusableVariableTypes' value that maps
+-- variable names to their types so that we can use a fast path for validating new sets of
+-- variables (namely 'Hasura.GraphQL.Validate.validateVariablesForReuse').
+data QueryReusability
+ = Reusable !ReusableVariableTypes
+ | NotReusable
+ deriving (Show, Eq)
+$(makePrisms ''QueryReusability)
+
+instance Semigroup QueryReusability where
+ Reusable a <> Reusable b = Reusable (a <> b)
+ _ <> _ = NotReusable
+instance Monoid QueryReusability where
+ mempty = Reusable mempty
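+
+-- A rough sketch of how reusability composes (illustrative only; typesA and
+-- typesB are placeholder values):
+--
+-- >  Reusable typesA <> Reusable typesB  ==  Reusable (typesA <> typesB)
+-- >  Reusable typesA <> NotReusable      ==  NotReusable
+-- >  mempty                              ==  Reusable mempty
+--
+-- i.e. once anything marks the query as not reusable, the combined result
+-- stays 'NotReusable'; otherwise the recorded variable types are merged.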
+
+class (Monad m) => MonadReusability m where
+ recordVariableUse :: G.Variable -> RQL.PGColumnType -> m ()
+ markNotReusable :: m ()
+
+instance (MonadReusability m) => MonadReusability (ReaderT r m) where
+ recordVariableUse a b = lift $ recordVariableUse a b
+ markNotReusable = lift markNotReusable
+
+instance (MonadReusability m) => MonadReusability (StateT s m) where
+ recordVariableUse a b = lift $ recordVariableUse a b
+ markNotReusable = lift markNotReusable
+
+newtype ReusabilityT m a = ReusabilityT { unReusabilityT :: StateT QueryReusability m a }
+ deriving (Functor, Applicative, Monad, MonadError e, MonadReader r, MonadIO)
+
+instance (Monad m) => MonadReusability (ReusabilityT m) where
+ recordVariableUse varName varType = ReusabilityT $
+ modify' (<> Reusable (ReusableVariableTypes $ Map.singleton varName varType))
+ markNotReusable = ReusabilityT $ put NotReusable
+
+runReusabilityT :: ReusabilityT m a -> m (a, QueryReusability)
+runReusabilityT = runReusabilityTWith mempty
+
+-- | Like 'runReusabilityT', but starting from an existing 'QueryReusability' state.
+runReusabilityTWith :: QueryReusability -> ReusabilityT m a -> m (a, QueryReusability)
+runReusabilityTWith initialReusability = flip runStateT initialReusability . unReusabilityT
+
+evalReusabilityT :: (Monad m) => ReusabilityT m a -> m a
+evalReusabilityT = flip evalStateT mempty . unReusabilityT
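+
+-- A minimal usage sketch (the names in it are hypothetical; real call sites
+-- live in the validation code):
+--
+-- >  (res, reusability) <- runReusabilityT $ do
+-- >    recordVariableUse varName columnType  -- keeps the query 'Reusable'
+-- >    markNotReusable                       -- this flips it to 'NotReusable'
+-- >    validateRest
+-- >  -- here reusability == NotReusable, regardless of the recorded variables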
diff --git a/server/src-lib/Hasura/RQL/DDL/RemoteSchema.hs b/server/src-lib/Hasura/RQL/DDL/RemoteSchema.hs
index 2f14666240c..239e8b65e8e 100644
--- a/server/src-lib/Hasura/RQL/DDL/RemoteSchema.hs
+++ b/server/src-lib/Hasura/RQL/DDL/RemoteSchema.hs
@@ -10,6 +10,9 @@ module Hasura.RQL.DDL.RemoteSchema
) where
import Hasura.EncJSON
+-- import Hasura.GraphQL.NormalForm
+import Hasura.GraphQL.RemoteServer
+-- import Hasura.GraphQL.Schema.Merge
import Hasura.Prelude
import qualified Data.Aeson as J
@@ -121,3 +124,34 @@ fetchRemoteSchemas =
where
fromRow (name, Q.AltJ def, comment) =
AddRemoteSchemaQuery name def comment
+
+-- runIntrospectRemoteSchema
+-- :: (CacheRM m, QErrM m) => RemoteSchemaNameQuery -> m EncJSON
+-- runIntrospectRemoteSchema (RemoteSchemaNameQuery rsName) = do
+-- sc <- askSchemaCache
+-- rGCtx <-
+-- case Map.lookup rsName (scRemoteSchemas sc) of
+-- Nothing ->
+-- throw400 NotExists $
+-- "remote schema: " <> remoteSchemaNameToTxt rsName <> " not found"
+-- Just rCtx -> mergeGCtx (rscGCtx rCtx) GC.emptyGCtx
+-- -- merge with emptyGCtx to get default query fields
+-- queryParts <- flip runReaderT rGCtx $ VQ.getQueryParts introspectionQuery
+-- (rootSelSet, _) <- flip runReaderT rGCtx $ VT.runReusabilityT $ VQ.validateGQ queryParts
+-- schemaField <-
+-- case rootSelSet of
+-- VQ.RQuery selSet -> getSchemaField $ toList $ unAliasedFields $
+-- unObjectSelectionSet selSet
+-- _ -> throw500 "expected query for introspection"
+-- (introRes, _) <- flip runReaderT rGCtx $ VT.runReusabilityT $ RI.schemaR schemaField
+-- pure $ wrapInSpecKeys introRes
+-- where
+-- wrapInSpecKeys introObj =
+-- encJFromAssocList
+-- [ ( T.pack "data"
+-- , encJFromAssocList [(T.pack "__schema", encJFromJValue introObj)])
+-- ]
+-- getSchemaField = \case
+-- [] -> throw500 "found empty when looking for __schema field"
+-- [f] -> pure f
+-- _ -> throw500 "expected __schema field, found many fields"
diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs
index d2f1d02605f..114bee9f4c2 100644
--- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs
+++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs
@@ -47,6 +47,7 @@ import Hasura.RQL.DDL.CustomTypes
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.RemoteSchema
+-- import Hasura.RQL.DDL.ScheduledTrigger
import Hasura.RQL.DDL.Schema.Cache.Common
import Hasura.RQL.DDL.Schema.Cache.Dependencies
import Hasura.RQL.DDL.Schema.Cache.Fields
@@ -192,6 +193,9 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
+ -- Step 4: Build the relay GraphQL schema
+ -- relayGQLSchema <- bindA -< Relay.mkRelayGCtxMap (_boTables resolvedOutputs) (_boFunctions resolvedOutputs)
+
returnA -< SchemaCache
{ scTables = _boTables resolvedOutputs
, scActions = _boActions resolvedOutputs
diff --git a/server/src-lib/Hasura/RQL/DML/Mutation.hs b/server/src-lib/Hasura/RQL/DML/Mutation.hs
index 4d2d3d8f968..16f0b43c093 100644
--- a/server/src-lib/Hasura/RQL/DML/Mutation.hs
+++ b/server/src-lib/Hasura/RQL/DML/Mutation.hs
@@ -71,7 +71,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
tabFrom = FromIden aliasIden
tabPerm = TablePerm annBoolExpTrue Nothing
selFlds = flip map cols $
- \ci -> (fromPGCol $ pgiColumn ci, mkAnnColFieldAsText ci)
+ \ci -> (fromPGCol $ pgiColumn ci, mkAnnColumnFieldAsText ci)
sql = toSQL selectWith
selectWith = S.SelectWith [(S.Alias aliasIden, cte)] select
@@ -87,7 +87,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
, S.selFrom = Just $ S.FromExp [S.FIIden aliasIden]
}
colSel = S.SESelect $ mkSQLSelect JASMultipleRows $
- AnnSelG selFlds tabFrom tabPerm noTableArgs strfyNum
+ AnnSelectG selFlds tabFrom tabPerm noSelectArgs strfyNum
-- | Note:- Using sorted columns is necessary to enable casting the rows returned by VALUES expression to table type.
-- For example, let's consider the table, `CREATE TABLE test (id serial primary key, name text not null, age int)`.
diff --git a/server/src-lib/Hasura/RQL/DML/RemoteJoin.hs b/server/src-lib/Hasura/RQL/DML/RemoteJoin.hs
index 29e05f412d4..191ddd01318 100644
--- a/server/src-lib/Hasura/RQL/DML/RemoteJoin.hs
+++ b/server/src-lib/Hasura/RQL/DML/RemoteJoin.hs
@@ -12,23 +12,20 @@ import Hasura.Prelude
import Control.Lens
import Data.List (nub)
-import Data.Validation
import Data.Scientific (toBoundedInteger, toRealFloat)
+import Data.Validation
import Hasura.EncJSON
+import Hasura.GraphQL.Parser
import Hasura.GraphQL.RemoteServer (execRemoteGQ')
import Hasura.GraphQL.Transport.HTTP.Protocol
-import Hasura.GraphQL.Utils
-import Hasura.GraphQL.Parser
-import Hasura.GraphQL.Execute.Resolve
import Hasura.RQL.DML.Internal
-import Hasura.RQL.DML.Returning
import Hasura.RQL.DML.Returning.Types
import Hasura.RQL.DML.Select.Types
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
-import Hasura.SQL.Types ((<<>))
import Hasura.Session
+import Hasura.SQL.Types ((<<>))
import qualified Hasura.SQL.DML as S
@@ -45,8 +42,6 @@ import qualified Language.GraphQL.Draft.Printer.Text as G
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
-import qualified Data.ByteString.Lazy as BL
-import qualified Network.Wreq as Wreq
-- | Executes given query and fetch response JSON from Postgres. Substitutes remote relationship fields.
executeQueryWithRemoteJoins
@@ -106,7 +101,7 @@ pathToAlias path counter = do
parseGraphQLName $ T.intercalate "_" (map getFieldNameTxt $ unFieldPath path)
<> "__" <> (T.pack . show . unCounter) counter
--- | A 'RemoteJoin' represents the context of remote relationship to be extracted from 'AnnFldG's.
+-- | A 'RemoteJoin' represents the context of a remote relationship to be extracted from 'AnnFieldG's.
data RemoteJoin
= RemoteJoin
{ _rjName :: !FieldName -- ^ The remote join field name.
@@ -139,14 +134,14 @@ transformSelect path sel = do
pure sel{_asnFields = transformedFields}
-- | Traverse through @'AnnAggregateSelect' and collect remote join fields (if any).
-getRemoteJoinsAggregateSelect :: AnnAggSel -> (AnnAggSel, Maybe RemoteJoins)
+getRemoteJoinsAggregateSelect :: AnnAggregateSelect -> (AnnAggregateSelect, Maybe RemoteJoins)
getRemoteJoinsAggregateSelect =
second mapToNonEmpty . flip runState mempty . transformAggregateSelect mempty
transformAggregateSelect
:: FieldPath
- -> AnnAggSel
- -> State RemoteJoinMap AnnAggSel
+ -> AnnAggregateSelect
+ -> State RemoteJoinMap AnnAggregateSelect
transformAggregateSelect path sel = do
let aggFields = _asnFields sel
transformedFields <- forM aggFields $ \(fieldName, aggField) ->
@@ -156,32 +151,32 @@ transformAggregateSelect path sel = do
TAFExp t -> pure $ TAFExp t
pure sel{_asnFields = transformedFields}
--- -- | Traverse through @'ConnectionSelect' and collect remote join fields (if any).
--- getRemoteJoinsConnectionSelect :: ConnectionSelect S.SQLExp -> (ConnectionSelect S.SQLExp, Maybe RemoteJoins)
--- getRemoteJoinsConnectionSelect =
--- second mapToNonEmpty . flip runState mempty . transformConnectionSelect mempty
+-- | Traverse through @'ConnectionSelect' and collect remote join fields (if any).
+getRemoteJoinsConnectionSelect :: ConnectionSelect S.SQLExp -> (ConnectionSelect S.SQLExp, Maybe RemoteJoins)
+getRemoteJoinsConnectionSelect =
+ second mapToNonEmpty . flip runState mempty . transformConnectionSelect mempty
--- transformConnectionSelect
--- :: FieldPath
--- -> ConnectionSelect S.SQLExp
--- -> State RemoteJoinMap (ConnectionSelect S.SQLExp)
--- transformConnectionSelect path ConnectionSelect{..} = do
--- let connectionFields = _asnFields _csSelect
--- transformedFields <- forM connectionFields $ \(fieldName, field) ->
--- (fieldName,) <$> case field of
--- ConnectionTypename t -> pure $ ConnectionTypename t
--- ConnectionPageInfo p -> pure $ ConnectionPageInfo p
--- ConnectionEdges edges -> ConnectionEdges <$> transformEdges (appendPath fieldName path) edges
--- let select = _csSelect{_asnFields = transformedFields}
--- pure $ ConnectionSelect _csPrimaryKeyColumns _csSplit _csSlice select
--- where
--- transformEdges edgePath edgeFields =
--- forM edgeFields $ \(fieldName, edgeField) ->
--- (fieldName,) <$> case edgeField of
--- EdgeTypename t -> pure $ EdgeTypename t
--- EdgeCursor -> pure EdgeCursor
--- EdgeNode annFields ->
--- EdgeNode <$> transformAnnFields (appendPath fieldName edgePath) annFields
+transformConnectionSelect
+ :: FieldPath
+ -> ConnectionSelect S.SQLExp
+ -> State RemoteJoinMap (ConnectionSelect S.SQLExp)
+transformConnectionSelect path ConnectionSelect{..} = do
+ let connectionFields = _asnFields _csSelect
+ transformedFields <- forM connectionFields $ \(fieldName, field) ->
+ (fieldName,) <$> case field of
+ ConnectionTypename t -> pure $ ConnectionTypename t
+ ConnectionPageInfo p -> pure $ ConnectionPageInfo p
+ ConnectionEdges edges -> ConnectionEdges <$> transformEdges (appendPath fieldName path) edges
+ let select = _csSelect{_asnFields = transformedFields}
+ pure $ ConnectionSelect _csPrimaryKeyColumns _csSplit _csSlice select
+ where
+ transformEdges edgePath edgeFields =
+ forM edgeFields $ \(fieldName, edgeField) ->
+ (fieldName,) <$> case edgeField of
+ EdgeTypename t -> pure $ EdgeTypename t
+ EdgeCursor -> pure EdgeCursor
+ EdgeNode annFields ->
+ EdgeNode <$> transformAnnFields (appendPath fieldName edgePath) annFields
-- | Traverse through 'MutationOutput' and collect remote join fields (if any)
getRemoteJoinsMutationOutput :: MutationOutput -> (MutationOutput, Maybe RemoteJoins)
@@ -203,10 +198,10 @@ getRemoteJoinsMutationOutput =
MExp t -> pure $ MExp t
MRet annFields -> MRet <$> transformAnnFields fieldPath annFields
-transformAnnFields :: FieldPath -> AnnFlds -> State RemoteJoinMap AnnFlds
+transformAnnFields :: FieldPath -> AnnFields -> State RemoteJoinMap AnnFields
transformAnnFields path fields = do
- let pgColumnFields = map fst $ getFields _FCol fields
- remoteSelects = getFields _FRemote fields
+ let pgColumnFields = map fst $ getFields _AFColumn fields
+ remoteSelects = getFields _AFRemote fields
remoteJoins = flip map remoteSelects $ \(fieldName, remoteSelect) ->
let RemoteSelect argsMap selSet hasuraColumns remoteFields rsi = remoteSelect
hasuraColumnL = toList hasuraColumns
@@ -217,44 +212,46 @@ transformAnnFields path fields = do
transformedFields <- forM fields $ \(fieldName, field') -> do
let fieldPath = appendPath fieldName path
(fieldName,) <$> case field' of
- FCol c -> pure $ FCol c
- FObj annRel ->
- FObj <$> transformAnnRelation fieldPath annRel
- FArr (ASSimple annRel) ->
- FArr . ASSimple <$> transformAnnRelation fieldPath annRel
- FArr (ASAgg aggRel) ->
- FArr . ASAgg <$> transformAnnAggregateRelation fieldPath aggRel
- -- AFArrayRelation (ASConnection annRel) ->
- -- AFArrayRelation . ASConnection <$> transformArrayConnection fieldPath annRel
- FComputedField computedField ->
- FComputedField <$> case computedField of
+ AFNodeId qt pkeys -> pure $ AFNodeId qt pkeys
+ AFColumn c -> pure $ AFColumn c
+ AFObjectRelation annRel ->
+ AFObjectRelation <$> transformAnnRelation fieldPath annRel
+ AFArrayRelation (ASSimple annRel) ->
+ AFArrayRelation . ASSimple <$> transformAnnRelation fieldPath annRel
+ AFArrayRelation (ASAggregate aggRel) ->
+ AFArrayRelation . ASAggregate <$> transformAnnAggregateRelation fieldPath aggRel
+ AFArrayRelation (ASConnection annRel) ->
+ AFArrayRelation . ASConnection <$> transformArrayConnection fieldPath annRel
+ AFComputedField computedField ->
+ AFComputedField <$> case computedField of
CFSScalar _ -> pure computedField
CFSTable jas annSel -> CFSTable jas <$> transformSelect fieldPath annSel
- FRemote rs -> pure $ FRemote rs
+ AFRemote rs -> pure $ AFRemote rs
+ AFExpression t -> pure $ AFExpression t
case NE.nonEmpty remoteJoins of
Nothing -> pure transformedFields
Just nonEmptyRemoteJoins -> do
- let phantomColumns = map (\ci -> (fromPGCol $ pgiColumn ci, FCol $ AnnColField ci False Nothing)) $
+ let phantomColumns = map (\ci -> (fromPGCol $ pgiColumn ci, AFColumn $ AnnColumnField ci False Nothing)) $
concatMap _rjPhantomFields remoteJoins
modify (Map.insert path nonEmptyRemoteJoins)
pure $ transformedFields <> phantomColumns
where
getFields f = mapMaybe (sequence . second (^? f))
transformAnnRelation fieldPath annRel = do
- let annSel = aarAnnSel annRel
+ let annSel = aarAnnSelect annRel
transformedSel <- transformSelect fieldPath annSel
- pure annRel{aarAnnSel = transformedSel}
+ pure annRel{aarAnnSelect = transformedSel}
transformAnnAggregateRelation fieldPath annRel = do
- let annSel = aarAnnSel annRel
+ let annSel = aarAnnSelect annRel
transformedSel <- transformAggregateSelect fieldPath annSel
- pure annRel{aarAnnSel = transformedSel}
+ pure annRel{aarAnnSelect = transformedSel}
- -- transformArrayConnection fieldPath annRel = do
- -- let connectionSelect = aarAnnSelect annRel
- -- transformedConnectionSelect <- transformConnectionSelect fieldPath connectionSelect
- -- pure annRel{aarAnnSelect = transformedConnectionSelect}
+ transformArrayConnection fieldPath annRel = do
+ let connectionSelect = aarAnnSelect annRel
+ transformedConnectionSelect <- transformConnectionSelect fieldPath connectionSelect
+ pure annRel{aarAnnSelect = transformedConnectionSelect}
type CompositeObject a = OMap.InsOrdHashMap Text (CompositeValue a)
@@ -338,7 +335,7 @@ traverseQueryResponseJSON rjm =
A.Number val ->
case (toBoundedInteger val) of
Just intVal -> pure $ G.VInt intVal
- Nothing -> pure $ G.VFloat $ toRealFloat val
+ Nothing -> pure $ G.VFloat $ toRealFloat val
A.Array vals -> G.VList <$> traverse go (toList vals)
A.Object vals ->
G.VObject . Map.fromList <$> for (Map.toList vals) \(key, val) -> do
diff --git a/server/src-lib/Hasura/RQL/DML/Returning.hs b/server/src-lib/Hasura/RQL/DML/Returning.hs
index 049bb5e983c..ed510443c8a 100644
--- a/server/src-lib/Hasura/RQL/DML/Returning.hs
+++ b/server/src-lib/Hasura/RQL/DML/Returning.hs
@@ -10,7 +10,6 @@ import Hasura.SQL.Types
import qualified Data.Text as T
import qualified Hasura.SQL.DML as S
-
traverseMutFld
:: (Applicative f)
=> (a -> f b)
@@ -19,8 +18,7 @@ traverseMutFld
traverseMutFld f = \case
MCount -> pure MCount
MExp t -> pure $ MExp t
- MRet flds -> MRet <$> traverse (traverse (traverseAnnFld f)) flds
-
+ MRet flds -> MRet <$> traverse (traverse (traverseAnnField f)) flds
traverseMutationOutput
:: (Applicative f)
@@ -30,7 +28,7 @@ traverseMutationOutput f = \case
MOutMultirowFields mutationFields ->
MOutMultirowFields <$> traverse (traverse (traverseMutFld f)) mutationFields
MOutSinglerowObject annFields ->
- MOutSinglerowObject <$> traverseAnnFlds f annFields
+ MOutSinglerowObject <$> traverseAnnFields f annFields
traverseMutFlds
:: (Applicative f)
@@ -43,15 +41,15 @@ traverseMutFlds f =
hasNestedFld :: MutationOutputG a -> Bool
hasNestedFld = \case
MOutMultirowFields flds -> any isNestedMutFld flds
- MOutSinglerowObject annFlds -> any isNestedAnnFld annFlds
+ MOutSinglerowObject annFlds -> any isNestedAnnField annFlds
where
isNestedMutFld (_, mutFld) = case mutFld of
- MRet annFlds -> any isNestedAnnFld annFlds
+ MRet annFlds -> any isNestedAnnField annFlds
_ -> False
- isNestedAnnFld (_, annFld) = case annFld of
- FObj _ -> True
- FArr _ -> True
- _ -> False
+ isNestedAnnField (_, annFld) = case annFld of
+ AFObjectRelation _ -> True
+ AFArrayRelation _ -> True
+ _ -> False
pgColsFromMutFld :: MutFld -> [(PGCol, PGColumnType)]
pgColsFromMutFld = \case
@@ -59,16 +57,16 @@ pgColsFromMutFld = \case
MExp _ -> []
MRet selFlds ->
flip mapMaybe selFlds $ \(_, annFld) -> case annFld of
- FCol (AnnColField (PGColumnInfo col _ _ colTy _ _) _ _) -> Just (col, colTy)
- _ -> Nothing
+ AFColumn (AnnColumnField (PGColumnInfo col _ _ colTy _ _) _ _) -> Just (col, colTy)
+ _ -> Nothing
pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColumnType)]
pgColsFromMutFlds = concatMap (pgColsFromMutFld . snd)
-pgColsToSelFlds :: [PGColumnInfo] -> [(FieldName, AnnFld)]
+pgColsToSelFlds :: [PGColumnInfo] -> [(FieldName, AnnField)]
pgColsToSelFlds cols =
flip map cols $
- \pgColInfo -> (fromPGCol $ pgiColumn pgColInfo, mkAnnColField pgColInfo Nothing)
+ \pgColInfo -> (fromPGCol $ pgiColumn pgColInfo, mkAnnColumnField pgColInfo Nothing)
mkDefaultMutFlds :: Maybe [PGColumnInfo] -> MutationOutput
mkDefaultMutFlds = MOutMultirowFields . \case
@@ -95,7 +93,7 @@ mkMutFldExp qt preCalAffRows strfyNum = \case
let tabFrom = FromIden cteAlias
tabPerm = TablePerm annBoolExpTrue Nothing
in S.SESelect $ mkSQLSelect JASMultipleRows $
- AnnSelG selFlds tabFrom tabPerm noTableArgs strfyNum
+ AnnSelectG selFlds tabFrom tabPerm noSelectArgs strfyNum
where
cteAlias = qualTableToAliasIden qt
@@ -117,7 +115,7 @@ mkMutationOutputExp qt preCalAffRows cte mutOutput strfyNum =
let tabFrom = FromIden cteAlias
tabPerm = TablePerm annBoolExpTrue Nothing
in S.SESelect $ mkSQLSelect JASSingleObject $
- AnnSelG annFlds tabFrom tabPerm noTableArgs strfyNum
+ AnnSelectG annFlds tabFrom tabPerm noSelectArgs strfyNum
checkRetCols
diff --git a/server/src-lib/Hasura/RQL/DML/Returning/Types.hs b/server/src-lib/Hasura/RQL/DML/Returning/Types.hs
index e46d0f18c04..d94f809c4e4 100644
--- a/server/src-lib/Hasura/RQL/DML/Returning/Types.hs
+++ b/server/src-lib/Hasura/RQL/DML/Returning/Types.hs
@@ -12,7 +12,7 @@ import Hasura.RQL.DML.Select.Types
data MutFldG v
= MCount
| MExp !T.Text
- | MRet !(AnnFldsG v)
+ | MRet !(AnnFieldsG v)
deriving (Show, Eq)
type MutFld = MutFldG S.SQLExp
@@ -21,7 +21,7 @@ type MutFldsG v = Fields (MutFldG v)
data MutationOutputG v
= MOutMultirowFields !(MutFldsG v)
- | MOutSinglerowObject !(AnnFldsG v)
+ | MOutSinglerowObject !(AnnFieldsG v)
deriving (Show, Eq)
type MutationOutput = MutationOutputG S.SQLExp
diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs
index 810e0e953be..4e7f1faabcc 100644
--- a/server/src-lib/Hasura/RQL/DML/Select.hs
+++ b/server/src-lib/Hasura/RQL/DML/Select.hs
@@ -1,11 +1,11 @@
module Hasura.RQL.DML.Select
( selectP2
- , selectQuerySQL
- , selectAggQuerySQL
, convSelectQuery
, asSingleRowJsonResp
- , module Hasura.RQL.DML.Select.Internal
, runSelect
+ , selectQuerySQL
+ , selectAggregateQuerySQL
+ , module Hasura.RQL.DML.Select.Internal
)
where
@@ -105,7 +105,7 @@ convOrderByElem
=> SessVarBldr m
-> (FieldInfoMap FieldInfo, SelPermInfo)
-> OrderByCol
- -> m AnnObCol
+ -> m (AnnOrderByElement S.SQLExp)
convOrderByElem sessVarBldr (flds, spi) = \case
OCPG fldName -> do
fldInfo <- askFieldInfo flds fldName
@@ -118,7 +118,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
[ fldName <<> " has type 'geometry'"
, " and cannot be used in order_by"
]
- else return $ AOCPG $ pgiColumn colInfo
+ else return $ AOCColumn colInfo
FIRelationship _ -> throw400 UnexpectedPayload $ mconcat
[ fldName <<> " is a"
, " relationship and should be expanded"
@@ -149,7 +149,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
]
(relFim, relSpi) <- fetchRelDet (riName relInfo) (riRTable relInfo)
resolvedSelFltr <- convAnnBoolExpPartialSQL sessVarBldr $ spiFilter relSpi
- AOCObj relInfo resolvedSelFltr <$>
+ AOCObjectRelation relInfo resolvedSelFltr <$>
convOrderByElem sessVarBldr (relFim, relSpi) rest
FIRemoteRelationship {} ->
throw400 UnexpectedPayload (mconcat [ fldName <<> " is a remote field" ])
@@ -168,12 +168,12 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
indexedForM (sqColumns selQ) $ \case
(ECSimple pgCol) -> do
colInfo <- convExtSimple fieldInfoMap selPermInfo pgCol
- return (fromPGCol pgCol, mkAnnColField colInfo Nothing)
+ return (fromPGCol pgCol, mkAnnColumnField colInfo Nothing)
(ECRel relName mAlias relSelQ) -> do
annRel <- convExtRel fieldInfoMap relName mAlias
relSelQ sessVarBldr prepValBldr
return ( fromRel $ fromMaybe relName mAlias
- , either FObj FArr annRel
+ , either AFObjectRelation AFArrayRelation annRel
)
-- let spiT = spiTable selPermInfo
@@ -198,11 +198,11 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
let tabFrom = FromTable $ spiTable selPermInfo
tabPerm = TablePerm resolvedSelFltr mPermLimit
- tabArgs = TableArgs wClause annOrdByM mQueryLimit
+ tabArgs = SelectArgs wClause annOrdByM mQueryLimit
(S.intToSQLExp <$> mQueryOffset) Nothing
strfyNum <- stringifyNum <$> askSQLGenCtx
- return $ AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum
+ return $ AnnSelectG annFlds tabFrom tabPerm tabArgs strfyNum
where
mQueryOffset = sqOffset selQ
@@ -229,7 +229,7 @@ convExtRel
-> SelectQExt
-> SessVarBldr m
-> (PGColumnType -> Value -> m S.SQLExp)
- -> m (Either ObjSel ArrSel)
+ -> m (Either ObjectRelationSelect ArraySelect)
convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
-- Point to the name key
relInfo <- withPathK "name" $
@@ -240,9 +240,9 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
case relTy of
ObjRel -> do
when misused $ throw400 UnexpectedPayload objRelMisuseMsg
- return $ Left $ AnnRelG (fromMaybe relName mAlias) colMapping annSel
+ return $ Left $ AnnRelationSelectG (fromMaybe relName mAlias) colMapping annSel
ArrRel ->
- return $ Right $ ASSimple $ AnnRelG (fromMaybe relName mAlias)
+ return $ Right $ ASSimple $ AnnRelationSelectG (fromMaybe relName mAlias)
colMapping annSel
where
pgWhenRelErr = "only relationships can be expanded"
@@ -270,8 +270,7 @@ convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do
let fieldInfo = _tciFieldInfoMap $ _tiCoreInfo tabInfo
extSelQ <- resolveStar fieldInfo selPermInfo selQ
validateHeaders $ spiRequiredHeaders selPermInfo
- convSelectQ fieldInfo selPermInfo
- extSelQ sessVarBldr prepArgBuilder
+ convSelectQ fieldInfo selPermInfo extSelQ sessVarBldr prepArgBuilder
selectP2 :: JsonAggSelect -> (AnnSimpleSel, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON
selectP2 jsonAggSelect (sel, p) =
@@ -284,9 +283,9 @@ selectQuerySQL :: JsonAggSelect -> AnnSimpleSel -> Q.Query
selectQuerySQL jsonAggSelect sel =
Q.fromBuilder $ toSQL $ mkSQLSelect jsonAggSelect sel
-selectAggQuerySQL :: AnnAggSel -> Q.Query
-selectAggQuerySQL =
- Q.fromBuilder . toSQL . mkAggSelect
+selectAggregateQuerySQL :: AnnAggregateSelect -> Q.Query
+selectAggregateQuerySQL =
+ Q.fromBuilder . toSQL . mkAggregateSelect
asSingleRowJsonResp :: Q.Query -> [Q.PrepArg] -> Q.TxE QErr EncJSON
asSingleRowJsonResp query args =
diff --git a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs
index 3d60a76bb2e..7091f6f7254 100644
--- a/server/src-lib/Hasura/RQL/DML/Select/Internal.hs
+++ b/server/src-lib/Hasura/RQL/DML/Select/Internal.hs
@@ -1,13 +1,13 @@
module Hasura.RQL.DML.Select.Internal
( mkSQLSelect
- , mkAggSelect
+ , mkAggregateSelect
+ , mkConnectionSelect
, module Hasura.RQL.DML.Select.Types
)
where
import Control.Lens hiding (op)
-import qualified Data.List as L
-import Instances.TH.Lift ()
+import Control.Monad.Writer.Strict
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
@@ -18,7 +18,7 @@ import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Select.Types
import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
-import Hasura.SQL.Rewrite (prefixNumToAliases)
+import Hasura.SQL.Rewrite
import Hasura.SQL.Types
import qualified Hasura.SQL.DML as S
@@ -30,8 +30,8 @@ import qualified Hasura.SQL.DML as S
functionToIden :: QualifiedFunction -> Iden
functionToIden = Iden . qualObjectToText
-selFromToFromItem :: Iden -> SelectFrom -> S.FromItem
-selFromToFromItem pfx = \case
+selectFromToFromItem :: Iden -> SelectFrom -> S.FromItem
+selectFromToFromItem pfx = \case
FromTable tn -> S.FISimple tn Nothing
FromIden i -> S.FIIden i
FromFunction qf args defListM ->
@@ -40,17 +40,17 @@ selFromToFromItem pfx = \case
-- This function shouldn't be present ideally
-- You should be able to retrieve this information
--- from the FromItem generated with selFromToFromItem
+-- from the FromItem generated with selectFromToFromItem
-- however given from S.FromItem is modelled, it is not
-- possible currently
-selFromToQual :: SelectFrom -> S.Qual
-selFromToQual = \case
+selectFromToQual :: SelectFrom -> S.Qual
+selectFromToQual = \case
FromTable tn -> S.QualTable tn
FromIden i -> S.QualIden i Nothing
FromFunction qf _ _ -> S.QualIden (functionToIden qf) Nothing
-aggFldToExp :: AggFlds -> S.SQLExp
-aggFldToExp aggFlds = jsonRow
+aggregateFieldToExp :: AggregateFields -> S.SQLExp
+aggregateFieldToExp aggFlds = jsonRow
where
jsonRow = S.applyJsonBuildObj (concatMap aggToFlds aggFlds)
withAls fldName sqlExp = [S.SELit fldName, sqlExp]
@@ -59,26 +59,16 @@ aggFldToExp aggFlds = jsonRow
AFOp aggOp -> aggOpToObj aggOp
AFExp e -> S.SELit e
- aggOpToObj (AggOp op flds) =
- S.applyJsonBuildObj $ concatMap (colFldsToExtr op) flds
+ aggOpToObj (AggregateOp opText flds) =
+ S.applyJsonBuildObj $ concatMap (colFldsToExtr opText) flds
- colFldsToExtr op (FieldName t, PCFCol col) =
+ colFldsToExtr opText (FieldName t, PCFCol col) =
[ S.SELit t
- , S.SEFnApp op [S.SEIden $ toIden col] Nothing
+ , S.SEFnApp opText [S.SEIden $ toIden col] Nothing
]
colFldsToExtr _ (FieldName t, PCFExp e) =
[ S.SELit t , S.SELit e]
-arrNodeToSelect :: BaseNode -> [S.Extractor] -> S.BoolExp -> S.Select
-arrNodeToSelect bn extrs joinCond =
- S.mkSelect
- { S.selExtr = extrs
- , S.selFrom = Just $ S.FromExp [selFrom]
- }
- where
- selFrom = S.mkSelFromItem (baseNodeToSel joinCond bn) $ S.Alias $
- _bnPrefix bn
-
asSingleRowExtr :: S.Alias -> S.SQLExp
asSingleRowExtr col =
S.SEFnApp "coalesce" [jsonAgg, S.SELit "null"] Nothing
@@ -89,11 +79,12 @@ asSingleRowExtr col =
]
withJsonAggExtr
- :: Bool -> Maybe Int -> Maybe S.OrderByExp -> S.Alias -> S.SQLExp
-withJsonAggExtr subQueryReq permLimitM ordBy alias =
+ :: PermissionLimitSubQuery -> Maybe S.OrderByExp -> S.Alias -> S.SQLExp
+withJsonAggExtr permLimitSubQuery ordBy alias =
-- if select has aggregations then use subquery to apply permission limit
- if subQueryReq then maybe simpleJsonAgg withPermLimit permLimitM
- else simpleJsonAgg
+ case permLimitSubQuery of
+ PLSQRequired permLimit -> withPermLimit permLimit
+ PLSQNotRequired -> simpleJsonAgg
where
simpleJsonAgg = mkSimpleJsonAgg rowIdenExp ordBy
rowIdenExp = S.SEIden $ S.getAlias alias
@@ -107,7 +98,7 @@ withJsonAggExtr subQueryReq permLimitM ordBy alias =
withPermLimit limit =
let subSelect = mkSubSelect limit
rowIden = S.mkQIdenExp subSelAls alias
- extr = S.Extractor (mkSimpleJsonAgg rowIden newOrdBy) Nothing
+ extr = S.Extractor (mkSimpleJsonAgg rowIden newOrderBy) Nothing
fromExp = S.FromExp $ pure $
S.mkSelFromItem subSelect $ S.Alias subSelAls
in S.SESelect $ S.mkSelect { S.selExtr = pure extr
@@ -122,7 +113,7 @@ withJsonAggExtr subQueryReq permLimitM ordBy alias =
in S.mkSelect { S.selExtr = jsonRowExtr : obExtrs
, S.selFrom = Just $ S.FromExp $ pure unnestFromItem
, S.selLimit = Just $ S.LimitExp $ S.intToSQLExp limit
- , S.selOrderBy = newOrdBy
+ , S.selOrderBy = newOrderBy
}
unnestFromItem =
@@ -131,11 +122,11 @@ withJsonAggExtr subQueryReq permLimitM ordBy alias =
in S.FIUnnest arrayAggItems (S.Alias unnestTable) $
rowIdenExp : map S.SEIden newOBAliases
- newOrdBy = bool (Just $ S.OrderByExp newOBItems) Nothing $ null newOBItems
+ newOrderBy = S.OrderByExp <$> NE.nonEmpty newOBItems
- (newOBItems, obCols, newOBAliases) = maybe ([], [], []) transformOrdBy ordBy
- transformOrdBy (S.OrderByExp l) = unzip3 $
- flip map (zip l [1..]) $ \(obItem, i::Int) ->
+ (newOBItems, obCols, newOBAliases) = maybe ([], [], []) transformOrderBy ordBy
+ transformOrderBy (S.OrderByExp l) = unzip3 $
+ flip map (zip (toList l) [1..]) $ \(obItem, i::Int) ->
let iden = Iden $ "ob_col_" <> T.pack (show i)
in ( obItem{S.oColumn = S.SEIden iden}
, S.oColumn obItem
@@ -143,113 +134,82 @@ withJsonAggExtr subQueryReq permLimitM ordBy alias =
)
asJsonAggExtr
- :: JsonAggSelect -> S.Alias -> Bool -> Maybe Int -> Maybe S.OrderByExp -> S.Extractor
-asJsonAggExtr jsonAggSelect als subQueryReq permLimit ordByExpM =
+ :: JsonAggSelect -> S.Alias -> PermissionLimitSubQuery -> Maybe S.OrderByExp -> S.Extractor
+asJsonAggExtr jsonAggSelect als permLimitSubQuery ordByExpM =
flip S.Extractor (Just als) $ case jsonAggSelect of
- JASMultipleRows -> withJsonAggExtr subQueryReq permLimit ordByExpM als
+ JASMultipleRows -> withJsonAggExtr permLimitSubQuery ordByExpM als
JASSingleObject -> asSingleRowExtr als
-- array relationships are not grouped, so have to be prefixed by
-- parent's alias
-mkUniqArrRelAls :: FieldName -> [FieldName] -> Iden
-mkUniqArrRelAls parAls flds =
- Iden $
- getFieldNameTxt parAls <> "."
- <> T.intercalate "." (map getFieldNameTxt flds)
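+-- For example (hypothetical field names): for a parent field @author@ with
+-- sibling array-relation fields @articles_aggregate@ and @articles@, the
+-- generated alias is @author.articles.articles_aggregate@; sorting the field
+-- names keeps the alias stable regardless of selection order.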
+mkUniqArrayRelationAlias :: FieldName -> [FieldName] -> Iden
+mkUniqArrayRelationAlias parAls flds =
+ let sortedFields = sort flds
+ in Iden $
+ getFieldNameTxt parAls <> "."
+ <> T.intercalate "." (map getFieldNameTxt sortedFields)
-mkArrRelTableAls :: Iden -> FieldName -> [FieldName] -> Iden
-mkArrRelTableAls pfx parAls flds =
+mkArrayRelationTableAlias :: Iden -> FieldName -> [FieldName] -> Iden
+mkArrayRelationTableAlias pfx parAls flds =
pfx <> Iden ".ar." <> uniqArrRelAls
where
- uniqArrRelAls = mkUniqArrRelAls parAls flds
+ uniqArrRelAls = mkUniqArrayRelationAlias parAls flds
-mkObjRelTableAls :: Iden -> RelName -> Iden
-mkObjRelTableAls pfx relName =
+mkObjectRelationTableAlias :: Iden -> RelName -> Iden
+mkObjectRelationTableAlias pfx relName =
pfx <> Iden ".or." <> toIden relName
-mkComputedFieldTableAls :: Iden -> FieldName -> Iden
-mkComputedFieldTableAls pfx fldAls =
+mkComputedFieldTableAlias :: Iden -> FieldName -> Iden
+mkComputedFieldTableAlias pfx fldAls =
pfx <> Iden ".cf." <> toIden fldAls
-mkBaseTableAls :: Iden -> Iden
-mkBaseTableAls pfx =
+mkBaseTableAlias :: Iden -> Iden
+mkBaseTableAlias pfx =
pfx <> Iden ".base"
-mkBaseTableColAls :: Iden -> PGCol -> Iden
-mkBaseTableColAls pfx pgColumn =
+mkBaseTableColumnAlias :: Iden -> PGCol -> Iden
+mkBaseTableColumnAlias pfx pgColumn =
pfx <> Iden ".pg." <> toIden pgColumn
mkOrderByFieldName :: RelName -> FieldName
mkOrderByFieldName relName =
FieldName $ relNameToTxt relName <> "." <> "order_by"
+mkAggregateOrderByAlias :: AnnAggregateOrderBy -> S.Alias
+mkAggregateOrderByAlias = (S.Alias . Iden) . \case
+ AAOCount -> "count"
+ AAOOp opText col -> opText <> "." <> getPGColTxt (pgiColumn col)
+
+mkArrayRelationSourcePrefix
+ :: Iden
+ -> FieldName
+ -> HM.HashMap FieldName [FieldName]
+ -> FieldName
+ -> Iden
+mkArrayRelationSourcePrefix parentSourcePrefix parentFieldName similarFieldsMap fieldName =
+ mkArrayRelationTableAlias parentSourcePrefix parentFieldName $
+ HM.lookupDefault [fieldName] fieldName similarFieldsMap
+
+mkArrayRelationAlias
+ :: FieldName
+ -> HM.HashMap FieldName [FieldName]
+ -> FieldName
+ -> S.Alias
+mkArrayRelationAlias parentFieldName similarFieldsMap fieldName =
+ S.Alias $ mkUniqArrayRelationAlias parentFieldName $
+ HM.lookupDefault [fieldName] fieldName similarFieldsMap
+
fromTableRowArgs
:: Iden -> FunctionArgsExpTableRow S.SQLExp -> S.FunctionArgs
fromTableRowArgs pfx = toFunctionArgs . fmap toSQLExp
where
toFunctionArgs (FunctionArgsExp positional named) =
S.FunctionArgs positional named
- toSQLExp (AETableRow Nothing) = S.SERowIden $ mkBaseTableAls pfx
- toSQLExp (AETableRow (Just acc)) = S.mkQIdenExp (mkBaseTableAls pfx) acc
+ toSQLExp (AETableRow Nothing) = S.SERowIden $ mkBaseTableAlias pfx
+ toSQLExp (AETableRow (Just acc)) = S.mkQIdenExp (mkBaseTableAlias pfx) acc
+ -- toSQLExp (AESession s) = s
toSQLExp (AEInput s) = s
--- posttgres ignores anything beyond 63 chars for an iden
--- in this case, we'll need to use json_build_object function
--- json_build_object is slower than row_to_json hence it is only
--- used when needed
-buildJsonObject
- :: Iden -> FieldName -> ArrRelCtx -> Bool
- -> [(FieldName, AnnFld)] -> (S.Alias, S.SQLExp)
-buildJsonObject pfx parAls arrRelCtx strfyNum flds =
- if any ( (> 63) . T.length . getFieldNameTxt . fst ) flds
- then withJsonBuildObj parAls jsonBuildObjExps
- else withRowToJSON parAls rowToJsonExtrs
- where
- jsonBuildObjExps = concatMap (toSQLFld withAlsExp) flds
- rowToJsonExtrs = map (toSQLFld withAlsExtr) flds
-
- withAlsExp fldName sqlExp =
- [S.SELit $ getFieldNameTxt fldName, sqlExp]
-
- withAlsExtr fldName sqlExp =
- S.Extractor sqlExp $ Just $ S.toAlias fldName
-
- toSQLFld :: (FieldName -> S.SQLExp -> f)
- -> (FieldName, AnnFld) -> f
- toSQLFld f (fldAls, fld) = f fldAls $ case fld of
- FCol c -> toSQLCol c
- FExp e -> S.SELit e
- FObj objSel ->
- let qual = mkObjRelTableAls pfx $ aarName objSel
- in S.mkQIdenExp qual fldAls
- FArr arrSel ->
- let arrPfx = _aniPrefix $ mkArrNodeInfo pfx parAls arrRelCtx $
- ANIField (fldAls, arrSel)
- in S.mkQIdenExp arrPfx fldAls
- FComputedField (CFSScalar computedFieldScalar) ->
- fromScalarComputedField computedFieldScalar
- FComputedField (CFSTable _ _) ->
- let ccPfx = mkComputedFieldTableAls pfx fldAls
- in S.mkQIdenExp ccPfx fldAls
- FRemote _ -> S.SELit "null: remote field selected"
-
- toSQLCol :: AnnColField -> S.SQLExp
- toSQLCol (AnnColField col asText colOpM) =
- toJSONableExp strfyNum (pgiType col) asText $ withColOp colOpM $
- S.mkQIdenExp (mkBaseTableAls pfx) $ pgiColumn col
-
- fromScalarComputedField :: ComputedFieldScalarSel S.SQLExp -> S.SQLExp
- fromScalarComputedField computedFieldScalar =
- toJSONableExp strfyNum (PGColumnScalar ty) False $ withColOp colOpM $
- S.SEFunction $ S.FunctionExp fn (fromTableRowArgs pfx args) Nothing
- where
- ComputedFieldScalarSel fn args ty colOpM = computedFieldScalar
-
- withColOp :: Maybe ColOp -> S.SQLExp -> S.SQLExp
- withColOp colOpM sqlExp = case colOpM of
- Nothing -> sqlExp
- Just (ColOp op cExp) -> S.mkSQLOpExp op sqlExp cExp
-
-- uses row_to_json to build a json object
withRowToJSON
:: FieldName -> [S.Extractor] -> (S.Alias, S.SQLExp)
@@ -266,423 +226,597 @@ withJsonBuildObj parAls exps =
where
jsonRow = S.applyJsonBuildObj exps
-mkAggObFld :: AnnAggOrdBy -> FieldName
-mkAggObFld = \case
- AAOCount -> FieldName "count"
- AAOOp op col -> FieldName $ op <> "." <> getPGColTxt col
+-- | Forces aggregation
+withForceAggregation :: S.TypeAnn -> S.SQLExp -> S.SQLExp
+withForceAggregation tyAnn e =
+ -- bool_or to force aggregation
+ S.SEFnApp "coalesce" [e, S.SETyAnn (S.SEUnsafe "bool_or('true')") tyAnn] Nothing
-mkAggObExtrAndFlds :: AnnAggOrdBy -> (S.Extractor, AggFlds)
-mkAggObExtrAndFlds annAggOb = case annAggOb of
- AAOCount ->
- ( S.Extractor S.countStar als
- , [(FieldName "count", AFCount S.CTStar)]
- )
- AAOOp op pgColumn ->
- ( S.Extractor (S.SEFnApp op [S.SEIden $ toIden pgColumn] Nothing) als
- , [(FieldName op, AFOp $ AggOp op [(fromPGCol pgColumn, PCFCol pgColumn)])]
- )
+mkAggregateOrderByExtractorAndFields
+ :: AnnAggregateOrderBy -> (S.Extractor, AggregateFields)
+mkAggregateOrderByExtractorAndFields annAggOrderBy =
+ case annAggOrderBy of
+ AAOCount ->
+ ( S.Extractor S.countStar alias
+ , [(FieldName "count", AFCount S.CTStar)]
+ )
+ AAOOp opText pgColumnInfo ->
+ let pgColumn = pgiColumn pgColumnInfo
+ in ( S.Extractor (S.SEFnApp opText [S.SEIden $ toIden pgColumn] Nothing) alias
+ , [(FieldName opText, AFOp $ AggregateOp opText [(fromPGCol pgColumn, PCFCol pgColumn)])]
+ )
where
- als = Just $ S.toAlias $ mkAggObFld annAggOb
+ alias = Just $ mkAggregateOrderByAlias annAggOrderBy
-processAnnOrderByItem
- :: Iden
- -> FieldName
- -> ArrRelCtx
- -> Bool
- -> AnnOrderByItem
- -- the extractors which will select the needed columns
- -> ( (S.Alias, S.SQLExp)
- -- the sql order by item that is attached to the final select
- , S.OrderByItem
- -- extra nodes for order by
- , OrderByNode
- )
-processAnnOrderByItem pfx parAls arrRelCtx strfyNum obItemG =
- ( (obColAls, obColExp)
- , sqlOrdByItem
- , relNodeM
- )
- where
- OrderByItemG obTyM annObCol obNullsM = obItemG
- ((obColAls, obColExp), relNodeM) =
- processAnnOrderByCol pfx parAls arrRelCtx strfyNum annObCol
-
- sqlOrdByItem =
- S.OrderByItem (S.SEIden $ toIden obColAls)
- (unOrderType <$> obTyM) (unNullsOrder <$> obNullsM)
-
-processAnnOrderByCol
- :: Iden
- -> FieldName
- -> ArrRelCtx
- -> Bool
- -> AnnObCol
- -- the extractors which will select the needed columns
- -> ( (S.Alias, S.SQLExp)
- -- extra nodes for order by
- , OrderByNode
- )
-processAnnOrderByCol pfx parAls arrRelCtx strfyNum = \case
- AOCPG pgColumn ->
- let
- qualCol = S.mkQIdenExp (mkBaseTableAls pfx) (toIden pgColumn)
- obColAls = mkBaseTableColAls pfx pgColumn
- in ( (S.Alias obColAls, qualCol)
- , OBNNothing
- )
+mkAnnOrderByAlias
+ :: Iden -> FieldName -> SimilarArrayFields -> AnnOrderByElementG v -> S.Alias
+mkAnnOrderByAlias pfx parAls similarFields = \case
+ AOCColumn pgColumnInfo ->
+ let pgColumn = pgiColumn pgColumnInfo
+ obColAls = mkBaseTableColumnAlias pfx pgColumn
+ in S.Alias obColAls
-- "pfx.or.relname"."pfx.ob.or.relname.rest" AS "pfx.ob.or.relname.rest"
- AOCObj (RelInfo rn _ colMapping relTab _ _) relFltr rest ->
- let relPfx = mkObjRelTableAls pfx rn
+ AOCObjectRelation relInfo _ rest ->
+ let rn = riName relInfo
+ relPfx = mkObjectRelationTableAlias pfx rn
ordByFldName = mkOrderByFieldName rn
- ((nesAls, nesCol), ordByNode) =
- processAnnOrderByCol relPfx ordByFldName emptyArrRelCtx strfyNum rest
- (objNodeM, arrNodeM) = case ordByNode of
- OBNNothing -> (Nothing, Nothing)
- OBNObjNode name node -> (Just (name, node), Nothing)
- OBNArrNode als node -> (Nothing, Just (als, node))
- qualCol = S.mkQIdenExp relPfx nesAls
- relBaseNode =
- BaseNode relPfx Nothing (S.FISimple relTab Nothing)
- (toSQLBoolExp (S.QualTable relTab) relFltr)
- Nothing Nothing Nothing
- (HM.singleton nesAls nesCol)
- (maybe HM.empty (uncurry HM.singleton) objNodeM)
- (maybe HM.empty (uncurry HM.singleton) arrNodeM)
- HM.empty
- relNode = ObjNode colMapping relBaseNode
- in ( (nesAls, qualCol)
- , OBNObjNode rn relNode
- )
- AOCAgg (RelInfo rn _ colMapping relTab _ _) relFltr annAggOb ->
- let ArrNodeInfo arrAls arrPfx _ =
- mkArrNodeInfo pfx parAls arrRelCtx $ ANIAggOrdBy rn
- fldName = mkAggObFld annAggOb
- qOrdBy = S.mkQIdenExp arrPfx $ toIden fldName
- tabFrom = FromTable relTab
- tabPerm = TablePerm relFltr Nothing
- (extr, arrFlds) = mkAggObExtrAndFlds annAggOb
- selFld = TAFAgg arrFlds
- bn = mkBaseNode False (Prefixes arrPfx pfx) fldName selFld tabFrom
- tabPerm noTableArgs strfyNum
- aggNode = ArrNode [extr] colMapping $ mergeBaseNodes bn $
- mkEmptyBaseNode arrPfx tabFrom
- obAls = arrPfx <> Iden "." <> toIden fldName
- in ( (S.Alias obAls, qOrdBy)
- , OBNArrNode arrAls aggNode
- )
+ nesAls = mkAnnOrderByAlias relPfx ordByFldName mempty rest
+ in nesAls
+ AOCArrayAggregation relInfo _ aggOrderBy ->
+ let rn = riName relInfo
+ arrPfx = mkArrayRelationSourcePrefix pfx parAls similarFields $
+ mkOrderByFieldName rn
+ obAls = arrPfx <> Iden "." <> toIden (mkAggregateOrderByAlias aggOrderBy)
+ in S.Alias obAls
-processDistinctOnCol
+processDistinctOnColumns
:: Iden
-> NE.NonEmpty PGCol
-> ( S.DistinctExpr
- -- additional column extractors
- , [(S.Alias, S.SQLExp)]
+ , [(S.Alias, S.SQLExp)] -- additional column extractors
)
-processDistinctOnCol pfx neCols = (distOnExp, colExtrs)
+processDistinctOnColumns pfx neCols = (distOnExp, colExtrs)
where
cols = toList neCols
distOnExp = S.DistinctOn $ map (S.SEIden . toIden . mkQColAls) cols
- mkQCol c = S.mkQIdenExp (mkBaseTableAls pfx) $ toIden c
- mkQColAls = S.Alias . mkBaseTableColAls pfx
+ mkQCol c = S.mkQIdenExp (mkBaseTableAlias pfx) $ toIden c
+ mkQColAls = S.Alias . mkBaseTableColumnAlias pfx
colExtrs = flip map cols $ mkQColAls &&& mkQCol
+type SimilarArrayFields = HM.HashMap FieldName [FieldName]
-mkEmptyBaseNode :: Iden -> SelectFrom -> BaseNode
-mkEmptyBaseNode pfx selectFrom =
- BaseNode pfx Nothing fromItem (S.BELit True) Nothing Nothing
- Nothing selOne HM.empty HM.empty HM.empty
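+-- | Group array relations that target the same relationship with the same
+-- select arguments: each such field name is mapped to the list of all its
+-- "similar" sibling fields (including the synthetic order-by field for
+-- aggregate order-bys on that relationship), so that they can share a single
+-- source prefix (cf. 'mkArrayRelationSourcePrefix' above).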
+mkSimilarArrayFields
+ :: Eq v
+ => AnnFieldsG v
+ -> Maybe (NE.NonEmpty (AnnOrderByItemG v))
+ -> SimilarArrayFields
+mkSimilarArrayFields annFields maybeOrderBys =
+ HM.fromList $ flip map allTuples $
+ \(relNameAndArgs, fieldName) -> (fieldName, getSimilarFields relNameAndArgs)
where
- selOne = HM.singleton (S.Alias $ pfx <> Iden "__one") (S.SEUnsafe "1")
- fromItem = selFromToFromItem pfx selectFrom
+ getSimilarFields relNameAndArgs = map snd $ filter ((== relNameAndArgs) . fst) allTuples
+ allTuples = arrayRelationTuples <> aggOrderByRelationTuples
+ arrayRelationTuples =
+ let arrayFields = mapMaybe getAnnArr annFields
+ in flip map arrayFields $
+ \(f, relSel) -> (getArrayRelNameAndSelectArgs relSel, f)
-aggSelToArrNode :: Prefixes -> FieldName -> ArrRelAgg -> ArrNode
-aggSelToArrNode pfxs als aggSel =
- ArrNode [extr] colMapping mergedBN
+ aggOrderByRelationTuples =
+ let mkItem (relName, fieldName) = ( (relName, noSelectArgs)
+ , fieldName
+ )
+ in map mkItem $ maybe []
+ (mapMaybe (fetchAggOrderByRels . obiColumn) . toList) maybeOrderBys
+
+ fetchAggOrderByRels (AOCArrayAggregation ri _ _) =
+ Just (riName ri, mkOrderByFieldName $ riName ri)
+ fetchAggOrderByRels _ = Nothing
+
+getArrayRelNameAndSelectArgs :: ArraySelectG v -> (RelName, SelectArgsG v)
+getArrayRelNameAndSelectArgs = \case
+ ASSimple r -> (aarRelationshipName r, _asnArgs $ aarAnnSelect r)
+ ASAggregate r -> (aarRelationshipName r, _asnArgs $ aarAnnSelect r)
+ ASConnection r -> (aarRelationshipName r, _asnArgs $ _csSelect $ aarAnnSelect r)
+
+getAnnArr :: (a, AnnFieldG v) -> Maybe (a, ArraySelectG v)
+getAnnArr (f, annFld) = case annFld of
+ AFArrayRelation (ASConnection _) -> Nothing
+ AFArrayRelation ar -> Just (f, ar)
+ _ -> Nothing
+
+
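+-- | Run a writer action and post-process the 'JoinTree' it emits: 'pass'
+-- applies the supplied updater to the accumulated tree together with the
+-- action's auxiliary result. The @withWrite*@ helpers below use this to wrap
+-- a child's join tree into the appropriate relation / computed-field node.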
+withWriteJoinTree
+ :: (MonadWriter JoinTree m)
+ => (JoinTree -> b -> JoinTree)
+ -> m (a, b)
+ -> m a
+withWriteJoinTree joinTreeUpdater action =
+ pass $ do
+ (out, result) <- action
+ let fromJoinTree joinTree =
+ joinTreeUpdater joinTree result
+ pure (out, fromJoinTree)
+
+withWriteObjectRelation
+ :: (MonadWriter JoinTree m)
+ => m ( ObjectRelationSource
+ , HM.HashMap S.Alias S.SQLExp
+ , a
+ )
+ -> m a
+withWriteObjectRelation action =
+ withWriteJoinTree updateJoinTree $ do
+ (source, nodeExtractors, out) <- action
+ pure (out, (source, nodeExtractors))
where
- AnnRelG _ colMapping annSel = aggSel
- AnnSelG aggFlds tabFrm tabPerm tabArgs strfyNum = annSel
- fldAls = S.Alias $ toIden als
+ updateJoinTree joinTree (source, nodeExtractors) =
+ let selectNode = SelectNode nodeExtractors joinTree
+ in mempty{_jtObjectRelations = HM.singleton source selectNode}
- extr = flip S.Extractor (Just fldAls) $ S.applyJsonBuildObj $
- concatMap selFldToExtr aggFlds
-
- permLimit = _tpLimit tabPerm
- ordBy = _bnOrderBy mergedBN
-
- allBNs = map mkAggBaseNode aggFlds
- emptyBN = mkEmptyBaseNode (_pfThis pfxs) tabFrm
- mergedBN = foldr mergeBaseNodes emptyBN allBNs
-
- mkAggBaseNode (fn, selFld) =
- mkBaseNode subQueryReq pfxs fn selFld tabFrm tabPerm tabArgs strfyNum
-
- selFldToExtr (FieldName t, fld) = (:) (S.SELit t) $ pure $ case fld of
- TAFAgg flds -> aggFldToExp flds
- TAFNodes _ ->
- withJsonAggExtr subQueryReq permLimit ordBy $ S.Alias $ Iden t
- TAFExp e ->
- -- bool_or to force aggregation
- S.SEFnApp "coalesce"
- [ S.SELit e , S.SEUnsafe "bool_or('true')::text"] Nothing
-
- subQueryReq = hasAggFld aggFlds
-
-hasAggFld :: Foldable t => t (a, TableAggFldG v) -> Bool
-hasAggFld = any (isTabAggFld . snd)
+withWriteArrayRelation
+ :: (MonadWriter JoinTree m)
+ => m ( ArrayRelationSource
+ , S.Extractor
+ , HM.HashMap S.Alias S.SQLExp
+ , a
+ )
+ -> m a
+withWriteArrayRelation action =
+ withWriteJoinTree updateJoinTree $ do
+ (source, topExtractor, nodeExtractors, out) <- action
+ pure (out, (source, topExtractor, nodeExtractors))
where
- isTabAggFld (TAFAgg _) = True
- isTabAggFld _ = False
+ updateJoinTree joinTree (source, topExtractor, nodeExtractors) =
+ let arraySelectNode = ArraySelectNode [topExtractor] $
+ SelectNode nodeExtractors joinTree
+ in mempty{_jtArrayRelations = HM.singleton source arraySelectNode}
-mkArrNodeInfo
- :: Iden
+withWriteArrayConnection
+ :: (MonadWriter JoinTree m)
+ => m ( ArrayConnectionSource
+ , S.Extractor
+ , HM.HashMap S.Alias S.SQLExp
+ , a
+ )
+ -> m a
+withWriteArrayConnection action =
+ withWriteJoinTree updateJoinTree $ do
+ (source, topExtractor, nodeExtractors, out) <- action
+ pure (out, (source, topExtractor, nodeExtractors))
+ where
+ updateJoinTree joinTree (source, topExtractor, nodeExtractors) =
+ let arraySelectNode = ArraySelectNode [topExtractor] $
+ SelectNode nodeExtractors joinTree
+ in mempty{_jtArrayConnections = HM.singleton source arraySelectNode}
+
+withWriteComputedFieldTableSet
+ :: (MonadWriter JoinTree m)
+ => m ( ComputedFieldTableSetSource
+ , HM.HashMap S.Alias S.SQLExp
+ , a
+ )
+ -> m a
+withWriteComputedFieldTableSet action =
+ withWriteJoinTree updateJoinTree $ do
+ (source, nodeExtractors, out) <- action
+ pure (out, (source, nodeExtractors))
+ where
+ updateJoinTree joinTree (source, nodeExtractors) =
+ let selectNode = SelectNode nodeExtractors joinTree
+ in mempty{_jtComputedFieldTableSets = HM.singleton source selectNode}
+
+
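+-- | Informal note: this computes the select source (from/where/order-by/limit)
+-- and the node-level extractors for a non-aggregate selection; relationships
+-- encountered while processing the fields are accumulated into the surrounding
+-- 'JoinTree' through 'MonadWriter'.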
+processAnnSimpleSelect
+ :: forall m. ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => SourcePrefixes
-> FieldName
- -> ArrRelCtx
- -> ArrNodeItem
- -> ArrNodeInfo
-mkArrNodeInfo pfx parAls (ArrRelCtx arrFlds obRels) = \case
- ANIField aggFld@(fld, annArrSel) ->
- let (rn, tabArgs) = fetchRNAndTArgs annArrSel
- similarFlds = getSimilarAggFlds rn tabArgs $ L.delete aggFld
- similarFldNames = map fst similarFlds
- similarOrdByFound = rn `elem` obRels && tabArgs == noTableArgs
- ordByFldName = mkOrderByFieldName rn
- extraOrdByFlds = bool [] [ordByFldName] similarOrdByFound
- sortedFlds = L.sort $ fld : (similarFldNames <> extraOrdByFlds)
- alias = S.Alias $ mkUniqArrRelAls parAls sortedFlds
- prefix = mkArrRelTableAls pfx parAls sortedFlds
- in ArrNodeInfo alias prefix $
- subQueryRequired similarFlds similarOrdByFound
- ANIAggOrdBy rn ->
- let similarFlds = map fst $ getSimilarAggFlds rn noTableArgs id
- ordByFldName = mkOrderByFieldName rn
- sortedFlds = L.sort $ ordByFldName:similarFlds
- alias = S.Alias $ mkUniqArrRelAls parAls sortedFlds
- prefix = mkArrRelTableAls pfx parAls sortedFlds
- in ArrNodeInfo alias prefix False
+ -> PermissionLimitSubQuery
+ -> AnnSimpleSel
+ -> m ( SelectSource
+ , HM.HashMap S.Alias S.SQLExp
+ )
+processAnnSimpleSelect sourcePrefixes fieldAlias permLimitSubQuery annSimpleSel = do
+ (selectSource, orderByAndDistinctExtrs, _) <-
+ processSelectParams sourcePrefixes fieldAlias similarArrayFields tableFrom
+ permLimitSubQuery tablePermissions tableArgs
+ annFieldsExtr <- processAnnFields (_pfThis sourcePrefixes) fieldAlias similarArrayFields annSelFields
+ let allExtractors = HM.fromList $ annFieldsExtr : orderByAndDistinctExtrs
+ pure (selectSource, allExtractors)
where
- getSimilarAggFlds rn tabArgs f =
- flip filter (f arrFlds) $ \(_, annArrSel) ->
- let (lrn, lTabArgs) = fetchRNAndTArgs annArrSel
- in (lrn == rn) && (lTabArgs == tabArgs)
+ AnnSelectG annSelFields tableFrom tablePermissions tableArgs _ = annSimpleSel
+ similarArrayFields =
+ mkSimilarArrayFields annSelFields $ _saOrderBy tableArgs
- subQueryRequired similarFlds hasSimOrdBy =
- hasSimOrdBy || any hasAgg similarFlds
+processAnnAggregateSelect
+ :: forall m. ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => SourcePrefixes
+ -> FieldName
+ -> AnnAggregateSelect
+ -> m ( SelectSource
+ , HM.HashMap S.Alias S.SQLExp
+ , S.Extractor
+ )
+processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do
+ (selectSource, orderByAndDistinctExtrs, _) <-
+ processSelectParams sourcePrefixes fieldAlias similarArrayFields tableFrom
+ permLimitSubQuery tablePermissions tableArgs
+ let thisSourcePrefix = _pfThis sourcePrefixes
+ processedFields <- forM aggSelFields $ \(fieldName, field) ->
+ (fieldName,) <$>
+ case field of
+ TAFAgg aggFields ->
+ pure ( aggregateFieldsToExtractorExps thisSourcePrefix aggFields
+ , aggregateFieldToExp aggFields
+ )
+ TAFNodes annFields -> do
+ annFieldExtr <- processAnnFields thisSourcePrefix fieldName similarArrayFields annFields
+ pure ( [annFieldExtr]
+ , withJsonAggExtr permLimitSubQuery (_ssOrderBy selectSource) $
+ S.Alias $ toIden fieldName
+ )
+ TAFExp e ->
+ pure ( []
+ , withForceAggregation S.textTypeAnn $ S.SELit e
+ )
- hasAgg (_, ASSimple _) = False
- hasAgg (_, ASAgg (AnnRelG _ _ annSel)) = hasAggFld $ _asnFields annSel
+ let topLevelExtractor =
+ flip S.Extractor (Just $ S.Alias $ toIden fieldAlias) $
+ S.applyJsonBuildObj $ flip concatMap (map (second snd) processedFields) $
+ \(FieldName fieldText, fieldExp) -> [S.SELit fieldText, fieldExp]
+ nodeExtractors = HM.fromList $
+ concatMap (fst . snd) processedFields <> orderByAndDistinctExtrs
- fetchRNAndTArgs (ASSimple (AnnRelG rn _ annSel)) =
- (rn, _asnArgs annSel)
- fetchRNAndTArgs (ASAgg (AnnRelG rn _ annSel)) =
- (rn, _asnArgs annSel)
-
-fetchOrdByAggRels
- :: Maybe (NE.NonEmpty AnnOrderByItem)
- -> [RelName]
-fetchOrdByAggRels orderByM = fromMaybe [] relNamesM
+ pure (selectSource, nodeExtractors, topLevelExtractor)
where
- relNamesM =
- mapMaybe (fetchAggOrdByRels . obiColumn) . toList <$> orderByM
+ AnnSelectG aggSelFields tableFrom tablePermissions tableArgs _ = annAggSel
+ permLimit = _tpLimit tablePermissions
+ orderBy = _saOrderBy tableArgs
+ permLimitSubQuery = mkPermissionLimitSubQuery permLimit aggSelFields orderBy
+ similarArrayFields = HM.unions $
+ flip map (map snd aggSelFields) $ \case
+ TAFAgg _ -> mempty
+ TAFNodes annFlds ->
+ mkSimilarArrayFields annFlds orderBy
+ TAFExp _ -> mempty
- fetchAggOrdByRels (AOCAgg ri _ _) = Just $ riName ri
- fetchAggOrdByRels _ = Nothing
-
-mkOrdByItems
- :: Iden -> FieldName
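+-- | Informal note: a permission limit appears to force a limiting subquery only
+-- when the aggregate selection contains aggregate fields or an aggregate
+-- order-by; otherwise the permission limit is simply compared against the
+-- input limit (see @compareLimits@ in 'processSelectParams').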
+mkPermissionLimitSubQuery
+ :: Maybe Int
+ -> TableAggregateFields
-> Maybe (NE.NonEmpty AnnOrderByItem)
- -> Bool
- -> ArrRelCtx
- -- extractors
- -> ( [(S.Alias, S.SQLExp)]
- -- object relation nodes
- , HM.HashMap RelName ObjNode
- -- array relation aggregate nodes
- , HM.HashMap S.Alias ArrNode
- -- final order by expression
- , Maybe S.OrderByExp
- )
-mkOrdByItems pfx fldAls orderByM strfyNum arrRelCtx =
- (obExtrs, ordByObjsMap, ordByArrsMap, ordByExpM)
+ -> PermissionLimitSubQuery
+mkPermissionLimitSubQuery permLimit aggFields orderBys =
+ case permLimit of
+ Nothing -> PLSQNotRequired
+ Just limit ->
+ if hasAggregateField || hasAggOrderBy then PLSQRequired limit
+ else PLSQNotRequired
where
- procAnnOrdBy' = processAnnOrderByItem pfx fldAls arrRelCtx strfyNum
- procOrdByM =
- unzip3 . map procAnnOrdBy' . toList <$> orderByM
+ hasAggregateField = flip any (map snd aggFields) $
+ \case
+ TAFAgg _ -> True
+ _ -> False
- obExtrs = maybe [] (^. _1) procOrdByM
- ordByExpM = S.OrderByExp . (^. _2) <$> procOrdByM
+ hasAggOrderBy = case orderBys of
+ Nothing -> False
+ Just l -> flip any (concatMap toList $ toList l) $
+ \case
+ AOCArrayAggregation{} -> True
+ _ -> False
- ordByObjs = mapMaybe getOrdByRelNode $ maybe [] (^. _3) procOrdByM
- ordByObjsMap = HM.fromListWith mergeObjNodes ordByObjs
-
- ordByAggArrs = mapMaybe getOrdByAggNode $ maybe [] (^. _3) procOrdByM
- ordByArrsMap = HM.fromListWith mergeArrNodes ordByAggArrs
-
- getOrdByRelNode (OBNObjNode name node) = Just (name, node)
- getOrdByRelNode _ = Nothing
-
- getOrdByAggNode (OBNArrNode als node) = Just (als, node)
- getOrdByAggNode _ = Nothing
-
-mkBaseNode
- :: Bool
- -> Prefixes
+processArrayRelation
+ :: forall m. ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => SourcePrefixes
-> FieldName
- -> TableAggFld
- -> SelectFrom
- -> TablePerm
- -> TableArgs
- -> Bool
- -> BaseNode
-mkBaseNode subQueryReq pfxs fldAls annSelFlds selectFrom
- tablePerm tableArgs strfyNum =
- BaseNode thisPfx distExprM fromItem finalWhere ordByExpM finalLimit offsetM
- allExtrs allObjsWithOb allArrsWithOb computedFields
- where
- Prefixes thisPfx baseTablepfx = pfxs
- TablePerm permFilter permLimit = tablePerm
- TableArgs whereM orderByM inpLimitM offsetM distM = tableArgs
+ -> S.Alias
+ -> ArraySelect
+ -> m ()
+processArrayRelation sourcePrefixes fieldAlias relAlias arrSel =
+ case arrSel of
+ ASSimple annArrRel -> withWriteArrayRelation $ do
+ let AnnRelationSelectG _ colMapping sel = annArrRel
+ permLimitSubQuery =
+ maybe PLSQNotRequired PLSQRequired $ _tpLimit $ _asnPerm sel
+ (source, nodeExtractors) <-
+ processAnnSimpleSelect sourcePrefixes fieldAlias permLimitSubQuery sel
+ let topExtr = asJsonAggExtr JASMultipleRows (S.toAlias fieldAlias)
+ permLimitSubQuery $ _ssOrderBy source
+ pure ( ArrayRelationSource relAlias colMapping source
+ , topExtr
+ , nodeExtractors
+ , ()
+ )
+ ASAggregate aggSel -> withWriteArrayRelation $ do
+ let AnnRelationSelectG _ colMapping sel = aggSel
+ (source, nodeExtractors, topExtr) <-
+ processAnnAggregateSelect sourcePrefixes fieldAlias sel
+ pure ( ArrayRelationSource relAlias colMapping source
+ , topExtr
+ , nodeExtractors
+ , ()
+ )
+ ASConnection connSel -> withWriteArrayConnection $ do
+ let AnnRelationSelectG _ colMapping sel = connSel
+ (source, topExtractor, nodeExtractors) <-
+ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping sel
+ pure ( source
+ , topExtractor
+ , nodeExtractors
+ , ()
+ )
- -- if sub query is used, then only use input limit
+processSelectParams
+ :: forall m. ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => SourcePrefixes
+ -> FieldName
+ -> SimilarArrayFields
+ -> SelectFrom
+ -> PermissionLimitSubQuery
+ -> TablePerm
+ -> SelectArgs
+ -> m ( SelectSource
+ , [(S.Alias, S.SQLExp)]
+ , Maybe S.SQLExp -- Order by cursor
+ )
+processSelectParams sourcePrefixes fieldAlias similarArrFields selectFrom
+ permLimitSubQ tablePermissions tableArgs = do
+ maybeOrderBy <- mapM
+ (processOrderByItems thisSourcePrefix fieldAlias similarArrFields)
+ orderByM
+ let fromItem = selectFromToFromItem (_pfBase sourcePrefixes) selectFrom
+ (maybeDistinct, distinctExtrs) =
+ maybe (Nothing, []) (first Just) $ processDistinctOnColumns thisSourcePrefix <$> distM
+ finalWhere = toSQLBoolExp (selectFromToQual selectFrom) $
+ maybe permFilter (andAnnBoolExps permFilter) whereM
+ selectSource = SelectSource thisSourcePrefix fromItem maybeDistinct finalWhere
+ ((^. _2) <$> maybeOrderBy) finalLimit offsetM
+ orderByExtrs = maybe [] (^. _1) maybeOrderBy
+ pure ( selectSource
+ , orderByExtrs <> distinctExtrs
+ , (^. _3) <$> maybeOrderBy
+ )
+ where
+ thisSourcePrefix = _pfThis sourcePrefixes
+ SelectArgs whereM orderByM inpLimitM offsetM distM = tableArgs
+ TablePerm permFilter permLimit = tablePermissions
+ finalLimit =
+ -- if sub query is required, then only use input limit
-- because permission limit is being applied in subquery
-- else compare input and permission limits
- finalLimit =
- if subQueryReq then inpLimitM
- else withPermLimit
+ case permLimitSubQ of
+ PLSQRequired _ -> inpLimitM
+ PLSQNotRequired -> compareLimits
- withPermLimit =
+ compareLimits =
case (inpLimitM, permLimit) of
(inpLim, Nothing) -> inpLim
(Nothing, permLim) -> permLim
(Just inp, Just perm) -> Just $ if inp < perm then inp else perm
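+
+-- | Informal note: besides the order-by extractors and the final 'S.OrderByExp',
+-- this also builds a cursor expression (a @json_build_object@ over the order-by
+-- values) that connection selects can reuse; object-relation and aggregate
+-- order-by elements are written into the 'JoinTree' like any other relationship.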
+processOrderByItems
+ :: forall m. ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => Iden
+ -> FieldName
+ -> SimilarArrayFields
+ -> NE.NonEmpty AnnOrderByItem
+ -> m ( [(S.Alias, S.SQLExp)] -- Order by Extractors
+ , S.OrderByExp
+ , S.SQLExp -- The cursor expression
+ )
+processOrderByItems sourcePrefix' fieldAlias' similarArrayFields orderByItems = do
+ orderByItemExps <- forM orderByItems processAnnOrderByItem
+ let orderByExp = S.OrderByExp $ toOrderByExp <$> orderByItemExps
+ orderByExtractors = concat $ toList $ map snd . toList <$> orderByItemExps
+ cursor = mkCursorExp $ toList orderByItemExps
+ pure (orderByExtractors, orderByExp, cursor)
+ where
+ processAnnOrderByItem :: AnnOrderByItem -> m OrderByItemExp
+ processAnnOrderByItem orderByItem =
+ forM orderByItem $ \ordByCol -> (ordByCol,) <$>
+ processAnnOrderByElement sourcePrefix' fieldAlias' ordByCol
- aggOrdByRelNames = fetchOrdByAggRels orderByM
+ processAnnOrderByElement
+ :: Iden -> FieldName -> AnnOrderByElement S.SQLExp -> m (S.Alias, S.SQLExp)
+ processAnnOrderByElement sourcePrefix fieldAlias annObCol = do
+ let ordByAlias = mkAnnOrderByAlias sourcePrefix fieldAlias similarArrayFields annObCol
+ (ordByAlias, ) <$> case annObCol of
+ AOCColumn pgColInfo -> pure $
+ S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ toIden $ pgiColumn pgColInfo
- (allExtrs, allObjsWithOb, allArrsWithOb, computedFields, ordByExpM) =
- case annSelFlds of
- TAFNodes flds ->
- let arrFlds = mapMaybe getAnnArr flds
- arrRelCtx = mkArrRelCtx arrFlds
- selExtr = buildJsonObject thisPfx fldAls arrRelCtx strfyNum flds
- -- all object relationships
- objNodes = HM.fromListWith mergeObjNodes $
- map mkObjItem (mapMaybe getAnnObj flds)
- -- all array items (array relationships + aggregates)
- arrNodes = HM.fromListWith mergeArrNodes $
- map (mkArrItem arrRelCtx) arrFlds
- -- all computed fields with table returns
- computedFieldNodes = HM.fromList $ map mkComputedFieldTable $
- mapMaybe getComputedFieldTable flds
+ AOCObjectRelation relInfo relFilter rest -> withWriteObjectRelation $ do
+ let RelInfo relName _ colMapping relTable _ _ = relInfo
+ relSourcePrefix = mkObjectRelationTableAlias sourcePrefix relName
+ fieldName = mkOrderByFieldName relName
+ (relOrderByAlias, relOrdByExp) <-
+ processAnnOrderByElement relSourcePrefix fieldName rest
+ let selectSource = SelectSource relSourcePrefix
+ (S.FISimple relTable Nothing) Nothing
+ (toSQLBoolExp (S.QualTable relTable) relFilter)
+ Nothing Nothing Nothing
+ relSource = ObjectRelationSource relName colMapping selectSource
+ pure ( relSource
+ , HM.singleton relOrderByAlias relOrdByExp
+ , S.mkQIdenExp relSourcePrefix relOrderByAlias
+ )
- (obExtrs, ordByObjs, ordByArrs, obeM)
- = mkOrdByItems' arrRelCtx
- allObjs = HM.unionWith mergeObjNodes objNodes ordByObjs
- allArrs = HM.unionWith mergeArrNodes arrNodes ordByArrs
+ AOCArrayAggregation relInfo relFilter aggOrderBy -> withWriteArrayRelation $ do
+ let RelInfo relName _ colMapping relTable _ _ = relInfo
+ fieldName = mkOrderByFieldName relName
+ relSourcePrefix = mkArrayRelationSourcePrefix sourcePrefix fieldAlias
+ similarArrayFields fieldName
+ relAlias = mkArrayRelationAlias fieldAlias similarArrayFields fieldName
+ (topExtractor, fields) = mkAggregateOrderByExtractorAndFields aggOrderBy
+ selectSource = SelectSource relSourcePrefix
+ (S.FISimple relTable Nothing) Nothing
+ (toSQLBoolExp (S.QualTable relTable) relFilter)
+ Nothing Nothing Nothing
+ relSource = ArrayRelationSource relAlias colMapping selectSource
+ pure ( relSource
+ , topExtractor
+ , HM.fromList $ aggregateFieldsToExtractorExps relSourcePrefix fields
+ , S.mkQIdenExp relSourcePrefix (mkAggregateOrderByAlias aggOrderBy)
+ )
- in ( HM.fromList $ selExtr:obExtrs <> distExtrs
- , allObjs
- , allArrs
- , computedFieldNodes
- , obeM
- )
- TAFAgg tabAggs ->
- let extrs = concatMap (fetchExtrFromAggFld . snd) tabAggs
- (obExtrs, ordByObjs, ordByArrs, obeM)
- = mkOrdByItems' emptyArrRelCtx
- in ( HM.fromList $ extrs <> obExtrs <> distExtrs
- , ordByObjs
- , ordByArrs
- , HM.empty
- , obeM
- )
- TAFExp _ ->
- let (obExtrs, ordByObjs, ordByArrs, obeM)
- = mkOrdByItems' emptyArrRelCtx
- in (HM.fromList obExtrs, ordByObjs, ordByArrs, HM.empty, obeM)
+ toOrderByExp :: OrderByItemExp -> S.OrderByItem
+ toOrderByExp orderByItemExp =
+ let OrderByItemG obTyM expAlias obNullsM = fst . snd <$> orderByItemExp
+ in S.OrderByItem (S.SEIden $ toIden expAlias)
+ (unOrderType <$> obTyM) (unNullsOrder <$> obNullsM)
- fetchExtrFromAggFld (AFCount cty) = countTyToExps cty
- fetchExtrFromAggFld (AFOp aggOp) = aggOpToExps aggOp
- fetchExtrFromAggFld (AFExp _) = []
-
- countTyToExps S.CTStar = []
- countTyToExps (S.CTSimple cols) = colsToExps cols
- countTyToExps (S.CTDistinct cols) = colsToExps cols
+ mkCursorExp :: [OrderByItemExp] -> S.SQLExp
+ mkCursorExp orderByItemExps =
+ S.applyJsonBuildObj $ flip concatMap orderByItemExps $
+ \orderByItemExp ->
+ let OrderByItemG _ (annObCol, (_, valExp)) _ = orderByItemExp
+ in annObColToJSONField valExp annObCol
+ where
+ annObColToJSONField valExp = \case
+ AOCColumn pgCol -> [S.SELit $ getPGColTxt $ pgiColumn pgCol, valExp]
+ AOCObjectRelation relInfo _ obCol ->
+ [ S.SELit $ relNameToTxt $ riName relInfo
+ , S.applyJsonBuildObj $ annObColToJSONField valExp obCol
+ ]
+ AOCArrayAggregation relInfo _ aggOrderBy ->
+ [ S.SELit $ relNameToTxt (riName relInfo) <> "_aggregate"
+ , S.applyJsonBuildObj $
+ case aggOrderBy of
+ AAOCount -> [S.SELit "count", valExp]
+ AAOOp opText colInfo ->
+ [ S.SELit opText
+ , S.applyJsonBuildObj [S.SELit $ getPGColTxt $ pgiColumn colInfo, valExp]
+ ]
+ ]
+aggregateFieldsToExtractorExps
+ :: Iden -> AggregateFields -> [(S.Alias, S.SQLExp)]
+aggregateFieldsToExtractorExps sourcePrefix aggregateFields =
+ flip concatMap aggregateFields $ \(_, field) ->
+ case field of
+ AFCount cty -> case cty of
+ S.CTStar -> []
+ S.CTSimple cols -> colsToExps cols
+ S.CTDistinct cols -> colsToExps cols
+ AFOp aggOp -> aggOpToExps aggOp
+ AFExp _ -> []
+ where
colsToExps = mapMaybe (mkColExp . PCFCol)
-
- aggOpToExps = mapMaybe (mkColExp . snd) . _aoFlds
+ aggOpToExps = mapMaybe (mkColExp . snd) . _aoFields
mkColExp (PCFCol c) =
- let qualCol = S.mkQIdenExp (mkBaseTableAls thisPfx) (toIden c)
+ let qualCol = S.mkQIdenExp (mkBaseTableAlias sourcePrefix) (toIden c)
colAls = toIden c
in Just (S.Alias colAls, qualCol)
mkColExp _ = Nothing
- finalWhere = toSQLBoolExp tableQual $
- maybe permFilter (andAnnBoolExps permFilter) whereM
- fromItem = selFromToFromItem baseTablepfx selectFrom
- tableQual = selFromToQual selectFrom
+processAnnFields
+ :: forall m. ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => Iden
+ -> FieldName
+ -> SimilarArrayFields
+ -> AnnFields
+ -> m (S.Alias, S.SQLExp)
+processAnnFields sourcePrefix fieldAlias similarArrFields annFields = do
+ fieldExps <- forM annFields $ \(fieldName, field) ->
+ (fieldName,) <$>
+ case field of
+ AFExpression t -> pure $ S.SELit t
- mkArrRelCtx arrSels = ArrRelCtx arrSels aggOrdByRelNames
+ AFNodeId tn pKeys -> pure $ mkNodeId tn pKeys
- mkOrdByItems' = mkOrdByItems thisPfx fldAls orderByM strfyNum
+ AFColumn c -> toSQLCol c
- distItemsM = processDistinctOnCol thisPfx <$> distM
- distExprM = fst <$> distItemsM
- distExtrs = maybe [] snd distItemsM
+ AFRemote _ -> pure $ S.SELit "null: remote field selected"
- -- process an object relationship
- mkObjItem (fld, objSel) =
- let relName = aarName objSel
- objNodePfx = mkObjRelTableAls thisPfx $ aarName objSel
- objNode = mkObjNode (Prefixes objNodePfx thisPfx) (fld, objSel)
- in (relName, objNode)
+ AFObjectRelation objSel -> withWriteObjectRelation $ do
+ let AnnRelationSelectG relName relMapping annSel = objSel
+ objRelSourcePrefix = mkObjectRelationTableAlias sourcePrefix relName
+ (selectSource, extractors) <- processAnnSimpleSelect (mkSourcePrefixes objRelSourcePrefix)
+ fieldName PLSQNotRequired annSel
+ let objRelSource = ObjectRelationSource relName relMapping selectSource
+ pure ( objRelSource
+ , extractors
+ , S.mkQIdenExp objRelSourcePrefix fieldName
+ )
- -- process an array/array-aggregate item
- mkArrItem arrRelCtx (fld, arrSel) =
- let ArrNodeInfo arrAls arrPfx subQReq =
- mkArrNodeInfo thisPfx fldAls arrRelCtx $ ANIField (fld, arrSel)
- arrNode = mkArrNode subQReq (Prefixes arrPfx thisPfx) (fld, arrSel)
- in (arrAls, arrNode)
+ AFArrayRelation arrSel -> do
+ let arrRelSourcePrefix = mkArrayRelationSourcePrefix sourcePrefix fieldAlias similarArrFields fieldName
+ arrRelAlias = mkArrayRelationAlias fieldAlias similarArrFields fieldName
+ processArrayRelation (mkSourcePrefixes arrRelSourcePrefix) fieldName arrRelAlias arrSel
+ pure $ S.mkQIdenExp arrRelSourcePrefix fieldName
- -- process a computed field, which returns a table
- mkComputedFieldTable (fld, jsonAggSelect, sel) =
- let prefixes = Prefixes (mkComputedFieldTableAls thisPfx fld) thisPfx
- baseNode = annSelToBaseNode False prefixes fld sel
- in (fld, CFTableNode jsonAggSelect baseNode)
+ AFComputedField (CFSScalar scalar) -> fromScalarComputedField scalar
- getAnnObj (f, annFld) = case annFld of
- FObj ob -> Just (f, ob)
- _ -> Nothing
+ AFComputedField (CFSTable selectTy sel) -> withWriteComputedFieldTableSet $ do
+ let computedFieldSourcePrefix =
+ mkComputedFieldTableAlias sourcePrefix fieldName
+ (selectSource, nodeExtractors) <-
+ processAnnSimpleSelect (mkSourcePrefixes computedFieldSourcePrefix)
+ fieldName PLSQNotRequired sel
+ let computedFieldTableSetSource =
+ ComputedFieldTableSetSource fieldName selectTy selectSource
+ pure ( computedFieldTableSetSource
+ , nodeExtractors
+ , S.mkQIdenExp computedFieldSourcePrefix fieldName
+ )
- getAnnArr (f, annFld) = case annFld of
- FArr ar -> Just (f, ar)
- _ -> Nothing
-
- getComputedFieldTable (f, annFld) = case annFld of
- FComputedField (CFSTable jas sel) -> Just (f, jas, sel)
- _ -> Nothing
-
-annSelToBaseNode :: Bool -> Prefixes -> FieldName -> AnnSimpleSel -> BaseNode
-annSelToBaseNode subQueryReq pfxs fldAls annSel =
- mkBaseNode subQueryReq pfxs fldAls (TAFNodes selFlds) tabFrm tabPerm tabArgs strfyNum
+ pure $
+      -- postgres ignores anything beyond 63 chars for an identifier
+      -- in this case, we'll need to use the json_build_object function
+      -- json_build_object is slower than row_to_json, hence it is only
+      -- used when needed
+ if any ( (> 63) . T.length . getFieldNameTxt . fst ) fieldExps then
+ withJsonBuildObj fieldAlias $ concatMap toJsonBuildObjectExps fieldExps
+ else withRowToJSON fieldAlias $ map toRowToJsonExtr fieldExps
where
- AnnSelG selFlds tabFrm tabPerm tabArgs strfyNum = annSel
+ mkSourcePrefixes newPrefix = SourcePrefixes newPrefix sourcePrefix
+ toJsonBuildObjectExps (fieldName, fieldExp) =
+ [S.SELit $ getFieldNameTxt fieldName, fieldExp]
-mkObjNode :: Prefixes -> (FieldName, ObjSel) -> ObjNode
-mkObjNode pfxs (fldName, AnnRelG _ rMapn rAnnSel) =
- ObjNode rMapn $ annSelToBaseNode False pfxs fldName rAnnSel
+ toRowToJsonExtr (fieldName, fieldExp) =
+ S.Extractor fieldExp $ Just $ S.toAlias fieldName
-mkArrNode :: Bool -> Prefixes -> (FieldName, ArrSel) -> ArrNode
-mkArrNode subQueryReq pfxs (fldName, annArrSel) = case annArrSel of
- ASSimple annArrRel ->
- let bn = annSelToBaseNode subQueryReq pfxs fldName $ aarAnnSel annArrRel
- permLimit = getPermLimit $ aarAnnSel annArrRel
- extr = asJsonAggExtr JASMultipleRows (S.toAlias fldName) subQueryReq permLimit $
- _bnOrderBy bn
- in ArrNode [extr] (aarMapping annArrRel) bn
+ toSQLCol :: AnnColumnField -> m S.SQLExp
+ toSQLCol (AnnColumnField col asText colOpM) = do
+ strfyNum <- ask
+ pure $ toJSONableExp strfyNum (pgiType col) asText $ withColumnOp colOpM $
+ S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ pgiColumn col
- ASAgg annAggSel -> aggSelToArrNode pfxs fldName annAggSel
+ fromScalarComputedField :: ComputedFieldScalarSelect S.SQLExp -> m S.SQLExp
+ fromScalarComputedField computedFieldScalar = do
+ strfyNum <- ask
+ pure $ toJSONableExp strfyNum (PGColumnScalar ty) False $ withColumnOp colOpM $
+ S.SEFunction $ S.FunctionExp fn (fromTableRowArgs sourcePrefix args) Nothing
+ where
+ ComputedFieldScalarSelect fn args ty colOpM = computedFieldScalar
+
+ withColumnOp :: Maybe ColumnOp -> S.SQLExp -> S.SQLExp
+ withColumnOp colOpM sqlExp = case colOpM of
+ Nothing -> sqlExp
+ Just (ColumnOp opText cExp) -> S.mkSQLOpExp opText sqlExp cExp
+
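+  -- Informal note: the node id appears to be the base64 encoding of
+  --   json_build_object('table', json_build_object('schema', <schema>, 'name', <name>),
+  --                     'columns', json_build_object(<pk column>, <pk value>, ...))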
+ mkNodeId :: QualifiedTable -> NonEmpty PGColumnInfo -> S.SQLExp
+ mkNodeId (QualifiedObject tableSchema tableName) pkeyColumns =
+ let tableObjectExp = S.applyJsonBuildObj
+ [ S.SELit "schema"
+ , S.SELit (getSchemaTxt tableSchema)
+ , S.SELit "name"
+ , S.SELit (toTxt tableName)
+ ]
+ in encodeBase64 $ flip S.SETyAnn S.textTypeAnn $ S.applyJsonBuildObj
+ [ S.SELit "table", tableObjectExp
+ , S.SELit "columns", mkPrimaryKeyColumnsObjectExp sourcePrefix pkeyColumns
+ ]
injectJoinCond :: S.BoolExp -- ^ Join condition
-> S.BoolExp -- ^ Where condition
@@ -695,28 +829,33 @@ mkJoinCond baseTablepfx colMapn =
foldl' (S.BEBin S.AndOp) (S.BELit True) $ flip map (HM.toList colMapn) $ \(lCol, rCol) ->
S.BECompare S.SEQ (S.mkQIdenExp baseTablepfx lCol) (S.mkSIdenExp rCol)
-baseNodeToSel :: S.BoolExp -> BaseNode -> S.Select
-baseNodeToSel joinCond baseNode =
+generateSQLSelect
+ :: S.BoolExp -- ^ Pre join condition
+ -> SelectSource
+ -> SelectNode
+ -> S.Select
+generateSQLSelect joinCondition selectSource selectNode =
S.mkSelect
- { S.selExtr = [S.Extractor e $ Just a | (a, e) <- HM.toList extrs]
- , S.selFrom = Just $ S.FromExp [joinedFrom]
- , S.selOrderBy = ordByM
- , S.selLimit = S.LimitExp . S.intToSQLExp <$> limitM
- , S.selOffset = S.OffsetExp <$> offsetM
- , S.selDistinct = dExp
+ { S.selExtr = [S.Extractor e $ Just a | (a, e) <- HM.toList extractors]
+ , S.selFrom = Just $ S.FromExp [joinedFrom]
+ , S.selOrderBy = maybeOrderby
+ , S.selLimit = S.LimitExp . S.intToSQLExp <$> maybeLimit
+ , S.selOffset = S.OffsetExp <$> maybeOffset
+ , S.selDistinct = maybeDistinct
}
where
- BaseNode pfx dExp fromItem whr ordByM limitM
- offsetM extrs objRels arrRels computedFields
- = baseNode
- -- this is the table which is aliased as "pfx.base"
- baseSel = S.mkSelect
+ SelectSource sourcePrefix fromItem maybeDistinct whereExp
+ maybeOrderby maybeLimit maybeOffset = selectSource
+ SelectNode extractors joinTree = selectNode
+ JoinTree objectRelations arrayRelations arrayConnections computedFields = joinTree
+ -- this is the table which is aliased as "sourcePrefix.base"
+ baseSelect = S.mkSelect
{ S.selExtr = [S.Extractor (S.SEStar Nothing) Nothing]
, S.selFrom = Just $ S.FromExp [fromItem]
- , S.selWhere = Just $ injectJoinCond joinCond whr
+ , S.selWhere = Just $ injectJoinCond joinCondition whereExp
}
- baseSelAls = S.Alias $ mkBaseTableAls pfx
- baseFromItem = S.FISelect (S.Lateral False) baseSel baseSelAls
+ baseSelectAlias = S.Alias $ mkBaseTableAlias sourcePrefix
+ baseFromItem = S.mkSelFromItem baseSelect baseSelectAlias
    -- creates a single joined FROM item out of two FROM items
leftOuterJoin current new =
@@ -726,54 +865,422 @@ baseNodeToSel joinCond baseNode =
    -- this is the from expression for the final select
joinedFrom :: S.FromItem
joinedFrom = foldl' leftOuterJoin baseFromItem $
- map objNodeToFromItem (HM.elems objRels) <>
- map arrNodeToFromItem (HM.elems arrRels) <>
- map computedFieldNodeToFromItem (HM.toList computedFields)
+ map objectRelationToFromItem (HM.toList objectRelations) <>
+ map arrayRelationToFromItem (HM.toList arrayRelations) <>
+ map arrayConnectionToFromItem (HM.toList arrayConnections) <>
+ map computedFieldToFromItem (HM.toList computedFields)
- objNodeToFromItem :: ObjNode -> S.FromItem
- objNodeToFromItem (ObjNode relMapn relBaseNode) =
- let als = S.Alias $ _bnPrefix relBaseNode
- sel = baseNodeToSel (mkJoinCond baseSelAls relMapn) relBaseNode
- in S.mkLateralFromItem sel als
- arrNodeToFromItem :: ArrNode -> S.FromItem
- arrNodeToFromItem (ArrNode es colMapn bn) =
- let sel = arrNodeToSelect bn es (mkJoinCond baseSelAls colMapn)
- als = S.Alias $ _bnPrefix bn
- in S.mkLateralFromItem sel als
+ objectRelationToFromItem
+ :: (ObjectRelationSource, SelectNode) -> S.FromItem
+ objectRelationToFromItem (objectRelationSource, node) =
+ let ObjectRelationSource _ colMapping source = objectRelationSource
+ alias = S.Alias $ _ssPrefix source
+ select = generateSQLSelect (mkJoinCond baseSelectAlias colMapping) source node
+ in S.mkLateralFromItem select alias
- computedFieldNodeToFromItem :: (FieldName, CFTableNode) -> S.FromItem
- computedFieldNodeToFromItem (fld, CFTableNode jsonAggSelect bn) =
- let internalSel = baseNodeToSel (S.BELit True) bn
- als = S.Alias $ _bnPrefix bn
- extr = asJsonAggExtr jsonAggSelect (S.toAlias fld) False Nothing $
- _bnOrderBy bn
- internalSelFrom = S.mkSelFromItem internalSel als
- sel = S.mkSelect
- { S.selExtr = pure extr
- , S.selFrom = Just $ S.FromExp [internalSelFrom]
+ arrayRelationToFromItem
+ :: (ArrayRelationSource, ArraySelectNode) -> S.FromItem
+ arrayRelationToFromItem (arrayRelationSource, arraySelectNode) =
+ let ArrayRelationSource _ colMapping source = arrayRelationSource
+ alias = S.Alias $ _ssPrefix source
+ select = generateSQLSelectFromArrayNode source arraySelectNode $
+ mkJoinCond baseSelectAlias colMapping
+ in S.mkLateralFromItem select alias
+
+ arrayConnectionToFromItem
+ :: (ArrayConnectionSource, ArraySelectNode) -> S.FromItem
+ arrayConnectionToFromItem (arrayConnectionSource, arraySelectNode) =
+ let selectWith = connectionToSelectWith baseSelectAlias arrayConnectionSource arraySelectNode
+ alias = S.Alias $ _ssPrefix $ _acsSource arrayConnectionSource
+ in S.FISelectWith (S.Lateral True) selectWith alias
+
+ computedFieldToFromItem
+ :: (ComputedFieldTableSetSource, SelectNode) -> S.FromItem
+ computedFieldToFromItem (computedFieldTableSource, node) =
+ let ComputedFieldTableSetSource fieldName selectTy source = computedFieldTableSource
+ internalSelect = generateSQLSelect (S.BELit True) source node
+ extractor = asJsonAggExtr selectTy (S.toAlias fieldName) PLSQNotRequired $
+ _ssOrderBy source
+ alias = S.Alias $ _ssPrefix source
+ select = S.mkSelect
+ { S.selExtr = [extractor]
+ , S.selFrom = Just $ S.FromExp [S.mkSelFromItem internalSelect alias]
}
- in S.mkLateralFromItem sel als
+ in S.mkLateralFromItem select alias
-mkAggSelect :: AnnAggSel -> S.Select
-mkAggSelect annAggSel =
- prefixNumToAliases $ arrNodeToSelect bn extr $ S.BELit True
+generateSQLSelectFromArrayNode
+ :: SelectSource
+ -> ArraySelectNode
+ -> S.BoolExp
+ -> S.Select
+generateSQLSelectFromArrayNode selectSource arraySelectNode joinCondition =
+ S.mkSelect
+ { S.selExtr = topExtractors
+ , S.selFrom = Just $ S.FromExp [selectFrom]
+ }
where
- aggSel = AnnRelG rootRelName HM.empty annAggSel
- rootIden = Iden "root"
- rootPrefix = Prefixes rootIden rootIden
- ArrNode extr _ bn =
- aggSelToArrNode rootPrefix (FieldName "root") aggSel
+ ArraySelectNode topExtractors selectNode = arraySelectNode
+ selectFrom = S.mkSelFromItem
+ (generateSQLSelect joinCondition selectSource selectNode) $
+ S.Alias $ _ssPrefix selectSource
+
+mkAggregateSelect :: AnnAggregateSelect -> S.Select
+mkAggregateSelect annAggSel =
+ let ((selectSource, nodeExtractors, topExtractor), joinTree) =
+ runWriter $ flip runReaderT strfyNum $
+ processAnnAggregateSelect sourcePrefixes rootFieldName annAggSel
+ selectNode = SelectNode nodeExtractors joinTree
+ arrayNode = ArraySelectNode [topExtractor] selectNode
+ in prefixNumToAliases $
+ generateSQLSelectFromArrayNode selectSource arrayNode $ S.BELit True
+ where
+ strfyNum = _asnStrfyNum annAggSel
+ rootFieldName = FieldName "root"
+ rootIden = toIden rootFieldName
+ sourcePrefixes = SourcePrefixes rootIden rootIden
mkSQLSelect :: JsonAggSelect -> AnnSimpleSel -> S.Select
mkSQLSelect jsonAggSelect annSel =
- prefixNumToAliases $ arrNodeToSelect baseNode extrs $ S.BELit True
+ let permLimitSubQuery = PLSQNotRequired
+ ((selectSource, nodeExtractors), joinTree) =
+ runWriter $ flip runReaderT strfyNum $
+ processAnnSimpleSelect sourcePrefixes rootFldName permLimitSubQuery annSel
+ selectNode = SelectNode nodeExtractors joinTree
+ topExtractor = asJsonAggExtr jsonAggSelect rootFldAls permLimitSubQuery
+ $ _ssOrderBy selectSource
+ arrayNode = ArraySelectNode [topExtractor] selectNode
+ in prefixNumToAliases $
+ generateSQLSelectFromArrayNode selectSource arrayNode $ S.BELit True
where
- permLimit = getPermLimit annSel
- extrs = pure $ asJsonAggExtr jsonAggSelect rootFldAls False permLimit
- $ _bnOrderBy baseNode
+ strfyNum = _asnStrfyNum annSel
rootFldIden = toIden rootFldName
- rootPrefix = Prefixes rootFldIden rootFldIden
- baseNode = annSelToBaseNode False rootPrefix rootFldName annSel
+ sourcePrefixes = SourcePrefixes rootFldIden rootFldIden
rootFldName = FieldName "root"
rootFldAls = S.Alias $ toIden rootFldName
+
+mkConnectionSelect :: ConnectionSelect S.SQLExp -> S.SelectWithG S.Select
+mkConnectionSelect connectionSelect =
+ let ((connectionSource, topExtractor, nodeExtractors), joinTree) =
+ runWriter $ flip runReaderT strfyNum $
+ processConnectionSelect sourcePrefixes rootFieldName
+ (S.Alias rootIden) mempty connectionSelect
+ selectNode = ArraySelectNode [topExtractor] $
+ SelectNode nodeExtractors joinTree
+ in prefixNumToAliasesSelectWith $
+ connectionToSelectWith (S.Alias rootIden) connectionSource selectNode
+ where
+ strfyNum = _asnStrfyNum $ _csSelect connectionSelect
+ rootFieldName = FieldName "root"
+ rootIden = toIden rootFieldName
+ sourcePrefixes = SourcePrefixes rootIden rootIden
+
+-- | First element extractor expression from a given record set.
+-- For example, to get the first "id" column from a given row set,
+-- the function generates the SQL expression `(array_agg("id"))[1]`
+mkFirstElementExp :: S.SQLExp -> S.SQLExp
+mkFirstElementExp expIden =
+  S.SEArrayIndex (S.SEFnApp "array_agg" [expIden] Nothing) (S.intToSQLExp 1)
+
+-- | Last element extractor expression from a given record set.
+-- For example, to get the last "id" column from a given row set,
+-- the function generates the SQL expression `(array_agg("id"))[array_length(array_agg("id"), 1)]`
+mkLastElementExp :: S.SQLExp -> S.SQLExp
+mkLastElementExp expIden =
+ let arrayExp = S.SEFnApp "array_agg" [expIden] Nothing
+ in S.SEArrayIndex arrayExp $
+ S.SEFnApp "array_length" [arrayExp, S.intToSQLExp 1] Nothing
+
+cursorIden :: Iden
+cursorIden = Iden "__cursor"
+
+startCursorIden :: Iden
+startCursorIden = Iden "__start_cursor"
+
+endCursorIden :: Iden
+endCursorIden = Iden "__end_cursor"
+
+hasPreviousPageIden :: Iden
+hasPreviousPageIden = Iden "__has_previous_page"
+
+hasNextPageIden :: Iden
+hasNextPageIden = Iden "__has_next_page"
+
+pageInfoSelectAliasIden :: Iden
+pageInfoSelectAliasIden = Iden "__page_info"
+
+cursorsSelectAliasIden :: Iden
+cursorsSelectAliasIden = Iden "__cursors_select"
+
+mkPrimaryKeyColumnsObjectExp :: Iden -> NonEmpty PGColumnInfo -> S.SQLExp
+mkPrimaryKeyColumnsObjectExp sourcePrefix primaryKeyColumns =
+ S.applyJsonBuildObj $ flip concatMap (toList primaryKeyColumns) $
+ \pgColumnInfo ->
+ [ S.SELit $ getPGColTxt $ pgiColumn pgColumnInfo
+ , toJSONableExp False (pgiType pgColumnInfo) False $
+ S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ pgiColumn pgColumnInfo
+ ]
+
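+-- | Roughly, this generates
+-- @regexp_replace(encode(convert_to(<exp>, 'UTF8'), 'base64'), '\n', '', 'g')@,
+-- i.e. base64-encode the expression and strip any newlines the @encode@
+-- function may insert.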
+encodeBase64 :: S.SQLExp -> S.SQLExp
+encodeBase64 =
+ removeNewline . bytesToBase64Text . convertToBytes
+ where
+ convertToBytes e =
+ S.SEFnApp "convert_to" [e, S.SELit "UTF8"] Nothing
+ bytesToBase64Text e =
+ S.SEFnApp "encode" [e, S.SELit "base64"] Nothing
+ removeNewline e =
+ S.SEFnApp "regexp_replace" [e, S.SELit "\\n", S.SELit "", S.SELit "g"] Nothing
+
+
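+-- | Informal note: for a connection field this appears to build, per row, a
+-- cursor extractor (the order-by cursor when one exists, otherwise a JSON
+-- object of the primary key columns), the edges/pageInfo JSON extractor, and
+-- an 'ArrayConnectionSource' carrying the optional split (after/before)
+-- predicate and slice (first/last).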
+processConnectionSelect
+ :: ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ )
+ => SourcePrefixes
+ -> FieldName
+ -> S.Alias
+ -> HM.HashMap PGCol PGCol
+ -> ConnectionSelect S.SQLExp
+ -> m ( ArrayConnectionSource
+ , S.Extractor
+ , HM.HashMap S.Alias S.SQLExp
+ )
+processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connectionSelect = do
+ (selectSource, orderByAndDistinctExtrs, maybeOrderByCursor) <-
+ processSelectParams sourcePrefixes fieldAlias similarArrayFields selectFrom
+ permLimitSubQuery tablePermissions tableArgs
+
+ let mkCursorExtractor = (S.Alias cursorIden,) . (`S.SETyAnn` S.textTypeAnn)
+ cursorExtractors = case maybeOrderByCursor of
+ Just orderByCursor -> [mkCursorExtractor orderByCursor]
+ Nothing ->
+ -- Extract primary key columns from base select along with cursor expression.
+ -- Those columns are required to perform connection split via a WHERE clause.
+ mkCursorExtractor (mkPrimaryKeyColumnsObjectExp thisPrefix primaryKeyColumns) : primaryKeyColumnExtractors
+ orderByExp = _ssOrderBy selectSource
+ (topExtractorExp, exps) <- flip runStateT [] $ processFields orderByExp
+ let topExtractor = S.Extractor topExtractorExp $ Just $ S.Alias fieldIden
+ allExtractors = HM.fromList $ cursorExtractors <> exps <> orderByAndDistinctExtrs
+ arrayConnectionSource = ArrayConnectionSource relAlias colMapping
+ (mkSplitBoolExp <$> maybeSplit) maybeSlice selectSource
+ pure ( arrayConnectionSource
+ , topExtractor
+ , allExtractors
+ )
+ where
+ ConnectionSelect primaryKeyColumns maybeSplit maybeSlice select = connectionSelect
+ AnnSelectG fields selectFrom tablePermissions tableArgs _ = select
+ fieldIden = toIden fieldAlias
+ thisPrefix = _pfThis sourcePrefixes
+ permLimitSubQuery = PLSQNotRequired
+
+ primaryKeyColumnExtractors =
+ flip map (toList primaryKeyColumns) $
+ \pgColumnInfo ->
+ let pgColumn = pgiColumn pgColumnInfo
+ in ( S.Alias $ mkBaseTableColumnAlias thisPrefix pgColumn
+ , S.mkQIdenExp (mkBaseTableAlias thisPrefix) pgColumn
+ )
+
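+  -- Informal sketch of the generated keyset predicate for splits s1, s2, ...
+  -- on ascending columns:
+  --   (c1 > v1) OR (c1 = v1 AND ((c2 > v2) OR (c2 = v2 AND ... AND FALSE)))
+  -- with '>' flipped to '<' for descending columns and for 'before' cursors.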
+ mkSplitBoolExp (firstSplit NE.:| rest) =
+ S.BEBin S.OrOp (mkSplitCompareExp firstSplit) $ mkBoolExpFromRest firstSplit rest
+ where
+ mkBoolExpFromRest previousSplit =
+ S.BEBin S.AndOp (mkEqualityCompareExp previousSplit) . \case
+ [] -> S.BELit False
+ (thisSplit:remainingSplit) -> mkSplitBoolExp (thisSplit NE.:| remainingSplit)
+
+ mkSplitCompareExp (ConnectionSplit kind v (OrderByItemG obTyM obCol _)) =
+ let obAlias = mkAnnOrderByAlias thisPrefix fieldAlias similarArrayFields obCol
+ obTy = maybe S.OTAsc unOrderType obTyM
+ compareOp = case (kind, obTy) of
+ (CSKAfter, S.OTAsc) -> S.SGT
+ (CSKAfter, S.OTDesc) -> S.SLT
+ (CSKBefore, S.OTAsc) -> S.SLT
+ (CSKBefore, S.OTDesc) -> S.SGT
+ in S.BECompare compareOp (S.SEIden $ toIden obAlias) v
+
+ mkEqualityCompareExp (ConnectionSplit _ v orderByItem) =
+ let obAlias = mkAnnOrderByAlias thisPrefix fieldAlias similarArrayFields $
+ obiColumn orderByItem
+ in S.BECompare S.SEQ (S.SEIden $ toIden obAlias) v
+
+ similarArrayFields = HM.unions $
+ flip map (map snd fields) $ \case
+ ConnectionTypename{} -> mempty
+ ConnectionPageInfo{} -> mempty
+ ConnectionEdges edges -> HM.unions $
+ flip map (map snd edges) $ \case
+ EdgeTypename{} -> mempty
+ EdgeCursor{} -> mempty
+ EdgeNode annFields ->
+ mkSimilarArrayFields annFields $ _saOrderBy tableArgs
+
+ mkSimpleJsonAgg rowExp ob =
+ let jsonAggExp = S.SEFnApp "json_agg" [rowExp] ob
+ in S.SEFnApp "coalesce" [jsonAggExp, S.SELit "[]"] Nothing
+
+ processFields
+ :: ( MonadReader Bool m
+ , MonadWriter JoinTree m
+ , MonadState [(S.Alias, S.SQLExp)] m
+ )
+ => Maybe S.OrderByExp -> m S.SQLExp
+ processFields orderByExp =
+ fmap (S.applyJsonBuildObj . concat) $
+ forM fields $
+ \(FieldName fieldText, field) -> (S.SELit fieldText:) . pure <$>
+ case field of
+ ConnectionTypename t -> pure $ withForceAggregation S.textTypeAnn $ S.SELit t
+ ConnectionPageInfo pageInfoFields -> pure $ processPageInfoFields pageInfoFields
+ ConnectionEdges edges ->
+ fmap (flip mkSimpleJsonAgg orderByExp . S.applyJsonBuildObj . concat) $ forM edges $
+ \(FieldName edgeText, edge) -> (S.SELit edgeText:) . pure <$>
+ case edge of
+ EdgeTypename t -> pure $ S.SELit t
+ EdgeCursor -> pure $ encodeBase64 $ S.SEIden (toIden cursorIden)
+ EdgeNode annFields -> do
+ let edgeFieldName = FieldName $
+ getFieldNameTxt fieldAlias <> "." <> fieldText <> "." <> edgeText
+ edgeFieldIden = toIden edgeFieldName
+ annFieldsExtrExp <- processAnnFields thisPrefix edgeFieldName similarArrayFields annFields
+ modify' (<> [annFieldsExtrExp])
+ pure $ S.SEIden edgeFieldIden
+
+ processPageInfoFields infoFields =
+ S.applyJsonBuildObj $ flip concatMap infoFields $
+ \(FieldName fieldText, field) -> (:) (S.SELit fieldText) $ pure case field of
+ PageInfoTypename t -> withForceAggregation S.textTypeAnn $ S.SELit t
+ PageInfoHasNextPage -> withForceAggregation S.boolTypeAnn $
+ mkSingleFieldSelect (S.SEIden hasNextPageIden) pageInfoSelectAliasIden
+ PageInfoHasPreviousPage -> withForceAggregation S.boolTypeAnn $
+ mkSingleFieldSelect (S.SEIden hasPreviousPageIden) pageInfoSelectAliasIden
+ PageInfoStartCursor -> withForceAggregation S.textTypeAnn $
+ encodeBase64 $ mkSingleFieldSelect (S.SEIden startCursorIden) cursorsSelectAliasIden
+ PageInfoEndCursor -> withForceAggregation S.textTypeAnn $
+ encodeBase64 $ mkSingleFieldSelect (S.SEIden endCursorIden) cursorsSelectAliasIden
+ where
+ mkSingleFieldSelect field fromIden = S.SESelect
+ S.mkSelect { S.selExtr = [S.Extractor field Nothing]
+ , S.selFrom = Just $ S.FromExp [S.FIIden fromIden]
+ }
+
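+-- | Informal note: the connection appears to be assembled as a chain of CTEs,
+-- roughly
+--   __base_select (rows plus a row_number)
+--     -> __split_select (WHERE over the cursor predicate, if any)
+--     -> __slice_select (first/last slice, if any)
+--     -> __final_select
+-- with __cursors_select and __page_info derived from the final and base
+-- selects, and the top extractors evaluated over __final_select.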
+connectionToSelectWith
+ :: S.Alias
+ -> ArrayConnectionSource
+ -> ArraySelectNode
+ -> S.SelectWithG S.Select
+connectionToSelectWith baseSelectAlias arrayConnectionSource arraySelectNode =
+ let extractionSelect = S.mkSelect
+ { S.selExtr = topExtractors
+ , S.selFrom = Just $ S.FromExp [S.FIIden finalSelectIden]
+ }
+ in S.SelectWith fromBaseSelections extractionSelect
+ where
+ ArrayConnectionSource _ columnMapping maybeSplit maybeSlice selectSource =
+ arrayConnectionSource
+ ArraySelectNode topExtractors selectNode = arraySelectNode
+ baseSelectIden = Iden "__base_select"
+ splitSelectIden = Iden "__split_select"
+ sliceSelectIden = Iden "__slice_select"
+ finalSelectIden = Iden "__final_select"
+
+ rowNumberIden = Iden "__row_number"
+ rowNumberExp = S.SEUnsafe "(row_number() over (partition by 1))"
+ startRowNumberIden = Iden "__start_row_number"
+ endRowNumberIden = Iden "__end_row_number"
+
+ startCursorExp = mkFirstElementExp $ S.SEIden cursorIden
+ endCursorExp = mkLastElementExp $ S.SEIden cursorIden
+
+ startRowNumberExp = mkFirstElementExp $ S.SEIden rowNumberIden
+ endRowNumberExp = mkLastElementExp $ S.SEIden rowNumberIden
+
+ fromBaseSelections =
+ let joinCond = mkJoinCond baseSelectAlias columnMapping
+ baseSelectFrom = S.mkSelFromItem
+ (generateSQLSelect joinCond selectSource selectNode)
+ $ S.Alias $ _ssPrefix selectSource
+ select =
+ S.mkSelect { S.selExtr = [ S.selectStar
+ , S.Extractor rowNumberExp $ Just $ S.Alias rowNumberIden
+ ]
+ , S.selFrom = Just $ S.FromExp [baseSelectFrom]
+ }
+ in (S.Alias baseSelectIden, select):fromSplitSelection
+
+ mkStarSelect fromIden =
+ S.mkSelect { S.selExtr = [S.selectStar]
+ , S.selFrom = Just $ S.FromExp [S.FIIden fromIden]
+ }
+
+ fromSplitSelection = case maybeSplit of
+ Nothing -> fromSliceSelection baseSelectIden
+ Just splitBool ->
+ let select =
+ (mkStarSelect baseSelectIden){S.selWhere = Just $ S.WhereFrag splitBool}
+ in (S.Alias splitSelectIden, select):fromSliceSelection splitSelectIden
+
+ fromSliceSelection prevSelect = case maybeSlice of
+ Nothing -> fromFinalSelect prevSelect
+ Just slice ->
+ let select = case slice of
+ SliceFirst limit ->
+ (mkStarSelect prevSelect)
+ {S.selLimit = (Just . S.LimitExp . S.intToSQLExp) limit}
+ SliceLast limit ->
+ let mkRowNumberOrderBy obType =
+ let orderByItem =
+ S.OrderByItem (S.SEIden rowNumberIden) (Just obType) Nothing
+ in S.OrderByExp $ orderByItem NE.:| []
+
+ sliceLastSelect = (mkStarSelect prevSelect)
+ { S.selLimit = (Just . S.LimitExp . S.intToSQLExp) limit
+ , S.selOrderBy = Just $ mkRowNumberOrderBy S.OTDesc
+ }
+ sliceLastSelectFrom =
+ S.mkSelFromItem sliceLastSelect $ S.Alias sliceSelectIden
+ in S.mkSelect { S.selExtr = [S.selectStar]
+ , S.selFrom = Just $ S.FromExp [sliceLastSelectFrom]
+ , S.selOrderBy = Just $ mkRowNumberOrderBy S.OTAsc
+ }
+ in (S.Alias sliceSelectIden, select):fromFinalSelect sliceSelectIden
+
+ fromFinalSelect prevSelect =
+ let select = mkStarSelect prevSelect
+ in (S.Alias finalSelectIden, select):fromCursorSelection
+
+ fromCursorSelection =
+ let extrs = [ S.Extractor startCursorExp $ Just $ S.Alias startCursorIden
+ , S.Extractor endCursorExp $ Just $ S.Alias endCursorIden
+ , S.Extractor startRowNumberExp $ Just $ S.Alias startRowNumberIden
+ , S.Extractor endRowNumberExp $ Just $ S.Alias endRowNumberIden
+ ]
+ select =
+ S.mkSelect { S.selExtr = extrs
+ , S.selFrom = Just $ S.FromExp [S.FIIden finalSelectIden]
+ }
+ in (S.Alias cursorsSelectAliasIden, select):fromPageInfoSelection
+
+ fromPageInfoSelection =
+ let hasPrevPage = S.SEBool $
+ S.mkExists (S.FIIden baseSelectIden) $
+ S.BECompare S.SLT (S.SEIden rowNumberIden) $
+ S.SESelect $ S.mkSelect { S.selFrom = Just $ S.FromExp [S.FIIden cursorsSelectAliasIden]
+ , S.selExtr = [S.Extractor (S.SEIden startRowNumberIden) Nothing]
+ }
+ hasNextPage = S.SEBool $
+ S.mkExists (S.FIIden baseSelectIden) $
+ S.BECompare S.SGT (S.SEIden rowNumberIden) $
+ S.SESelect $ S.mkSelect { S.selFrom = Just $ S.FromExp [S.FIIden cursorsSelectAliasIden]
+ , S.selExtr = [S.Extractor (S.SEIden endRowNumberIden) Nothing]
+ }
+
+ select =
+ S.mkSelect { S.selExtr = [ S.Extractor hasPrevPage $ Just $ S.Alias hasPreviousPageIden
+ , S.Extractor hasNextPage $ Just $ S.Alias hasNextPageIden
+ ]
+ }
+ in pure (S.Alias pageInfoSelectAliasIden, select)
diff --git a/server/src-lib/Hasura/RQL/DML/Select/Types.hs b/server/src-lib/Hasura/RQL/DML/Select/Types.hs
index eb794dfaf8c..15cd08fbc9e 100644
--- a/server/src-lib/Hasura/RQL/DML/Select/Types.hs
+++ b/server/src-lib/Hasura/RQL/DML/Select/Types.hs
@@ -3,19 +3,19 @@
module Hasura.RQL.DML.Select.Types where
+import Control.Lens.TH (makeLenses, makePrisms)
import Data.Aeson.Types
-import Language.Haskell.TH.Syntax (Lift)
-import Control.Lens.TH (makePrisms)
+import Language.Haskell.TH.Syntax (Lift)
-import qualified Data.HashMap.Strict as HM
-import qualified Data.List.NonEmpty as NE
-import qualified Data.Sequence as Seq
-import qualified Data.Text as T
-import qualified Data.Aeson as J
-import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Data.Aeson as J
+import qualified Data.HashMap.Strict as HM
+import qualified Data.List.NonEmpty as NE
+import qualified Data.Sequence as Seq
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
-import Hasura.Prelude
import Hasura.GraphQL.Parser.Schema
+import Hasura.Prelude
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
@@ -23,7 +23,7 @@ import Hasura.RQL.Types.DML
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.RemoteRelationship
import Hasura.RQL.Types.RemoteSchema
-import qualified Hasura.SQL.DML as S
+import qualified Hasura.SQL.DML as S
import Hasura.SQL.Types
type SelectQExt = SelectG ExtCol BoolExp Int
@@ -31,7 +31,8 @@ type SelectQExt = SelectG ExtCol BoolExp Int
data JsonAggSelect
= JASMultipleRows
| JASSingleObject
- deriving (Show, Eq)
+ deriving (Show, Eq, Generic)
+instance Hashable JsonAggSelect
-- Columns in RQL
data ExtCol
@@ -60,118 +61,129 @@ instance FromJSON ExtCol where
, "object (relationship)"
]
-data AnnAggOrdBy
+data AnnAggregateOrderBy
= AAOCount
- | AAOOp !T.Text !PGCol
- deriving (Show, Eq)
+ | AAOOp !T.Text !PGColumnInfo
+ deriving (Show, Eq, Generic)
+instance Hashable AnnAggregateOrderBy
-data AnnObColG v
- = AOCPG !PGCol
- | AOCObj !RelInfo !(AnnBoolExp v) !(AnnObColG v)
- | AOCAgg !RelInfo !(AnnBoolExp v) !AnnAggOrdBy
- deriving (Show, Eq)
+data AnnOrderByElementG v
+ = AOCColumn !PGColumnInfo
+ | AOCObjectRelation !RelInfo !v !(AnnOrderByElementG v)
+ | AOCArrayAggregation !RelInfo !v !AnnAggregateOrderBy
+ deriving (Show, Eq, Generic, Functor)
+instance (Hashable v) => Hashable (AnnOrderByElementG v)
-traverseAnnObCol
+type AnnOrderByElement v = AnnOrderByElementG (AnnBoolExp v)
+
+traverseAnnOrderByElement
:: (Applicative f)
- => (a -> f b) -> AnnObColG a -> f (AnnObColG b)
-traverseAnnObCol f = \case
- AOCPG pgColInfo -> pure $ AOCPG pgColInfo
- AOCObj relInfo annBoolExp annObCol ->
- AOCObj relInfo
+ => (a -> f b) -> AnnOrderByElement a -> f (AnnOrderByElement b)
+traverseAnnOrderByElement f = \case
+ AOCColumn pgColInfo -> pure $ AOCColumn pgColInfo
+ AOCObjectRelation relInfo annBoolExp annObCol ->
+ AOCObjectRelation relInfo
<$> traverseAnnBoolExp f annBoolExp
- <*> traverseAnnObCol f annObCol
- AOCAgg relInfo annBoolExp annAggOb ->
- AOCAgg relInfo
+ <*> traverseAnnOrderByElement f annObCol
+ AOCArrayAggregation relInfo annBoolExp annAggOb ->
+ AOCArrayAggregation relInfo
<$> traverseAnnBoolExp f annBoolExp
<*> pure annAggOb
-type AnnObCol = AnnObColG S.SQLExp
-
-type AnnOrderByItemG v = OrderByItemG (AnnObColG v)
+type AnnOrderByItemG v = OrderByItemG (AnnOrderByElement v)
traverseAnnOrderByItem
:: (Applicative f)
=> (a -> f b) -> AnnOrderByItemG a -> f (AnnOrderByItemG b)
traverseAnnOrderByItem f =
- traverse (traverseAnnObCol f)
+ traverse (traverseAnnOrderByElement f)
type AnnOrderByItem = AnnOrderByItemG S.SQLExp
-data AnnRelG a
- = AnnRelG
- { aarName :: !RelName -- Relationship name
- , aarMapping :: !(HashMap PGCol PGCol) -- Column of left table to join with
- , aarAnnSel :: !a -- Current table. Almost ~ to SQL Select
+type OrderByItemExp =
+ OrderByItemG (AnnOrderByElement S.SQLExp, (S.Alias, S.SQLExp))
+
+data AnnRelationSelectG a
+ = AnnRelationSelectG
+  { aarRelationshipName :: !RelName -- Relationship name
+  , aarColumnMapping :: !(HashMap PGCol PGCol) -- Columns of the left table to join with
+  , aarAnnSelect :: !a -- Select on the current table; roughly a SQL SELECT
} deriving (Show, Eq, Functor, Foldable, Traversable)
-type ObjSelG v = AnnRelG (AnnSimpleSelG v)
-type ObjSel = ObjSelG S.SQLExp
+type ObjectRelationSelectG v = AnnRelationSelectG (AnnSimpleSelG v)
+type ObjectRelationSelect = ObjectRelationSelectG S.SQLExp
-type ArrRelG v = AnnRelG (AnnSimpleSelG v)
-type ArrRelAggG v = AnnRelG (AnnAggSelG v)
-type ArrRelAgg = ArrRelAggG S.SQLExp
+type ArrayRelationSelectG v = AnnRelationSelectG (AnnSimpleSelG v)
+type ArrayAggregateSelectG v = AnnRelationSelectG (AnnAggregateSelectG v)
+type ArrayConnectionSelect v = AnnRelationSelectG (ConnectionSelect v)
+type ArrayAggregateSelect = ArrayAggregateSelectG S.SQLExp
-data ComputedFieldScalarSel v
- = ComputedFieldScalarSel
+data ComputedFieldScalarSelect v
+ = ComputedFieldScalarSelect
{ _cfssFunction :: !QualifiedFunction
, _cfssArguments :: !(FunctionArgsExpTableRow v)
, _cfssType :: !PGScalarType
- , _cfssColumnOp :: !(Maybe ColOp)
+ , _cfssColumnOp :: !(Maybe ColumnOp)
} deriving (Show, Eq, Functor, Foldable, Traversable)
-data ComputedFieldSel v
- = CFSScalar !(ComputedFieldScalarSel v)
+data ComputedFieldSelect v
+ = CFSScalar !(ComputedFieldScalarSelect v)
| CFSTable !JsonAggSelect !(AnnSimpleSelG v)
deriving (Show, Eq)
-traverseComputedFieldSel
+traverseComputedFieldSelect
:: (Applicative f)
=> (v -> f w)
- -> ComputedFieldSel v -> f (ComputedFieldSel w)
-traverseComputedFieldSel fv = \case
+ -> ComputedFieldSelect v -> f (ComputedFieldSelect w)
+traverseComputedFieldSelect fv = \case
CFSScalar scalarSel -> CFSScalar <$> traverse fv scalarSel
- CFSTable b tableSel -> CFSTable b <$> traverseAnnSimpleSel fv tableSel
+ CFSTable b tableSel -> CFSTable b <$> traverseAnnSimpleSelect fv tableSel
type Fields a = [(FieldName, a)]
-data ArrSelG v
- = ASSimple !(ArrRelG v)
- | ASAgg !(ArrRelAggG v)
+data ArraySelectG v
+ = ASSimple !(ArrayRelationSelectG v)
+ | ASAggregate !(ArrayAggregateSelectG v)
+ | ASConnection !(ArrayConnectionSelect v)
deriving (Show, Eq)
-traverseArrSel
+traverseArraySelect
:: (Applicative f)
=> (a -> f b)
- -> ArrSelG a
- -> f (ArrSelG b)
-traverseArrSel f = \case
- ASSimple arrRel -> ASSimple <$> traverse (traverseAnnSimpleSel f) arrRel
- ASAgg arrRelAgg -> ASAgg <$> traverse (traverseAnnAggSel f) arrRelAgg
+ -> ArraySelectG a
+ -> f (ArraySelectG b)
+traverseArraySelect f = \case
+ ASSimple arrRel ->
+ ASSimple <$> traverse (traverseAnnSimpleSelect f) arrRel
+ ASAggregate arrRelAgg ->
+ ASAggregate <$> traverse (traverseAnnAggregateSelect f) arrRelAgg
+ ASConnection relConnection ->
+ ASConnection <$> traverse (traverseConnectionSelect f) relConnection
-type ArrSel = ArrSelG S.SQLExp
+type ArraySelect = ArraySelectG S.SQLExp
-type ArrSelFldsG v = Fields (ArrSelG v)
+type ArraySelectFieldsG v = Fields (ArraySelectG v)
-data ColOp
- = ColOp
+data ColumnOp
+ = ColumnOp
{ _colOp :: S.SQLOp
, _colExp :: S.SQLExp
} deriving (Show, Eq)
-data AnnColField
- = AnnColField
+data AnnColumnField
+ = AnnColumnField
{ _acfInfo :: !PGColumnInfo
, _acfAsText :: !Bool
-- ^ If this field is 'True', columns are explicitly casted to @text@ when fetched, which avoids
-- an issue that occurs because we don’t currently have proper support for array types. See
-- https://github.com/hasura/graphql-engine/pull/3198 for more details.
- , _acfOp :: !(Maybe ColOp)
+ , _acfOp :: !(Maybe ColumnOp)
} deriving (Show, Eq)
data RemoteFieldArgument
= RemoteFieldArgument
- { _rfaName :: !G.Name
- , _rfaValue :: !(G.Value Variable)
+ { _rfaName :: !G.Name
+ , _rfaValue :: !(G.Value Variable)
, _rfaVariable :: !(Maybe [(G.VariableDefinition,J.Value)])
} deriving (Eq,Show)
@@ -184,50 +196,53 @@ data RemoteSelect
, _rselRemoteSchema :: !RemoteSchemaInfo
} deriving (Show,Eq)
-data AnnFldG v
- = FCol !AnnColField
- | FObj !(ObjSelG v)
- | FArr !(ArrSelG v)
- | FComputedField !(ComputedFieldSel v)
- | FExp !T.Text
- | FRemote !RemoteSelect
- deriving (Show,Eq)
+data AnnFieldG v
+ = AFColumn !AnnColumnField
+ | AFObjectRelation !(ObjectRelationSelectG v)
+ | AFArrayRelation !(ArraySelectG v)
+ | AFComputedField !(ComputedFieldSelect v)
+ | AFRemote !RemoteSelect
+ | AFNodeId !QualifiedTable !(NonEmpty PGColumnInfo)
+ | AFExpression !T.Text
+ deriving (Show, Eq)
-mkAnnColField :: PGColumnInfo -> Maybe ColOp -> AnnFldG v
-mkAnnColField ci colOpM =
- FCol $ AnnColField ci False colOpM
+mkAnnColumnField :: PGColumnInfo -> Maybe ColumnOp -> AnnFieldG v
+mkAnnColumnField ci colOpM =
+ AFColumn $ AnnColumnField ci False colOpM
-mkAnnColFieldAsText :: PGColumnInfo -> AnnFldG v
-mkAnnColFieldAsText ci =
- FCol $ AnnColField ci True Nothing
+mkAnnColumnFieldAsText :: PGColumnInfo -> AnnFieldG v
+mkAnnColumnFieldAsText ci =
+ AFColumn $ AnnColumnField ci True Nothing
-traverseAnnFld
+traverseAnnField
:: (Applicative f)
- => (a -> f b) -> AnnFldG a -> f (AnnFldG b)
-traverseAnnFld f = \case
- FCol colFld -> pure $ FCol colFld
- FObj sel -> FObj <$> traverse (traverseAnnSimpleSel f) sel
- FArr sel -> FArr <$> traverseArrSel f sel
- FComputedField sel -> FComputedField <$> traverseComputedFieldSel f sel
- FExp t -> FExp <$> pure t
- FRemote s -> pure $ FRemote s
+ => (a -> f b) -> AnnFieldG a -> f (AnnFieldG b)
+traverseAnnField f = \case
+ AFColumn colFld -> pure $ AFColumn colFld
+ AFObjectRelation sel -> AFObjectRelation <$> traverse (traverseAnnSimpleSelect f) sel
+ AFArrayRelation sel -> AFArrayRelation <$> traverseArraySelect f sel
+ AFComputedField sel -> AFComputedField <$> traverseComputedFieldSelect f sel
+ AFRemote s -> pure $ AFRemote s
+ AFNodeId qt pKeys -> pure $ AFNodeId qt pKeys
+ AFExpression t -> AFExpression <$> pure t
-type AnnFld = AnnFldG S.SQLExp
+type AnnField = AnnFieldG S.SQLExp
-data TableArgsG v
- = TableArgs
- { _taWhere :: !(Maybe (AnnBoolExp v))
- , _taOrderBy :: !(Maybe (NE.NonEmpty (AnnOrderByItemG v)))
- , _taLimit :: !(Maybe Int)
- , _taOffset :: !(Maybe S.SQLExp)
- , _taDistCols :: !(Maybe (NE.NonEmpty PGCol))
- } deriving (Show, Eq)
+data SelectArgsG v
+ = SelectArgs
+ { _saWhere :: !(Maybe (AnnBoolExp v))
+ , _saOrderBy :: !(Maybe (NE.NonEmpty (AnnOrderByItemG v)))
+ , _saLimit :: !(Maybe Int)
+ , _saOffset :: !(Maybe S.SQLExp)
+ , _saDistinct :: !(Maybe (NE.NonEmpty PGCol))
+ } deriving (Show, Eq, Generic)
+instance (Hashable v) => Hashable (SelectArgsG v)
-traverseTableArgs
+traverseSelectArgs
:: (Applicative f)
- => (a -> f b) -> TableArgsG a -> f (TableArgsG b)
-traverseTableArgs f (TableArgs wh ordBy lmt ofst distCols) =
- TableArgs
+ => (a -> f b) -> SelectArgsG a -> f (SelectArgsG b)
+traverseSelectArgs f (SelectArgs wh ordBy lmt ofst distCols) =
+ SelectArgs
<$> traverse (traverseAnnBoolExp f) wh
-- traversing through maybe -> nonempty -> annorderbyitem
<*> traverse (traverse (traverseAnnOrderByItem f)) ordBy
@@ -235,62 +250,103 @@ traverseTableArgs f (TableArgs wh ordBy lmt ofst distCols) =
<*> pure ofst
<*> pure distCols
-type TableArgs = TableArgsG S.SQLExp
+type SelectArgs = SelectArgsG S.SQLExp
-noTableArgs :: TableArgsG v
-noTableArgs = TableArgs Nothing Nothing Nothing Nothing Nothing
+noSelectArgs :: SelectArgsG v
+noSelectArgs = SelectArgs Nothing Nothing Nothing Nothing Nothing
data PGColFld
= PCFCol !PGCol
| PCFExp !T.Text
deriving (Show, Eq)
-type ColFlds = Fields PGColFld
+type ColumnFields = Fields PGColFld
-data AggOp
- = AggOp
- { _aoOp :: !T.Text
- , _aoFlds :: !ColFlds
+data AggregateOp
+ = AggregateOp
+ { _aoOp :: !T.Text
+ , _aoFields :: !ColumnFields
} deriving (Show, Eq)
-data AggFld
+data AggregateField
= AFCount !S.CountType
- | AFOp !AggOp
+ | AFOp !AggregateOp
| AFExp !T.Text
deriving (Show, Eq)
-type AggFlds = Fields AggFld
-type AnnFldsG v = Fields (AnnFldG v)
+type AggregateFields = Fields AggregateField
+type AnnFieldsG v = Fields (AnnFieldG v)
-traverseAnnFlds
+traverseAnnFields
:: (Applicative f)
- => (a -> f b) -> AnnFldsG a -> f (AnnFldsG b)
-traverseAnnFlds f = traverse (traverse (traverseAnnFld f))
+ => (a -> f b) -> AnnFieldsG a -> f (AnnFieldsG b)
+traverseAnnFields f = traverse (traverse (traverseAnnField f))
-type AnnFlds = AnnFldsG S.SQLExp
+type AnnFields = AnnFieldsG S.SQLExp
-data TableAggFldG v
- = TAFAgg !AggFlds
- | TAFNodes !(AnnFldsG v)
+data TableAggregateFieldG v
+ = TAFAgg !AggregateFields
+ | TAFNodes !(AnnFieldsG v)
| TAFExp !T.Text
deriving (Show, Eq)
-traverseTableAggFld
+data PageInfoField
+ = PageInfoTypename !Text
+ | PageInfoHasNextPage
+ | PageInfoHasPreviousPage
+ | PageInfoStartCursor
+ | PageInfoEndCursor
+ deriving (Show, Eq)
+type PageInfoFields = Fields PageInfoField
+
+data EdgeField v
+ = EdgeTypename !Text
+ | EdgeCursor
+ | EdgeNode !(AnnFieldsG v)
+ deriving (Show, Eq)
+type EdgeFields v = Fields (EdgeField v)
+
+traverseEdgeField
:: (Applicative f)
- => (a -> f b) -> TableAggFldG a -> f (TableAggFldG b)
-traverseTableAggFld f = \case
+ => (a -> f b) -> EdgeField a -> f (EdgeField b)
+traverseEdgeField f = \case
+ EdgeTypename t -> pure $ EdgeTypename t
+ EdgeCursor -> pure EdgeCursor
+ EdgeNode fields -> EdgeNode <$> traverseAnnFields f fields
+
+data ConnectionField v
+ = ConnectionTypename !Text
+ | ConnectionPageInfo !PageInfoFields
+ | ConnectionEdges !(EdgeFields v)
+ deriving (Show, Eq)
+type ConnectionFields v = Fields (ConnectionField v)
+
+traverseConnectionField
+ :: (Applicative f)
+ => (a -> f b) -> ConnectionField a -> f (ConnectionField b)
+traverseConnectionField f = \case
+ ConnectionTypename t -> pure $ ConnectionTypename t
+ ConnectionPageInfo fields -> pure $ ConnectionPageInfo fields
+ ConnectionEdges fields ->
+ ConnectionEdges <$> traverse (traverse (traverseEdgeField f)) fields
+
+traverseTableAggregateField
+ :: (Applicative f)
+ => (a -> f b) -> TableAggregateFieldG a -> f (TableAggregateFieldG b)
+traverseTableAggregateField f = \case
TAFAgg aggFlds -> pure $ TAFAgg aggFlds
- TAFNodes annFlds -> TAFNodes <$> traverseAnnFlds f annFlds
+ TAFNodes annFlds -> TAFNodes <$> traverseAnnFields f annFlds
TAFExp t -> pure $ TAFExp t
-type TableAggFld = TableAggFldG S.SQLExp
-type TableAggFldsG v = Fields (TableAggFldG v)
-type TableAggFlds = TableAggFldsG S.SQLExp
+type TableAggregateField = TableAggregateFieldG S.SQLExp
+type TableAggregateFieldsG v = Fields (TableAggregateFieldG v)
+type TableAggregateFields = TableAggregateFieldsG S.SQLExp
data ArgumentExp a
= AETableRow !(Maybe Iden) -- ^ table row accessor
| AEInput !a
- deriving (Show, Eq, Functor, Foldable, Traversable)
+ deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
+instance (Hashable v) => Hashable (ArgumentExp v)
type FunctionArgsExpTableRow v = FunctionArgsExpG (ArgumentExp v)
@@ -299,8 +355,10 @@ data SelectFromG v
| FromIden !Iden
| FromFunction !QualifiedFunction
!(FunctionArgsExpTableRow v)
+ -- a definition list
!(Maybe [(PGCol, PGScalarType)])
- deriving (Show, Eq, Functor, Foldable, Traversable)
+ deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
+instance (Hashable v) => Hashable (SelectFromG v)
type SelectFrom = SelectFromG S.SQLExp
@@ -308,7 +366,8 @@ data TablePermG v
= TablePerm
{ _tpFilter :: !(AnnBoolExp v)
, _tpLimit :: !(Maybe Int)
- } deriving (Eq, Show)
+ } deriving (Eq, Show, Generic)
+instance (Hashable v) => Hashable (TablePermG v)
traverseTablePerm
:: (Applicative f)
@@ -326,62 +385,105 @@ noTablePermissions =
type TablePerm = TablePermG S.SQLExp
-data AnnSelG a v
- = AnnSelG
+data AnnSelectG a v
+ = AnnSelectG
{ _asnFields :: !a
, _asnFrom :: !(SelectFromG v)
, _asnPerm :: !(TablePermG v)
- , _asnArgs :: !(TableArgsG v)
+ , _asnArgs :: !(SelectArgsG v)
, _asnStrfyNum :: !Bool
} deriving (Show, Eq)
-getPermLimit :: AnnSelG a v -> Maybe Int
-getPermLimit = _tpLimit . _asnPerm
-
-traverseAnnSimpleSel
+traverseAnnSimpleSelect
:: (Applicative f)
=> (a -> f b)
-> AnnSimpleSelG a -> f (AnnSimpleSelG b)
-traverseAnnSimpleSel f = traverseAnnSel (traverseAnnFlds f) f
+traverseAnnSimpleSelect f = traverseAnnSelect (traverseAnnFields f) f
-traverseAnnAggSel
+traverseAnnAggregateSelect
:: (Applicative f)
=> (a -> f b)
- -> AnnAggSelG a -> f (AnnAggSelG b)
-traverseAnnAggSel f =
- traverseAnnSel (traverse (traverse (traverseTableAggFld f))) f
+ -> AnnAggregateSelectG a -> f (AnnAggregateSelectG b)
+traverseAnnAggregateSelect f =
+ traverseAnnSelect (traverse (traverse (traverseTableAggregateField f))) f
-traverseAnnSel
+traverseAnnSelect
:: (Applicative f)
=> (a -> f b) -> (v -> f w)
- -> AnnSelG a v -> f (AnnSelG b w)
-traverseAnnSel f1 f2 (AnnSelG flds tabFrom perm args strfyNum) =
- AnnSelG
+ -> AnnSelectG a v -> f (AnnSelectG b w)
+traverseAnnSelect f1 f2 (AnnSelectG flds tabFrom perm args strfyNum) =
+ AnnSelectG
<$> f1 flds
<*> traverse f2 tabFrom
<*> traverseTablePerm f2 perm
- <*> traverseTableArgs f2 args
+ <*> traverseSelectArgs f2 args
<*> pure strfyNum
-type AnnSimpleSelG v = AnnSelG (AnnFldsG v) v
+type AnnSimpleSelG v = AnnSelectG (AnnFieldsG v) v
type AnnSimpleSel = AnnSimpleSelG S.SQLExp
-type AnnAggSelG v = AnnSelG (TableAggFldsG v) v
-type AnnAggSel = AnnAggSelG S.SQLExp
+type AnnAggregateSelectG v = AnnSelectG (TableAggregateFieldsG v) v
+type AnnAggregateSelect = AnnAggregateSelectG S.SQLExp
+
+data ConnectionSlice
+ = SliceFirst !Int
+ | SliceLast !Int
+ deriving (Show, Eq, Generic)
+instance Hashable ConnectionSlice
+
+data ConnectionSplitKind
+ = CSKBefore
+ | CSKAfter
+ deriving (Show, Eq, Generic)
+instance Hashable ConnectionSplitKind
+
+data ConnectionSplit v
+ = ConnectionSplit
+ { _csKind :: !ConnectionSplitKind
+ , _csValue :: !v
+ , _csOrderBy :: !(OrderByItemG (AnnOrderByElementG ()))
+ } deriving (Show, Eq, Functor, Generic, Foldable, Traversable)
+instance (Hashable v) => Hashable (ConnectionSplit v)
+
+traverseConnectionSplit
+ :: (Applicative f)
+ => (a -> f b) -> ConnectionSplit a -> f (ConnectionSplit b)
+traverseConnectionSplit f (ConnectionSplit k v ob) =
+ ConnectionSplit k <$> f v <*> pure ob
+
+data ConnectionSelect v
+ = ConnectionSelect
+ { _csPrimaryKeyColumns :: !(NE.NonEmpty PGColumnInfo)
+ , _csSplit :: !(Maybe (NE.NonEmpty (ConnectionSplit v)))
+ , _csSlice :: !(Maybe ConnectionSlice)
+ , _csSelect :: !(AnnSelectG (ConnectionFields v) v)
+ } deriving (Show, Eq)
+
+traverseConnectionSelect
+ :: (Applicative f)
+ => (a -> f b)
+ -> ConnectionSelect a -> f (ConnectionSelect b)
+traverseConnectionSelect f (ConnectionSelect pkCols cSplit cSlice sel) =
+ ConnectionSelect pkCols
+ <$> traverse (traverse (traverseConnectionSplit f)) cSplit
+ <*> pure cSlice
+ <*> traverseAnnSelect (traverse (traverse (traverseConnectionField f))) f sel
data FunctionArgsExpG a
= FunctionArgsExp
{ _faePositional :: ![a]
, _faeNamed :: !(HM.HashMap Text a)
- } deriving (Show, Eq, Functor, Foldable, Traversable)
+ } deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
+instance (Hashable a) => Hashable (FunctionArgsExpG a)
emptyFunctionArgsExp :: FunctionArgsExpG a
emptyFunctionArgsExp = FunctionArgsExp [] HM.empty
type FunctionArgExp = FunctionArgsExpG S.SQLExp
--- | If argument positional index is less than or equal to length of 'positional' arguments then
--- insert the value in 'positional' arguments else insert the value with argument name in 'named' arguments
+-- | If the argument's positional index is less than or equal to the length of
+-- the 'positional' arguments, then insert the value into the 'positional'
+-- arguments; otherwise insert the value, keyed by its argument name, into the
+-- 'named' arguments.
insertFunctionArg
:: FunctionArgName
-> Int
@@ -396,113 +498,106 @@ insertFunctionArg argName index value (FunctionArgsExp positional named) =
where
insertAt i a = toList . Seq.insertAt i a . Seq.fromList
-data BaseNode
- = BaseNode
- { _bnPrefix :: !Iden
- , _bnDistinct :: !(Maybe S.DistinctExpr)
- , _bnFrom :: !S.FromItem
- , _bnWhere :: !S.BoolExp
- , _bnOrderBy :: !(Maybe S.OrderByExp)
- , _bnLimit :: !(Maybe Int)
- , _bnOffset :: !(Maybe S.SQLExp)
+data SourcePrefixes
+ = SourcePrefixes
+ { _pfThis :: !Iden -- ^ Current source prefix
+ , _pfBase :: !Iden
+ -- ^ Base table source row identifier to generate
+ -- the table's column identifiers for computed field
+ -- function input parameters
+ } deriving (Show, Eq, Generic)
+instance Hashable SourcePrefixes
- , _bnExtrs :: !(HM.HashMap S.Alias S.SQLExp)
- , _bnObjs :: !(HM.HashMap RelName ObjNode)
- , _bnArrs :: !(HM.HashMap S.Alias ArrNode)
- , _bnComputedFieldTables :: !(HM.HashMap FieldName CFTableNode)
+data SelectSource
+ = SelectSource
+ { _ssPrefix :: !Iden
+ , _ssFrom :: !S.FromItem
+ , _ssDistinct :: !(Maybe S.DistinctExpr)
+ , _ssWhere :: !S.BoolExp
+ , _ssOrderBy :: !(Maybe S.OrderByExp)
+ , _ssLimit :: !(Maybe Int)
+ , _ssOffset :: !(Maybe S.SQLExp)
+ } deriving (Show, Eq, Generic)
+instance Hashable SelectSource
+
+data SelectNode
+ = SelectNode
+ { _snExtractors :: !(HM.HashMap S.Alias S.SQLExp)
+ , _snJoinTree :: !JoinTree
} deriving (Show, Eq)
-mergeBaseNodes :: BaseNode -> BaseNode -> BaseNode
-mergeBaseNodes lNodeDet rNodeDet =
- BaseNode pfx dExp f whr ordBy limit offset
- (HM.union lExtrs rExtrs)
- (HM.unionWith mergeObjNodes lObjs rObjs)
- (HM.unionWith mergeArrNodes lArrs rArrs)
- (HM.unionWith mergeCFTableNodes lCFTables rCFTables)
- where
- BaseNode pfx dExp f whr ordBy limit offset lExtrs lObjs lArrs lCFTables
- = lNodeDet
- BaseNode _ _ _ _ _ _ _ rExtrs rObjs rArrs rCFTables
- = rNodeDet
+instance Semigroup SelectNode where
+ SelectNode lExtrs lJoinTree <> SelectNode rExtrs rJoinTree =
+ SelectNode (lExtrs <> rExtrs) (lJoinTree <> rJoinTree)
-data OrderByNode
- = OBNNothing
- | OBNObjNode !RelName !ObjNode
- | OBNArrNode !S.Alias !ArrNode
+data ObjectRelationSource
+ = ObjectRelationSource
+ { _orsRelationshipName :: !RelName
+ , _orsRelationMapping :: !(HM.HashMap PGCol PGCol)
+ , _orsSelectSource :: !SelectSource
+ } deriving (Show, Eq, Generic)
+instance Hashable ObjectRelationSource
+
+data ArrayRelationSource
+ = ArrayRelationSource
+ { _arsAlias :: !S.Alias
+ , _arsRelationMapping :: !(HM.HashMap PGCol PGCol)
+ , _arsSelectSource :: !SelectSource
+ } deriving (Show, Eq, Generic)
+instance Hashable ArrayRelationSource
+
+data ArraySelectNode
+ = ArraySelectNode
+ { _asnTopExtractors :: ![S.Extractor]
+ , _asnSelectNode :: !SelectNode
+ } deriving (Show, Eq)
+
+instance Semigroup ArraySelectNode where
+ ArraySelectNode lTopExtrs lSelNode <> ArraySelectNode rTopExtrs rSelNode =
+ ArraySelectNode (lTopExtrs <> rTopExtrs) (lSelNode <> rSelNode)
+
+data ComputedFieldTableSetSource
+ = ComputedFieldTableSetSource
+ { _cftssFieldName :: !FieldName
+ , _cftssSelectType :: !JsonAggSelect
+ , _cftssSelectSource :: !SelectSource
+ } deriving (Show, Eq, Generic)
+instance Hashable ComputedFieldTableSetSource
+
+data ArrayConnectionSource
+ = ArrayConnectionSource
+ { _acsAlias :: !S.Alias
+ , _acsRelationMapping :: !(HM.HashMap PGCol PGCol)
+ , _acsSplitFilter :: !(Maybe S.BoolExp)
+ , _acsSlice :: !(Maybe ConnectionSlice)
+ , _acsSource :: !SelectSource
+ } deriving (Show, Eq, Generic)
+
+instance Hashable ArrayConnectionSource
+
+data JoinTree
+ = JoinTree
+ { _jtObjectRelations :: !(HM.HashMap ObjectRelationSource SelectNode)
+ , _jtArrayRelations :: !(HM.HashMap ArrayRelationSource ArraySelectNode)
+ , _jtArrayConnections :: !(HM.HashMap ArrayConnectionSource ArraySelectNode)
+ , _jtComputedFieldTableSets :: !(HM.HashMap ComputedFieldTableSetSource SelectNode)
+ } deriving (Show, Eq)
+
+instance Semigroup JoinTree where
+ JoinTree lObjs lArrs lArrConns lCfts <> JoinTree rObjs rArrs rArrConns rCfts =
+ JoinTree (HM.unionWith (<>) lObjs rObjs)
+ (HM.unionWith (<>) lArrs rArrs)
+ (HM.unionWith (<>) lArrConns rArrConns)
+ (HM.unionWith (<>) lCfts rCfts)
+
+instance Monoid JoinTree where
+ mempty = JoinTree mempty mempty mempty mempty
+
+data PermissionLimitSubQuery
+ = PLSQRequired !Int -- ^ Permission limit
+ | PLSQNotRequired
deriving (Show, Eq)
-data ArrRelCtxG v
- = ArrRelCtx
- { aacFields :: !(ArrSelFldsG v)
- , aacAggOrdBys :: ![RelName]
- } deriving (Show, Eq)
-
-type ArrRelCtx = ArrRelCtxG S.SQLExp
-
-emptyArrRelCtx :: ArrRelCtxG a
-emptyArrRelCtx = ArrRelCtx [] []
-
-data ArrNodeItemG v
- = ANIField !(FieldName, ArrSelG v)
- | ANIAggOrdBy !RelName
- deriving (Show, Eq)
-
-type ArrNodeItem = ArrNodeItemG S.SQLExp
-
-data ObjNode
- = ObjNode
- { _rnRelMapping :: !(HashMap PGCol PGCol)
- , _rnNodeDet :: !BaseNode
- } deriving (Show, Eq)
-
-mergeObjNodes :: ObjNode -> ObjNode -> ObjNode
-mergeObjNodes lNode rNode =
- ObjNode colMapping $ mergeBaseNodes lBN rBN
- where
- ObjNode colMapping lBN = lNode
- ObjNode _ rBN = rNode
-
--- simple array select, aggregate select and order by
--- nodes differ in extractors
-data ArrNode
- = ArrNode
- { _anExtr :: ![S.Extractor]
- , _anRelMapping :: !(HashMap PGCol PGCol)
- , _anNodeDet :: !BaseNode
- } deriving (Show, Eq)
-
-mergeArrNodes :: ArrNode -> ArrNode -> ArrNode
-mergeArrNodes lNode rNode =
- ArrNode (lExtrs `union` rExtrs) colMapping $
- mergeBaseNodes lBN rBN
- where
- ArrNode lExtrs colMapping lBN = lNode
- ArrNode rExtrs _ rBN = rNode
-
-data ArrNodeInfo
- = ArrNodeInfo
- { _aniAlias :: !S.Alias
- , _aniPrefix :: !Iden
- , _aniSubQueryRequired :: !Bool
- } deriving (Show, Eq)
-
--- | Node for computed field returning setof
-data CFTableNode
- = CFTableNode
- { _ctnSelectType :: !JsonAggSelect
- , _ctnNode :: !BaseNode
- } deriving (Show, Eq)
-
-mergeCFTableNodes :: CFTableNode -> CFTableNode -> CFTableNode
-mergeCFTableNodes lNode rNode =
- CFTableNode
- (_ctnSelectType rNode)
- (mergeBaseNodes (_ctnNode lNode) (_ctnNode rNode))
-
-data Prefixes
- = Prefixes
- { _pfThis :: !Iden -- Current node prefix
- , _pfBase :: !Iden -- Base table row identifier for computed field function
- } deriving (Show, Eq)
-
-$(makePrisms ''AnnFldG)
+$(makeLenses ''AnnSelectG)
+$(makePrisms ''AnnFieldG)
+$(makePrisms ''AnnOrderByElementG)
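The `merge*` helpers removed above are replaced by `Semigroup`/`Monoid` instances on the new `SelectNode`/`JoinTree` types: sibling joins keyed by their source are merged point-wise with `HM.unionWith (<>)`. A minimal, standalone sketch of that merging pattern follows; the types are simplified stand-ins, not the real definitions.

```
{-# LANGUAGE DeriveGeneric #-}
-- Sketch of the JoinTree merging pattern: nodes keyed by their "source"
-- are merged point-wise with HM.unionWith (<>). Toy types only.
import qualified Data.HashMap.Strict as HM
import           Data.Hashable       (Hashable)
import           GHC.Generics        (Generic)

newtype Source = Source String
  deriving (Eq, Show, Generic)
instance Hashable Source

-- A node just collects extractor names in this sketch.
newtype Node = Node [String] deriving (Show, Eq)
instance Semigroup Node where
  Node l <> Node r = Node (l <> r)

newtype Tree = Tree (HM.HashMap Source Node) deriving (Show, Eq)
instance Semigroup Tree where
  Tree l <> Tree r = Tree (HM.unionWith (<>) l r)
instance Monoid Tree where
  mempty = Tree HM.empty

main :: IO ()
main = print $
     Tree (HM.singleton (Source "author") (Node ["id"]))
  <> Tree (HM.singleton (Source "author") (Node ["name"]))
  -- Tree (fromList [(Source "author", Node ["id","name"])])
```

With `Monoid` in place, collecting the joins contributed by many selected fields reduces to `mconcat`, which is what the ad-hoc merge functions did by hand.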
diff --git a/server/src-lib/Hasura/RQL/Types/Action.hs b/server/src-lib/Hasura/RQL/Types/Action.hs
index 17eee857ebd..796f8e1d821 100644
--- a/server/src-lib/Hasura/RQL/Types/Action.hs
+++ b/server/src-lib/Hasura/RQL/Types/Action.hs
@@ -55,9 +55,9 @@ import Hasura.RQL.DDL.Headers
import Hasura.RQL.DML.Select.Types
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Permission
+import Hasura.Session
import Hasura.SQL.Types
import Language.Haskell.TH.Syntax (Lift)
-import Hasura.Session
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
@@ -273,7 +273,7 @@ data AnnActionExecution v
= AnnActionExecution
{ _aaeName :: !ActionName
, _aaeOutputType :: !GraphQLType -- ^ output type
- , _aaeFields :: !(AnnFldsG v) -- ^ output selection
+ , _aaeFields :: !(AnnFieldsG v) -- ^ output selection
, _aaePayload :: !J.Value -- ^ jsonified input arguments
, _aaeOutputFields :: !ActionOutputFields
-- ^ to validate the response fields from webhook
@@ -292,7 +292,7 @@ data AnnActionMutationAsync
data AsyncActionQueryFieldG v
= AsyncTypename !Text
- | AsyncOutput !(AnnFldsG v)
+ | AsyncOutput !(AnnFieldsG v)
| AsyncId
| AsyncCreatedAt
| AsyncErrors
diff --git a/server/src-lib/Hasura/RQL/Types/BoolExp.hs b/server/src-lib/Hasura/RQL/Types/BoolExp.hs
index b3cd0480f18..f51968a18cb 100644
--- a/server/src-lib/Hasura/RQL/Types/BoolExp.hs
+++ b/server/src-lib/Hasura/RQL/Types/BoolExp.hs
@@ -66,6 +66,7 @@ data GExists a
instance (NFData a) => NFData (GExists a)
instance (Data a) => Plated (GExists a)
instance (Cacheable a) => Cacheable (GExists a)
+instance (Hashable a) => Hashable (GExists a)
gExistsToJSON :: (a -> (Text, Value)) -> GExists a -> Value
gExistsToJSON f (GExists qt wh) =
@@ -92,6 +93,7 @@ data GBoolExp a
instance (NFData a) => NFData (GBoolExp a)
instance (Data a) => Plated (GBoolExp a)
instance (Cacheable a) => Cacheable (GBoolExp a)
+instance (Hashable a) => Hashable (GBoolExp a)
gBoolExpTrue :: GBoolExp a
gBoolExpTrue = BoolAnd []
@@ -143,6 +145,7 @@ data DWithinGeomOp a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (DWithinGeomOp a)
instance (Cacheable a) => Cacheable (DWithinGeomOp a)
+instance (Hashable a) => Hashable (DWithinGeomOp a)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeomOp)
data DWithinGeogOp a =
@@ -153,6 +156,7 @@ data DWithinGeogOp a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (DWithinGeogOp a)
instance (Cacheable a) => Cacheable (DWithinGeogOp a)
+instance (Hashable a) => Hashable (DWithinGeogOp a)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeogOp)
data STIntersectsNbandGeommin a =
@@ -162,6 +166,7 @@ data STIntersectsNbandGeommin a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (STIntersectsNbandGeommin a)
instance (Cacheable a) => Cacheable (STIntersectsNbandGeommin a)
+instance (Hashable a) => Hashable (STIntersectsNbandGeommin a)
$(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsNbandGeommin)
data STIntersectsGeomminNband a =
@@ -171,6 +176,7 @@ data STIntersectsGeomminNband a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (STIntersectsGeomminNband a)
instance (Cacheable a) => Cacheable (STIntersectsGeomminNband a)
+instance (Hashable a) => Hashable (STIntersectsGeomminNband a)
$(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsGeomminNband)
type CastExp a = M.HashMap PGScalarType [OpExpG a]
@@ -230,6 +236,7 @@ data OpExpG a
deriving (Eq, Show, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (OpExpG a)
instance (Cacheable a) => Cacheable (OpExpG a)
+instance (Hashable a) => Hashable (OpExpG a)
opExpDepCol :: OpExpG a -> Maybe PGCol
opExpDepCol = \case
@@ -303,6 +310,7 @@ data AnnBoolExpFld a
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (NFData a) => NFData (AnnBoolExpFld a)
instance (Cacheable a) => Cacheable (AnnBoolExpFld a)
+instance (Hashable a) => Hashable (AnnBoolExpFld a)
type AnnBoolExp a
= GBoolExp (AnnBoolExpFld a)
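The `Hashable` instances added above have empty bodies: these types already derive `Generic`, so the hashable package fills in `hashWithSalt` generically. A small standalone sketch of the same pattern; `BoolOp` is an assumed toy type, not the real `AnnBoolExpFld`.

```
{-# LANGUAGE DeriveGeneric #-}
-- An empty-body Hashable instance works because hashable derives
-- hashWithSalt from the type's Generic representation.
import Data.Hashable (Hashable, hashWithSalt)
import GHC.Generics  (Generic)

data BoolOp a
  = BoolAnd [BoolOp a]
  | BoolOr  [BoolOp a]
  | BoolFld a
  deriving (Show, Eq, Generic)

instance (Hashable a) => Hashable (BoolOp a)

main :: IO ()
main = print (hashWithSalt 0 (BoolAnd [BoolFld (1 :: Int)]))
```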
diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs
index 0316d2062f7..1a3d8a8e161 100644
--- a/server/src-lib/Hasura/RQL/Types/Common.hs
+++ b/server/src-lib/Hasura/RQL/Types/Common.hs
@@ -143,6 +143,7 @@ data RelInfo
} deriving (Show, Eq, Generic)
instance NFData RelInfo
instance Cacheable RelInfo
+instance Hashable RelInfo
$(deriveToJSON (aesonDrop 2 snakeCase) ''RelInfo)
newtype FieldName
@@ -150,6 +151,7 @@ newtype FieldName
deriving ( Show, Eq, Ord, Hashable, FromJSON, ToJSON
, FromJSONKey, ToJSONKey, Lift, Data, Generic
, IsString, Arbitrary, NFData, Cacheable
+ , Semigroup
)
instance IsIden FieldName where
@@ -212,7 +214,7 @@ data PrimaryKey a
= PrimaryKey
{ _pkConstraint :: !Constraint
, _pkColumns :: !(NESeq a)
- } deriving (Show, Eq, Generic)
+ } deriving (Show, Eq, Generic, Foldable)
instance (NFData a) => NFData (PrimaryKey a)
instance (Cacheable a) => Cacheable (PrimaryKey a)
$(makeLenses ''PrimaryKey)
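`FieldName` gains `Semigroup` via newtype deriving, so two field names concatenate like the underlying text. A tiny sketch of the idea, using `String` instead of the real `Text` wrapper:

```
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- The Semigroup instance is lifted straight from the wrapped type.
newtype FieldName = FieldName String
  deriving (Show, Eq, Semigroup)

main :: IO ()
main = print (FieldName "author" <> FieldName "_id")
-- FieldName "author_id"
```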
diff --git a/server/src-lib/Hasura/RQL/Types/DML.hs b/server/src-lib/Hasura/RQL/Types/DML.hs
index 7851770f2fb..e19e5436988 100644
--- a/server/src-lib/Hasura/RQL/Types/DML.hs
+++ b/server/src-lib/Hasura/RQL/Types/DML.hs
@@ -101,6 +101,7 @@ instance (FromJSON a) => FromJSON (DMLQuery a) where
newtype OrderType
= OrderType { unOrderType :: S.OrderType }
deriving (Show, Eq, Lift, Generic)
+instance Hashable OrderType
instance FromJSON OrderType where
parseJSON =
@@ -112,6 +113,7 @@ instance FromJSON OrderType where
newtype NullsOrder
= NullsOrder { unNullsOrder :: S.NullsOrder }
deriving (Show, Eq, Lift, Generic)
+instance Hashable NullsOrder
instance FromJSON NullsOrder where
parseJSON =
@@ -176,7 +178,8 @@ data OrderByItemG a
{ obiType :: !(Maybe OrderType)
, obiColumn :: !a
, obiNulls :: !(Maybe NullsOrder)
- } deriving (Show, Eq, Lift, Functor, Foldable, Traversable)
+ } deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Generic)
+instance (Hashable a) => Hashable (OrderByItemG a)
type OrderByItem = OrderByItemG OrderByCol
diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs
index f91937603b0..c244a02420c 100644
--- a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs
+++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs
@@ -133,8 +133,8 @@ import Hasura.RQL.Types.QueryCollection
import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.SchemaCacheTypes
import Hasura.RQL.Types.Table
-import Hasura.SQL.Types
import Hasura.Session
+import Hasura.SQL.Types
import Data.Aeson
import Data.Aeson.Casing
@@ -198,18 +198,18 @@ type ActionCache = M.HashMap ActionName ActionInfo -- info of all actions
data SchemaCache
= SchemaCache
- { scTables :: !TableCache
- , scActions :: !ActionCache
- , scFunctions :: !FunctionCache
- , scRemoteSchemas :: !RemoteSchemaMap
- , scAllowlist :: !(HS.HashSet GQLQuery)
- , scGQLContext :: !(HashMap RoleName GQLContext)
- , scUnauthenticatedGQLContext :: !GQLContext
- , scRelayContext :: !(HashMap RoleName GQLContext)
+ { scTables :: !TableCache
+ , scActions :: !ActionCache
+ , scFunctions :: !FunctionCache
+ , scRemoteSchemas :: !RemoteSchemaMap
+ , scAllowlist :: !(HS.HashSet GQLQuery)
+ , scGQLContext :: !(HashMap RoleName GQLContext)
+ , scUnauthenticatedGQLContext :: !GQLContext
+ , scRelayContext :: !(HashMap RoleName GQLContext)
, scUnauthenticatedRelayContext :: !GQLContext
-- , scCustomTypes :: !(NonObjectTypeMap, AnnotatedObjects)
- , scDepMap :: !DepMap
- , scInconsistentObjs :: ![InconsistentMetadata]
+ , scDepMap :: !DepMap
+ , scInconsistentObjs :: ![InconsistentMetadata]
}
$(deriveToJSON (aesonDrop 2 snakeCase) ''SchemaCache)
diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs
index a0ab184bcb3..d49fc310b9a 100644
--- a/server/src-lib/Hasura/SQL/DML.hs
+++ b/server/src-lib/Hasura/SQL/DML.hs
@@ -35,6 +35,7 @@ data Select
} deriving (Show, Eq, Generic, Data)
instance NFData Select
instance Cacheable Select
+instance Hashable Select
mkSelect :: Select
mkSelect = Select Nothing [] Nothing
@@ -43,7 +44,7 @@ mkSelect = Select Nothing [] Nothing
newtype LimitExp
= LimitExp SQLExp
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL LimitExp where
toSQL (LimitExp se) =
@@ -51,15 +52,15 @@ instance ToSQL LimitExp where
newtype OffsetExp
= OffsetExp SQLExp
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL OffsetExp where
toSQL (OffsetExp se) =
"OFFSET" <-> toSQL se
newtype OrderByExp
- = OrderByExp [OrderByItem]
- deriving (Show, Eq, NFData, Data, Cacheable)
+ = OrderByExp (NonEmpty OrderByItem)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
data OrderByItem
= OrderByItem
@@ -69,6 +70,7 @@ data OrderByItem
} deriving (Show, Eq, Generic, Data)
instance NFData OrderByItem
instance Cacheable OrderByItem
+instance Hashable OrderByItem
instance ToSQL OrderByItem where
toSQL (OrderByItem e ot no) =
@@ -78,6 +80,7 @@ data OrderType = OTAsc | OTDesc
deriving (Show, Eq, Lift, Generic, Data)
instance NFData OrderType
instance Cacheable OrderType
+instance Hashable OrderType
instance ToSQL OrderType where
toSQL OTAsc = "ASC"
@@ -89,6 +92,7 @@ data NullsOrder
deriving (Show, Eq, Lift, Generic, Data)
instance NFData NullsOrder
instance Cacheable NullsOrder
+instance Hashable NullsOrder
instance ToSQL NullsOrder where
toSQL NFirst = "NULLS FIRST"
@@ -96,11 +100,11 @@ instance ToSQL NullsOrder where
instance ToSQL OrderByExp where
toSQL (OrderByExp l) =
- "ORDER BY" <-> (", " <+> l)
+ "ORDER BY" <-> (", " <+> toList l)
newtype GroupByExp
= GroupByExp [SQLExp]
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL GroupByExp where
toSQL (GroupByExp idens) =
@@ -108,7 +112,7 @@ instance ToSQL GroupByExp where
newtype FromExp
= FromExp [FromItem]
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL FromExp where
toSQL (FromExp items) =
@@ -148,7 +152,7 @@ mkRowExp extrs = let
newtype HavingExp
= HavingExp BoolExp
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL HavingExp where
toSQL (HavingExp be) =
@@ -156,7 +160,7 @@ instance ToSQL HavingExp where
newtype WhereFrag
= WhereFrag { getWFBoolExp :: BoolExp }
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL WhereFrag where
toSQL (WhereFrag be) =
@@ -188,6 +192,7 @@ data Qual
deriving (Show, Eq, Generic, Data)
instance NFData Qual
instance Cacheable Qual
+instance Hashable Qual
mkQual :: QualifiedTable -> Qual
mkQual = QualTable
@@ -205,6 +210,7 @@ data QIden
deriving (Show, Eq, Generic, Data)
instance NFData QIden
instance Cacheable QIden
+instance Hashable QIden
instance ToSQL QIden where
toSQL (QIden qual iden) =
@@ -212,7 +218,7 @@ instance ToSQL QIden where
newtype SQLOp
= SQLOp {sqlOpTxt :: T.Text}
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
incOp :: SQLOp
incOp = SQLOp "+"
@@ -234,7 +240,7 @@ jsonbDeleteAtPathOp = SQLOp "#-"
newtype TypeAnn
= TypeAnn { unTypeAnn :: T.Text }
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL TypeAnn where
toSQL (TypeAnn ty) = "::" <> TB.text ty
@@ -260,6 +266,9 @@ jsonTypeAnn = mkTypeAnn $ PGTypeScalar PGJSON
jsonbTypeAnn :: TypeAnn
jsonbTypeAnn = mkTypeAnn $ PGTypeScalar PGJSONB
+boolTypeAnn :: TypeAnn
+boolTypeAnn = mkTypeAnn $ PGTypeScalar PGBoolean
+
data CountType
= CTStar
| CTSimple ![PGCol]
@@ -267,6 +276,7 @@ data CountType
deriving (Show, Eq, Generic, Data)
instance NFData CountType
instance Cacheable CountType
+instance Hashable CountType
instance ToSQL CountType where
toSQL CTStar = "*"
@@ -277,7 +287,7 @@ instance ToSQL CountType where
newtype TupleExp
= TupleExp [SQLExp]
- deriving (Show, Eq, NFData, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL TupleExp where
toSQL (TupleExp exps) =
@@ -302,6 +312,7 @@ data SQLExp
| SEBool !BoolExp
| SEExcluded !Iden
| SEArray ![SQLExp]
+ | SEArrayIndex !SQLExp !SQLExp
| SETuple !TupleExp
| SECount !CountType
| SENamedArg !Iden !SQLExp
@@ -309,6 +320,7 @@ data SQLExp
deriving (Show, Eq, Generic, Data)
instance NFData SQLExp
instance Cacheable SQLExp
+instance Hashable SQLExp
withTyAnn :: PGScalarType -> SQLExp -> SQLExp
withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeScalar colTy
@@ -318,7 +330,7 @@ instance J.ToJSON SQLExp where
newtype Alias
= Alias { getAlias :: Iden }
- deriving (Show, Eq, NFData, Hashable, Data, Cacheable)
+ deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance IsIden Alias where
toIden (Alias iden) = iden
@@ -370,6 +382,9 @@ instance ToSQL SQLExp where
<> toSQL i
toSQL (SEArray exps) = "ARRAY" <> TB.char '['
<> (", " <+> exps) <> TB.char ']'
+ toSQL (SEArrayIndex arrayExp indexExp) =
+ paren (toSQL arrayExp)
+ <> TB.char '[' <> toSQL indexExp <> TB.char ']'
toSQL (SETuple tup) = toSQL tup
toSQL (SECount ty) = "COUNT" <> paren (toSQL ty)
-- https://www.postgresql.org/docs/current/sql-syntax-calling-funcs.html
@@ -384,6 +399,7 @@ data Extractor = Extractor !SQLExp !(Maybe Alias)
deriving (Show, Eq, Generic, Data)
instance NFData Extractor
instance Cacheable Extractor
+instance Hashable Extractor
mkSQLOpExp
:: SQLOp
@@ -431,6 +447,7 @@ data DistinctExpr
deriving (Show, Eq, Generic, Data)
instance NFData DistinctExpr
instance Cacheable DistinctExpr
+instance Hashable DistinctExpr
instance ToSQL DistinctExpr where
toSQL DistinctSimple = "DISTINCT"
@@ -444,6 +461,7 @@ data FunctionArgs
} deriving (Show, Eq, Generic, Data)
instance NFData FunctionArgs
instance Cacheable FunctionArgs
+instance Hashable FunctionArgs
instance ToSQL FunctionArgs where
toSQL (FunctionArgs positionalArgs namedArgsMap) =
@@ -458,6 +476,7 @@ data DefinitionListItem
} deriving (Show, Eq, Data, Generic)
instance NFData DefinitionListItem
instance Cacheable DefinitionListItem
+instance Hashable DefinitionListItem
instance ToSQL DefinitionListItem where
toSQL (DefinitionListItem column columnType) =
@@ -470,6 +489,7 @@ data FunctionAlias
} deriving (Show, Eq, Data, Generic)
instance NFData FunctionAlias
instance Cacheable FunctionAlias
+instance Hashable FunctionAlias
mkSimpleFunctionAlias :: Iden -> FunctionAlias
mkSimpleFunctionAlias identifier =
@@ -494,6 +514,7 @@ data FunctionExp
} deriving (Show, Eq, Generic, Data)
instance NFData FunctionExp
instance Cacheable FunctionExp
+instance Hashable FunctionExp
instance ToSQL FunctionExp where
toSQL (FunctionExp qf args alsM) =
@@ -505,11 +526,13 @@ data FromItem
| FIFunc !FunctionExp
| FIUnnest ![SQLExp] !Alias ![SQLExp]
| FISelect !Lateral !Select !Alias
+ | FISelectWith !Lateral !(SelectWithG Select) !Alias
| FIValues !ValuesExp !Alias !(Maybe [PGCol])
| FIJoin !JoinExpr
deriving (Show, Eq, Generic, Data)
instance NFData FromItem
instance Cacheable FromItem
+instance Hashable FromItem
mkSelFromItem :: Select -> Alias -> FromItem
mkSelFromItem = FISelect (Lateral False)
@@ -532,6 +555,8 @@ instance ToSQL FromItem where
"UNNEST" <> paren (", " <+> args) <-> toSQL als <> paren (", " <+> cols)
toSQL (FISelect mla sel al) =
toSQL mla <-> paren (toSQL sel) <-> toSQL al
+ toSQL (FISelectWith mla selWith al) =
+ toSQL mla <-> paren (toSQL selWith) <-> toSQL al
toSQL (FIValues valsExp al mCols) =
paren (toSQL valsExp) <-> toSQL al
<-> toSQL (toColTupExp <$> mCols)
@@ -539,7 +564,7 @@ instance ToSQL FromItem where
toSQL je
newtype Lateral = Lateral Bool
- deriving (Show, Eq, Data, NFData, Cacheable)
+ deriving (Show, Eq, Data, NFData, Cacheable, Hashable)
instance ToSQL Lateral where
toSQL (Lateral True) = "LATERAL"
@@ -554,6 +579,7 @@ data JoinExpr
} deriving (Show, Eq, Generic, Data)
instance NFData JoinExpr
instance Cacheable JoinExpr
+instance Hashable JoinExpr
instance ToSQL JoinExpr where
toSQL je =
@@ -570,6 +596,7 @@ data JoinType
deriving (Eq, Show, Generic, Data)
instance NFData JoinType
instance Cacheable JoinType
+instance Hashable JoinType
instance ToSQL JoinType where
toSQL Inner = "INNER JOIN"
@@ -583,6 +610,7 @@ data JoinCond
deriving (Show, Eq, Generic, Data)
instance NFData JoinCond
instance Cacheable JoinCond
+instance Hashable JoinCond
instance ToSQL JoinCond where
toSQL (JoinOn be) =
@@ -606,6 +634,7 @@ data BoolExp
deriving (Show, Eq, Generic, Data)
instance NFData BoolExp
instance Cacheable BoolExp
+instance Hashable BoolExp
-- removes extraneous 'AND true's
simplifyBoolExp :: BoolExp -> BoolExp
@@ -661,6 +690,7 @@ data BinOp = AndOp | OrOp
deriving (Show, Eq, Generic, Data)
instance NFData BinOp
instance Cacheable BinOp
+instance Hashable BinOp
instance ToSQL BinOp where
toSQL AndOp = "AND"
@@ -689,6 +719,7 @@ data CompareOp
deriving (Eq, Generic, Data)
instance NFData CompareOp
instance Cacheable CompareOp
+instance Hashable CompareOp
instance Show CompareOp where
show = \case
@@ -835,7 +866,7 @@ instance ToSQL SQLConflict where
newtype ValuesExp
= ValuesExp [TupleExp]
- deriving (Show, Eq, Data, NFData, Cacheable)
+ deriving (Show, Eq, Data, NFData, Cacheable, Hashable)
instance ToSQL ValuesExp where
toSQL (ValuesExp tuples) =
@@ -874,14 +905,20 @@ instance ToSQL CTE where
CTEUpdate q -> toSQL q
CTEDelete q -> toSQL q
-data SelectWith
+data SelectWithG v
= SelectWith
- { swCTEs :: [(Alias, CTE)]
+ { swCTEs :: ![(Alias, v)]
, swSelect :: !Select
- } deriving (Show, Eq)
+ } deriving (Show, Eq, Generic, Data)
-instance ToSQL SelectWith where
+instance (NFData v) => NFData (SelectWithG v)
+instance (Cacheable v) => Cacheable (SelectWithG v)
+instance (Hashable v) => Hashable (SelectWithG v)
+
+instance (ToSQL v) => ToSQL (SelectWithG v) where
toSQL (SelectWith ctes sel) =
"WITH " <> (", " <+> map f ctes) <-> toSQL sel
where
f (Alias al, q) = toSQL al <-> "AS" <-> paren (toSQL q)
+
+type SelectWith = SelectWithG CTE
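The new `SEArrayIndex` constructor renders to Postgres array-subscript syntax: the array expression is parenthesised and the index appended in square brackets, per the `toSQL` clause above. A string-based sketch of that rendering rule (plain `String` in place of the real text builder):

```
-- Sketch of the SEArrayIndex rendering rule: "(array)[index]".
paren :: String -> String
paren s = "(" <> s <> ")"

renderArrayIndex :: String -> String -> String
renderArrayIndex arrayExp indexExp =
  paren arrayExp <> "[" <> indexExp <> "]"

main :: IO ()
main = putStrLn (renderArrayIndex "ARRAY[10, 20, 30]" "2")
-- prints: (ARRAY[10, 20, 30])[2]
```

`FISelectWith` follows the same shape as `FISelect`: the whole `WITH ... SELECT` block is parenthesised and aliased when it appears in a `FROM` list.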
diff --git a/server/src-lib/Hasura/SQL/Rewrite.hs b/server/src-lib/Hasura/SQL/Rewrite.hs
index 6235709b1ea..d7ced56ecc5 100644
--- a/server/src-lib/Hasura/SQL/Rewrite.hs
+++ b/server/src-lib/Hasura/SQL/Rewrite.hs
@@ -1,5 +1,6 @@
module Hasura.SQL.Rewrite
( prefixNumToAliases
+ , prefixNumToAliasesSelectWith
) where
import qualified Data.HashMap.Strict as Map
@@ -20,6 +21,11 @@ prefixNumToAliases :: S.Select -> S.Select
prefixNumToAliases s =
uSelect s `evalState` UniqSt 0 Map.empty
+prefixNumToAliasesSelectWith
+ :: S.SelectWithG S.Select -> S.SelectWithG S.Select
+prefixNumToAliasesSelectWith s =
+ uSelectWith s `evalState` UniqSt 0 Map.empty
+
type Rewrite a = State a
data UniqSt
@@ -56,6 +62,12 @@ restoringIdens action = do
modify' $ \s -> s { _uqIdens = idens }
return res
+uSelectWith :: S.SelectWithG S.Select -> Uniq (S.SelectWithG S.Select)
+uSelectWith (S.SelectWith ctes baseSelect) =
+ S.SelectWith
+ <$> forM ctes (\(als, sel) -> (als,) <$> restoringIdens (uSelect sel))
+ <*> uSelect baseSelect
+
uSelect :: S.Select -> Uniq S.Select
uSelect sel = do
-- this has to be the first thing to process
@@ -113,6 +125,10 @@ uFromItem fromItem = case fromItem of
newSel <- restoringIdens $ uSelect sel
newAls <- addAlias al
return $ S.FISelect isLateral newSel newAls
+ S.FISelectWith isLateral selectWith al -> do
+ newSelectWith <- uSelectWith selectWith
+ newAls <- addAlias al
+ return $ S.FISelectWith isLateral newSelectWith newAls
S.FIValues (S.ValuesExp tups) als mCols -> do
newValExp <- fmap S.ValuesExp $
forM tups $ \(S.TupleExp ts) ->
@@ -196,8 +212,10 @@ uSqlExp = restoringIdens . \case
S.SEExcluded <$> return t
S.SEArray l ->
S.SEArray <$> mapM uSqlExp l
+ S.SEArrayIndex arrayExp indexExp ->
+ S.SEArrayIndex <$> uSqlExp arrayExp <*> uSqlExp indexExp
S.SETuple (S.TupleExp l) ->
- S.SEArray <$> mapM uSqlExp l
+ S.SETuple . S.TupleExp <$> mapM uSqlExp l
S.SECount cty -> return $ S.SECount cty
S.SENamedArg arg val -> S.SENamedArg arg <$> uSqlExp val
S.SEFunction funcExp -> S.SEFunction <$> uFunctionExp funcExp
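`prefixNumToAliasesSelectWith` reuses the same `State`-based renaming machinery as `prefixNumToAliases`, threading a counter and an identifier map through each CTE and the base select. A minimal sketch of that alias-numbering idea with simplified standalone types (not the real AST):

```
-- Hand out numbered aliases while remembering the old-to-new mapping,
-- mirroring the UniqSt state used by the rewriter above.
import           Control.Monad.State
import qualified Data.Map.Strict     as Map

type Uniq = State (Int, Map.Map String String)

addAlias :: String -> Uniq String
addAlias old = do
  (n, m) <- get
  let new = "_alias_" <> show n
  put (n + 1, Map.insert old new m)
  pure new

renameAll :: [String] -> [String]
renameAll aliases = evalState (mapM addAlias aliases) (0, Map.empty)

main :: IO ()
main = print (renameAll ["author", "articles", "author"])
-- ["_alias_0","_alias_1","_alias_2"]
```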
diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs
index a72b1829e75..149875d28f6 100644
--- a/server/src-lib/Hasura/Server/App.hs
+++ b/server/src-lib/Hasura/Server/App.hs
@@ -3,68 +3,68 @@
module Hasura.Server.App where
import Control.Concurrent.MVar.Lifted
-import Control.Exception (IOException, try)
-import Control.Lens (view, _2)
+import Control.Exception (IOException, try)
+import Control.Lens (view, _2)
import Control.Monad.Stateless
-import Control.Monad.Trans.Control (MonadBaseControl)
-import Data.Aeson hiding (json)
-import Data.Either (isRight)
-import Data.Int (Int64)
+import Control.Monad.Trans.Control (MonadBaseControl)
+import Data.Aeson hiding (json)
+import Data.Either (isRight)
+import Data.Int (Int64)
import Data.IORef
-import Data.Time.Clock (UTCTime)
-import Data.Time.Clock.POSIX (getPOSIXTime)
-import Network.Mime (defaultMimeLookup)
-import System.Exit (exitFailure)
-import System.FilePath (joinPath, takeFileName)
-import Web.Spock.Core ((/>))
+import Data.Time.Clock (UTCTime)
+import Data.Time.Clock.POSIX (getPOSIXTime)
+import Network.Mime (defaultMimeLookup)
+import System.Exit (exitFailure)
+import System.FilePath (joinPath, takeFileName)
+import Web.Spock.Core ((/>))
-import qualified Control.Concurrent.Async.Lifted.Safe as LA
-import qualified Data.ByteString.Lazy as BL
-import qualified Data.CaseInsensitive as CI
-import qualified Data.HashMap.Strict as M
-import qualified Data.HashSet as S
-import qualified Data.Text as T
-import qualified Database.PG.Query as Q
-import qualified Network.HTTP.Client as HTTP
-import qualified Network.HTTP.Types as HTTP
-import qualified Network.Wai as Wai
-import qualified Network.Wai.Handler.WebSockets as WS
-import qualified Network.WebSockets as WS
-import qualified System.Metrics as EKG
-import qualified System.Metrics.Json as EKG
-import qualified Text.Mustache as M
-import qualified Web.Spock.Core as Spock
+import qualified Control.Concurrent.Async.Lifted.Safe as LA
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.CaseInsensitive as CI
+import qualified Data.HashMap.Strict as M
+import qualified Data.HashSet as S
+import qualified Data.Text as T
+import qualified Database.PG.Query as Q
+import qualified Network.HTTP.Client as HTTP
+import qualified Network.HTTP.Types as HTTP
+import qualified Network.Wai as Wai
+import qualified Network.Wai.Handler.WebSockets as WS
+import qualified Network.WebSockets as WS
+import qualified System.Metrics as EKG
+import qualified System.Metrics.Json as EKG
+import qualified Text.Mustache as M
+import qualified Web.Spock.Core as Spock
import Hasura.EncJSON
-- import Hasura.GraphQL.Resolve.Action
import Hasura.GraphQL.Logging (MonadQueryLog (..))
import Hasura.HTTP
-import Hasura.Prelude hiding (get, put)
+import Hasura.Prelude hiding (get, put)
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.RQL.Types.Run
-import Hasura.Server.API.Config (runGetConfig)
+import Hasura.Server.API.Config (runGetConfig)
import Hasura.Server.API.Query
-import Hasura.Server.Auth (AuthMode (..), UserAuthentication (..))
+import Hasura.Server.Auth (AuthMode (..), UserAuthentication (..))
import Hasura.Server.Compression
import Hasura.Server.Cors
import Hasura.Server.Init
import Hasura.Server.Logging
-import Hasura.Server.Middleware (corsMiddleware)
+import Hasura.Server.Middleware (corsMiddleware)
import Hasura.Server.Utils
import Hasura.Server.Version
-import Hasura.SQL.Types
import Hasura.Session
+import Hasura.SQL.Types
-import qualified Hasura.GraphQL.Execute as E
-import qualified Hasura.GraphQL.Execute.LiveQuery as EL
+import qualified Hasura.GraphQL.Execute as E
+import qualified Hasura.GraphQL.Execute.LiveQuery as EL
-- import qualified Hasura.GraphQL.Explain as GE
-import qualified Hasura.GraphQL.Transport.HTTP as GH
-import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
-import qualified Hasura.GraphQL.Transport.WebSocket as WS
+import qualified Hasura.GraphQL.Transport.HTTP as GH
+import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
+import qualified Hasura.GraphQL.Transport.WebSocket as WS
import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
-import qualified Hasura.Logging as L
-import qualified Hasura.Server.API.PGDump as PGD
+import qualified Hasura.Logging as L
+import qualified Hasura.Server.API.PGDump as PGD
import qualified Network.Wai.Handler.WebSockets.Custom as WSC
@@ -350,7 +350,8 @@ v1Alpha1GQHandler queryType query = do
v1GQHandler
:: (HasVersion, MonadIO m)
- => GH.GQLBatchedReqs GH.GQLQueryText -> Handler m (HttpResponse EncJSON)
+ => GH.GQLBatchedReqs GH.GQLQueryText
+ -> Handler m (HttpResponse EncJSON)
v1GQHandler = v1Alpha1GQHandler E.QueryHasura
v1GQRelayHandler
diff --git a/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_1.yaml b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_1.yaml
new file mode 100644
index 00000000000..b64f95d0ac6
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_1.yaml
@@ -0,0 +1,49 @@
+description: Get last page of articles with 3 items
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ article_connection(
+ last: 3
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ title
+ content
+ author_id
+ }
+ }
+ }
+ }
+response:
+ data:
+ article_connection:
+ pageInfo:
+ startCursor: eyJpZCIgOiA0fQ==
+ endCursor: eyJpZCIgOiA2fQ==
+ hasPreviousPage: true
+ hasNextPage: false
+ edges:
+ - cursor: eyJpZCIgOiA0fQ==
+ node:
+ title: Article 4
+ content: Sample article content 4
+ author_id: 2
+ - cursor: eyJpZCIgOiA1fQ==
+ node:
+ title: Article 5
+ content: Sample article content 5
+ author_id: 2
+ - cursor: eyJpZCIgOiA2fQ==
+ node:
+ title: Article 6
+ content: Sample article content 6
+ author_id: 3
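The cursor values in these Relay fixtures are base64-encoded JSON of the row's ordering key (here just the primary key); this format is inferred from the test data rather than stated in the diff. A throwaway decoder for one of the cursors above, assuming the base64-bytestring package:

```
-- Decode one fixture cursor to see what it encodes.
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8  as BS

main :: IO ()
main =
  case B64.decode (BS.pack "eyJpZCIgOiA0fQ==") of
    Left err      -> putStrLn ("bad cursor: " <> err)
    Right decoded -> BS.putStrLn decoded   -- {"id" : 4}
```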
diff --git a/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_2.yaml b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_2.yaml
new file mode 100644
index 00000000000..29b9c89a62b
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_2.yaml
@@ -0,0 +1,45 @@
+description: Get last page of articles with 2 items before 'Article 4'
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ article_connection(
+ last: 2
+ before: "eyJpZCIgOiA0fQ=="
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ title
+ content
+ author_id
+ }
+ }
+ }
+ }
+response:
+ data:
+ article_connection:
+ pageInfo:
+ startCursor: eyJpZCIgOiAyfQ==
+ endCursor: eyJpZCIgOiAzfQ==
+ hasPreviousPage: true
+ hasNextPage: true
+ edges:
+ - cursor: eyJpZCIgOiAyfQ==
+ node:
+ title: Article 2
+ content: Sample article content 2
+ author_id: 1
+ - cursor: eyJpZCIgOiAzfQ==
+ node:
+ title: Article 3
+ content: Sample article content 3
+ author_id: 1
diff --git a/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_3.yaml b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_3.yaml
new file mode 100644
index 00000000000..22a75403f71
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/backward/page_3.yaml
@@ -0,0 +1,40 @@
+description: Get last page of articles before 'Article 2'
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ article_connection(
+ last: 2
+ before: "eyJpZCIgOiAyfQ=="
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ title
+ content
+ author_id
+ }
+ }
+ }
+ }
+response:
+ data:
+ article_connection:
+ pageInfo:
+ startCursor: eyJpZCIgOiAxfQ==
+ endCursor: eyJpZCIgOiAxfQ==
+ hasPreviousPage: false
+ hasNextPage: true
+ edges:
+ - cursor: eyJpZCIgOiAxfQ==
+ node:
+ title: Article 1
+ content: Sample article content 1
+ author_id: 1
diff --git a/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_1.yaml b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_1.yaml
new file mode 100644
index 00000000000..91d3c5637e2
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_1.yaml
@@ -0,0 +1,49 @@
+description: Get 1st page of articles with 3 items
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ article_connection(
+ first: 3
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ title
+ content
+ author_id
+ }
+ }
+ }
+ }
+response:
+ data:
+ article_connection:
+ pageInfo:
+ startCursor: eyJpZCIgOiAxfQ==
+ endCursor: eyJpZCIgOiAzfQ==
+ hasPreviousPage: false
+ hasNextPage: true
+ edges:
+ - cursor: eyJpZCIgOiAxfQ==
+ node:
+ title: Article 1
+ content: Sample article content 1
+ author_id: 1
+ - cursor: eyJpZCIgOiAyfQ==
+ node:
+ title: Article 2
+ content: Sample article content 2
+ author_id: 1
+ - cursor: eyJpZCIgOiAzfQ==
+ node:
+ title: Article 3
+ content: Sample article content 3
+ author_id: 1
diff --git a/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_2.yaml b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_2.yaml
new file mode 100644
index 00000000000..7814e65e4f3
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_2.yaml
@@ -0,0 +1,45 @@
+description: Get 2nd page of articles with 2 items
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ article_connection(
+ first: 2
+ after: "eyJpZCIgOiAzfQ=="
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ title
+ content
+ author_id
+ }
+ }
+ }
+ }
+response:
+ data:
+ article_connection:
+ pageInfo:
+ startCursor: eyJpZCIgOiA0fQ==
+ endCursor: eyJpZCIgOiA1fQ==
+ hasPreviousPage: true
+ hasNextPage: true
+ edges:
+ - cursor: eyJpZCIgOiA0fQ==
+ node:
+ title: Article 4
+ content: Sample article content 4
+ author_id: 2
+ - cursor: eyJpZCIgOiA1fQ==
+ node:
+ title: Article 5
+ content: Sample article content 5
+ author_id: 2
diff --git a/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_3.yaml b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_3.yaml
new file mode 100644
index 00000000000..4b91442a002
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/article_pagination_no_orderby/forward/page_3.yaml
@@ -0,0 +1,40 @@
+description: Get 3rd page of articles
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ article_connection(
+ first: 3
+ after: "eyJpZCIgOiA1fQ=="
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ title
+ content
+ author_id
+ }
+ }
+ }
+ }
+response:
+ data:
+ article_connection:
+ pageInfo:
+ startCursor: eyJpZCIgOiA2fQ==
+ endCursor: eyJpZCIgOiA2fQ==
+ hasPreviousPage: true
+ hasNextPage: false
+ edges:
+ - cursor: eyJpZCIgOiA2fQ==
+ node:
+ title: Article 6
+ content: Sample article content 6
+ author_id: 3
diff --git a/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_1.yaml b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_1.yaml
new file mode 100644
index 00000000000..ee0d0b66c2b
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_1.yaml
@@ -0,0 +1,44 @@
+description: Fetch the 1st page from the end of authors ordered by their article count
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ last: 1
+ order_by: {articles_aggregate: {count: asc}}
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ name
+ articles_aggregate{
+ aggregate{
+ count
+ }
+ }
+ }
+ }
+ }
+ }
+response:
+ data:
+ author_connection:
+ pageInfo:
+ startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
+ endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
+ hasPreviousPage: true
+ hasNextPage: false
+ edges:
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
+ node:
+ name: Author 1
+ articles_aggregate:
+ aggregate:
+ count: 3
diff --git a/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_2.yaml b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_2.yaml
new file mode 100644
index 00000000000..6e72d8c29ac
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_2.yaml
@@ -0,0 +1,51 @@
+description: Fetch the 2nd page from the end of authors ordered by their article count
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ last: 2
+ order_by: {articles_aggregate: {count: asc}}
+ before: "eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9"
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ name
+ articles_aggregate{
+ aggregate{
+ count
+ }
+ }
+ }
+ }
+ }
+ }
+response:
+ data:
+ author_connection:
+ pageInfo:
+ startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
+ endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
+ hasPreviousPage: true
+ hasNextPage: true
+ edges:
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
+ node:
+ name: Author 3
+ articles_aggregate:
+ aggregate:
+ count: 1
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
+ node:
+ name: Author 2
+ articles_aggregate:
+ aggregate:
+ count: 2
diff --git a/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_3.yaml b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_3.yaml
new file mode 100644
index 00000000000..d71acbe1d0a
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/backward/page_3.yaml
@@ -0,0 +1,45 @@
+description: Fetch the 3rd page from the end of authors ordered by their article count
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ last: 1
+ order_by: {articles_aggregate: {count: asc}}
+ before: "eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9"
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ name
+ articles_aggregate{
+ aggregate{
+ count
+ }
+ }
+ }
+ }
+ }
+ }
+response:
+ data:
+ author_connection:
+ pageInfo:
+ startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
+ endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
+ hasPreviousPage: false
+ hasNextPage: true
+ edges:
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
+ node:
+ name: Author 4
+ articles_aggregate:
+ aggregate:
+ count: 0
diff --git a/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/forward/page_1.yaml b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/forward/page_1.yaml
new file mode 100644
index 00000000000..a47b32a898c
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/forward/page_1.yaml
@@ -0,0 +1,50 @@
+description: Fetch the 1st page of authors ordered by their article count
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ first: 2
+ order_by: {articles_aggregate: {count: asc}}
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ name
+ articles_aggregate{
+ aggregate{
+ count
+ }
+ }
+ }
+ }
+ }
+ }
+response:
+ data:
+ author_connection:
+ pageInfo:
+ startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
+ endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
+ hasPreviousPage: false
+ hasNextPage: true
+ edges:
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
+ node:
+ name: Author 4
+ articles_aggregate:
+ aggregate:
+ count: 0
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
+ node:
+ name: Author 3
+ articles_aggregate:
+ aggregate:
+ count: 1
diff --git a/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/forward/page_2.yaml b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/forward/page_2.yaml
new file mode 100644
index 00000000000..196a12cd7a3
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/author_pagination_articles_aggregate_orderby/forward/page_2.yaml
@@ -0,0 +1,51 @@
+description: Fetch the 2nd page of authors ordered by their article count
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ first: 2
+ order_by: {articles_aggregate: {count: asc}}
+ after: "eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9"
+ ){
+ pageInfo{
+ startCursor
+ endCursor
+ hasPreviousPage
+ hasNextPage
+ }
+ edges{
+ cursor
+ node{
+ name
+ articles_aggregate{
+ aggregate{
+ count
+ }
+ }
+ }
+ }
+ }
+ }
+response:
+ data:
+ author_connection:
+ pageInfo:
+ startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
+ endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
+ hasPreviousPage: true
+ hasNextPage: false
+ edges:
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
+ node:
+ name: Author 2
+ articles_aggregate:
+ aggregate:
+ count: 2
+ - cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
+ node:
+ name: Author 1
+ articles_aggregate:
+ aggregate:
+ count: 3
diff --git a/server/tests-py/queries/graphql_query/relay/basic/invalid_node_id.yaml b/server/tests-py/queries/graphql_query/relay/basic/invalid_node_id.yaml
new file mode 100644
index 00000000000..6cd36891ce1
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/basic/invalid_node_id.yaml
@@ -0,0 +1,19 @@
+description: Query node interface with invalid node id
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ node(id: "eyJpZCIgOiA0fQ=="){
+ __typename
+ ... on author{
+ name
+ }
+ }
+ }
+response:
+ errors:
+ - extensions:
+ path: "$.selectionSet.node"
+ code: validation-failed
+ message: the node id is invalid
diff --git a/server/tests-py/queries/graphql_query/relay/pagination_errors/after_and_before.yaml b/server/tests-py/queries/graphql_query/relay/pagination_errors/after_and_before.yaml
new file mode 100644
index 00000000000..7559bda0a8b
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/pagination_errors/after_and_before.yaml
@@ -0,0 +1,21 @@
+description: Use after and before arguments in the same query
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ after: "eyJpZCIgOiAyfQ=="
+ before: "eyJpZCIgOiA0fQ=="
+ ){
+ edges{
+ cursor
+ }
+ }
+ }
+response:
+ errors:
+ - extensions:
+ path: "$.selectionSet.author_connection"
+ code: validation-failed
+ message: '"after" and "before" are not allowed at once'
diff --git a/server/tests-py/queries/graphql_query/relay/pagination_errors/first_and_last.yaml b/server/tests-py/queries/graphql_query/relay/pagination_errors/first_and_last.yaml
new file mode 100644
index 00000000000..9aaa7299816
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/pagination_errors/first_and_last.yaml
@@ -0,0 +1,21 @@
+description: Use first and last arguments in the same query
+url: /v1/relay
+status: 200
+query:
+ query: |
+ query {
+ author_connection(
+ first: 1
+ last: 2
+ ){
+ edges{
+ cursor
+ }
+ }
+ }
+response:
+ errors:
+ - extensions:
+ path: "$.selectionSet.author_connection"
+ code: validation-failed
+ message: '"first" and "last" are not allowed at once'
diff --git a/server/tests-py/queries/graphql_query/relay/setup.yaml b/server/tests-py/queries/graphql_query/relay/setup.yaml
new file mode 100644
index 00000000000..44f330d7c43
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/setup.yaml
@@ -0,0 +1,79 @@
+type: bulk
+args:
+- type: run_sql
+ args:
+ sql: |
+ CREATE TABLE author(
+ id SERIAL PRIMARY KEY,
+ name TEXT UNIQUE NOT NULL
+ );
+
+ INSERT INTO author (name)
+ VALUES ('Author 1'), ('Author 2'), ('Author 3'), ('Author 4');
+
+ CREATE TABLE article (
+ id SERIAL PRIMARY KEY,
+ title TEXT,
+ content TEXT,
+ author_id INTEGER REFERENCES author(id)
+ );
+
+ INSERT INTO article (title, content, author_id)
+ VALUES
+ (
+ 'Article 1',
+ 'Sample article content 1',
+ 1
+ ),
+ (
+ 'Article 2',
+ 'Sample article content 2',
+ 1
+ ),
+ (
+ 'Article 3',
+ 'Sample article content 3',
+ 1
+ ),
+ (
+ 'Article 4',
+ 'Sample article content 4',
+ 2
+ ),
+ (
+ 'Article 5',
+ 'Sample article content 5',
+ 2
+ ),
+ (
+ 'Article 6',
+ 'Sample article content 6',
+ 3
+ );
+
+# Track tables and define relationships
+- type: track_table
+ args:
+ name: author
+ schema: public
+
+- type: track_table
+ args:
+ name: article
+ schema: public
+
+- type: create_object_relationship
+ args:
+ table: article
+ name: author
+ using:
+ foreign_key_constraint_on: author_id
+
+- type: create_array_relationship
+ args:
+ table: author
+ name: articles
+ using:
+ foreign_key_constraint_on:
+ table: article
+ column: author_id
diff --git a/server/tests-py/queries/graphql_query/relay/teardown.yaml b/server/tests-py/queries/graphql_query/relay/teardown.yaml
new file mode 100644
index 00000000000..65471ac1d13
--- /dev/null
+++ b/server/tests-py/queries/graphql_query/relay/teardown.yaml
@@ -0,0 +1,8 @@
+type: bulk
+args:
+- type: run_sql
+ args:
+ cascade: true
+ sql: |
+ DROP TABLE article;
+ DROP TABLE author;