relay fixes (#5013)

* fix relay introspection failing if any views exist, fix #5020

* reduce base64 encoded node id length (see the sketch after this list), close #5037

* make node field type non-nullable in an edge

* more relay tests with permissions & complete restructure of test yaml files
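
The node id shrinkage in the second item comes from switching the encoded payload from a JSON object to a flat JSON array (see Note [Relay Node Id] in the types diff below). A standalone sketch of the size difference, using fixture values from the tests rather than engine code:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Base64.Lazy as B64
import qualified Data.ByteString.Lazy.Char8  as BL

-- Old object-shaped payload vs new array-shaped payload, both base64 encoded.
oldId, newId :: BL.ByteString
oldId = B64.encode
  "{\"table\" : {\"schema\" : \"public\", \"name\" : \"article\"}, \"columns\" : {\"id\" : 1}}"
newId = B64.encode "[1, \"public\", \"article\", 1]"

main :: IO ()
main = mapM_ (print . BL.length) [oldId, newId]  -- 104 vs 36 characters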

Co-authored-by: Aravind <aravindkp@outlook.in>
Co-authored-by: Vamshi Surabhi <0x777@users.noreply.github.com>
Rakesh Emmadi 2020-06-16 19:55:49 +05:30 committed by GitHub
parent f03b5545c7
commit 4e229dc568
56 changed files with 1102 additions and 193 deletions

View File

@ -1,24 +1,32 @@
module Data.Sequence.NonEmpty
( NESeq
( NESeq(..)
, (<|)
, (|>)
, init
, head
, tail
, toSeq
, fromSeq
, toNonEmpty
) where
import qualified Data.Foldable as Foldable
import qualified Data.Sequence as Seq
import qualified Data.Functor as Functor
import Prelude (Eq, Show, fst, snd, (.), Semigroup(..))
import qualified Data.Foldable as Foldable
import qualified Data.Functor as Functor
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import Data.Aeson
import Hasura.Incremental (Cacheable)
import Hasura.Prelude hiding (head, tail)
infixr 5 <|
infixl 5 |>
newtype NESeq a
= NESeq { unNESeq :: (a, Seq.Seq a)}
deriving (Show, Eq)
deriving (Show, Eq, Generic, Traversable)
instance (NFData a) => NFData (NESeq a)
instance (Cacheable a) => Cacheable (NESeq a)
instance Functor.Functor NESeq where
fmap f (NESeq (a, rest))
@ -27,6 +35,14 @@ instance Functor.Functor NESeq where
instance Foldable.Foldable NESeq where
foldr f v = Foldable.foldr f v . toSeq
instance FromJSON a => FromJSON (NESeq a) where
parseJSON v = do
seqList <- parseJSON v
maybe (fail "expected non empty list") pure $ fromSeq seqList
instance ToJSON a => ToJSON (NESeq a) where
toJSON = toJSON . toSeq
init :: a -> NESeq a
init a = NESeq (a, Seq.empty)
@ -45,6 +61,14 @@ v <| (NESeq (h, l)) = NESeq (v, h Seq.<| l)
toSeq :: NESeq a -> Seq.Seq a
toSeq (NESeq (v, l)) = v Seq.<| l
fromSeq :: Seq.Seq a -> Maybe (NESeq a)
fromSeq = \case
Seq.Empty -> Nothing
h Seq.:<| l -> Just $ NESeq (h, l)
toNonEmpty :: NESeq a -> NonEmpty a
toNonEmpty (NESeq (v, l)) = v NE.:| toList l
instance Semigroup (NESeq a) where
(NESeq (h, l)) <> r =
NESeq (h, l <> toSeq r)
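
A quick usage sketch of the NESeq API as diffed above (standalone apart from this module); the singleton pattern at the end is the one the enum-table check later in this diff relies on:

import Prelude hiding (head, init, tail)
import qualified Data.Sequence as Seq
import Data.Sequence.NonEmpty

one :: NESeq Int
one = init 1                      -- a singleton sequence: just 1

three :: NESeq Int
three = 0 <| (one |> 2)           -- 0, then 1, then 2

roundTrip :: Maybe (NESeq Int)
roundTrip = fromSeq (toSeq three) -- Just three; fromSeq Seq.empty is Nothing

isSingleton :: NESeq a -> Bool
isSingleton (NESeq (_, Seq.Empty)) = True
isSingleton _                      = False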

View File

@ -58,6 +58,7 @@ data RoleContext a
$(deriveToJSON (aesonDrop 5 snakeCase) ''RoleContext)
type GCtxMap = Map.HashMap RoleName (RoleContext GCtx)
type RelayGCtxMap = Map.HashMap RoleName GCtx
queryRootNamedType :: G.NamedType
queryRootNamedType = G.NamedType "query_root"

View File

@ -131,8 +131,7 @@ getExecPlanPartial userInfo sc queryType enableAL req = do
let gCtx = case queryType of
EQ.QueryHasura -> getGCtx (_uiBackendOnlyFieldAccess userInfo) sc roleName
EQ.QueryRelay -> maybe GC.emptyGCtx _rctxDefault $
Map.lookup roleName (scRelayGCtxMap sc)
EQ.QueryRelay -> fromMaybe GC.emptyGCtx $ Map.lookup roleName $ scRelayGCtxMap sc
queryParts <- flip runReaderT gCtx $ VQ.getQueryParts req

View File

@ -34,23 +34,40 @@ mkNodeInterface relayTableNames =
let description = G.Description "A globally unique identifier"
in mkHsraObjFldInfo (Just description) "id" mempty nodeIdType
-- | The Relay schema should only contain tables that have a mandatory primary key,
-- and relationships whose remote tables also have one
tablesWithOnlyPrimaryKey :: TableCache -> TableCache
tablesWithOnlyPrimaryKey tableCache =
flip Map.mapMaybe tableCache $ \tableInfo ->
tableInfo ^. tiCoreInfo.tciPrimaryKey *>
Just (infoWithPrimaryKeyRelations tableInfo)
where
infoWithPrimaryKeyRelations =
tiCoreInfo.tciFieldInfoMap %~ Map.mapMaybe (_FIRelationship %%~ withPrimaryKey)
withPrimaryKey relInfo =
let remoteTable = riRTable relInfo
maybePrimaryKey =
(tableCache ^. at remoteTable) >>= (^. tiCoreInfo.tciPrimaryKey)
in maybePrimaryKey *> Just relInfo
mkRelayGCtxMap
:: forall m. (MonadError QErr m)
=> TableCache -> FunctionCache -> m GCtxMap
=> TableCache -> FunctionCache -> m RelayGCtxMap
mkRelayGCtxMap tableCache functionCache = do
typesMapL <- mapM (mkRelayGCtxMapTable tableCache functionCache) relayTables
typesMapL <- mapM (mkRelayGCtxMapTable relayTableCache functionCache) relayTables
typesMap <- combineTypes typesMapL
let gCtxMap = flip Map.map typesMap $
\(ty, flds, insCtx) -> mkGCtx ty flds insCtx
pure $ Map.map (flip RoleContext Nothing) gCtxMap
pure gCtxMap
where
relayTableCache = tablesWithOnlyPrimaryKey tableCache
relayTables =
filter (tableFltr . _tiCoreInfo) $ Map.elems tableCache
filter (tableFltr . _tiCoreInfo) $ Map.elems relayTableCache
tableFltr ti =
not (isSystemDefined $ _tciSystemDefined ti)
&& isValidObjectName (_tciName ti)
&& isJust (_tciPrimaryKey ti)
combineTypes
:: [Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap)]
@ -59,9 +76,10 @@ mkRelayGCtxMap tableCache functionCache = do
let listMap = foldr (Map.unionWith (++) . Map.map pure) mempty maps
flip Map.traverseWithKey listMap $ \roleName typeList -> do
let relayTableNames = map (_tciName . _tiCoreInfo) relayTables
tyAgg = addTypeInfoToTyAgg
(TIIFace $ mkNodeInterface relayTableNames) $
mconcat $ map (^. _1) typeList
tyAgg = foldr addTypeInfoToTyAgg (mconcat $ map (^. _1) typeList)
[ TIIFace $ mkNodeInterface relayTableNames
, TIObj pageInfoObj
]
insCtx = mconcat $ map (^. _3) typeList
rootFields <- combineRootFields roleName $ map (^. _2) typeList
pure (tyAgg, rootFields, insCtx)
@ -251,7 +269,7 @@ mkRelayTyAggRole tn descM insPermM selPermM updColsM delPermM pkeyCols constrain
)
in case riType relInfo of
ObjRel -> [relFld]
ArrRel -> bool [relFld] ([relFld, aggRelFld] <> maybe [] pure maybeConnFld) allowAgg
ArrRel -> bool [relFld] ([relFld, aggRelFld] <> catMaybes [maybeConnFld]) allowAgg
SFComputedField cf -> pure
( (ty, mkComputedFieldName $ _cfName cf)
, RFComputedField cf
@ -403,5 +421,9 @@ mkNodeQueryRootFields roleName relayTables =
, spiRequiredHeaders perm
)
adminPermDetails = (noFilter, Nothing, [])
in (mkTableTy tableName,) . mkSelectOpCtx tableName allColumns
<$> bool permDetailsM (Just adminPermDetails) (isAdmin roleName)
in (mkTableTy tableName,) <$>
((,) <$>
(mkSelectOpCtx tableName allColumns <$>
bool permDetailsM (Just adminPermDetails) (isAdmin roleName)
) <*> (table ^? tiCoreInfo.tciPrimaryKey._Just.pkColumns)
)
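
The applicative pairing above simply requires both pieces to be present before a table is added to the node field map: the permission-derived select context and the table's primary key columns. A stripped-down sketch of that shape (hypothetical names):

-- Sketch: a table appears under the 'node' field only if both the select
-- context (permissions allow it) and a primary key exist.
pairUp :: Maybe ctx -> Maybe pk -> Maybe (ctx, pk)
pairUp mCtx mPk = (,) <$> mCtx <*> mPk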

View File

@ -71,7 +71,7 @@ traverseQueryRootFldAST f = \case
QRFActionSelect s -> QRFActionSelect <$> DS.traverseAnnSimpleSelect f s
QRFActionExecuteObject s -> QRFActionExecuteObject <$> DS.traverseAnnSimpleSelect f s
QRFActionExecuteList s -> QRFActionExecuteList <$> DS.traverseAnnSimpleSelect f s
QRFConnection s -> QRFConnection <$> DS.traverseConnectionSelect f s
QRFConnection s -> QRFConnection <$> DS.traverseConnectionSelect f s
toPGQuery :: QueryRootFldResolved -> (Q.Query, Maybe RR.RemoteJoins)
toPGQuery = \case
@ -115,12 +115,12 @@ queryFldToPGAST fld actionExecuter = do
userInfo <- asks getter
case opCtx of
QCNodeSelect nodeSelectMap -> do
NodeIdData table pkeyColumnValues <- RS.resolveNodeId fld
NodeIdV1 (V1NodeId table columnValues) <- RS.resolveNodeId fld
case Map.lookup (GS.mkTableTy table) nodeSelectMap of
Nothing -> throwVE $ "table " <> table <<> " not found"
Just selOpCtx -> do
Just (selOpCtx, pkeyColumns) -> do
validateHdrs userInfo (_socHeaders selOpCtx)
QRFNode <$> RS.convertNodeSelect selOpCtx pkeyColumnValues fld
QRFNode <$> RS.convertNodeSelect selOpCtx pkeyColumns columnValues fld
QCSelect ctx -> do
validateHdrs userInfo (_socHeaders ctx)
QRFSimple <$> RS.convertSelect ctx fld
@ -150,9 +150,11 @@ queryFldToPGAST fld actionExecuter = do
DS.JASMultipleRows -> QRFActionExecuteList
DS.JASSingleObject -> QRFActionExecuteObject
f <$> actionExecuter (RA.resolveActionQuery fld ctx (_uiSession userInfo))
QCSelectConnection pk ctx ->
QCSelectConnection pk ctx -> do
validateHdrs userInfo (_socHeaders ctx)
QRFConnection <$> RS.convertConnectionSelect pk ctx fld
QCFuncConnection pk ctx ->
QCFuncConnection pk ctx -> do
validateHdrs userInfo (_fqocHeaders ctx)
QRFConnection <$> RS.convertConnectionFuncQuery pk ctx fld
mutFldToTx

View File

@ -19,12 +19,14 @@ import Data.Parser.JSONPath
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Extended as J
import qualified Data.Aeson.Internal as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Sequence.NonEmpty as NESeq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
@ -517,7 +519,7 @@ fromConnectionField
, Has OrdByCtx r, Has SQLGenCtx r
)
=> RS.SelectFromG UnresolvedVal
-> NonEmpty PGColumnInfo
-> PrimaryKeyColumns
-> AnnBoolExpPartialSQL
-> Maybe Int
-> Field -> m (RS.ConnectionSelect UnresolvedVal)
@ -539,7 +541,7 @@ parseConnectionArgs
( MonadReusability m, MonadError QErr m, MonadReader r m
, Has FieldMap r, Has OrdByCtx r
)
=> NonEmpty PGColumnInfo
=> PrimaryKeyColumns
-> ArgsMap
-> m ( SelectArgs
, Maybe RS.ConnectionSlice
@ -587,7 +589,7 @@ parseConnectionArgs pKeyColumns args = do
cursorValue <- either (const throwInvalidCursor) pure $
J.eitherDecode cursorSplit
case maybeOrderBys of
Nothing -> forM pKeyColumns $
Nothing -> forM (NESeq.toNonEmpty pKeyColumns) $
\pgColumnInfo -> do
let columnJsonPath = [J.Key $ getPGColTxt $ pgiColumn pgColumnInfo]
pgColumnValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
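
The cursor strings parsed in the hunk above (and used throughout the test fixtures below, e.g. "eyJpZCIgOiAxfQ==") are base64 of a JSON object mapping the order-by columns to their values; a standalone decoding sketch:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Aeson as J
import qualified Data.ByteString.Base64.Lazy as B64

decodedCursor :: Either String J.Value
decodedCursor = J.eitherDecode =<< B64.decode "eyJpZCIgOiAxfQ=="
-- == Right (J.object ["id" J..= (1 :: Int)])   -- i.e. {"id" : 1}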
@ -648,7 +650,7 @@ convertConnectionSelect
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> NonEmpty PGColumnInfo -> SelOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
=> PrimaryKeyColumns -> SelOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
convertConnectionSelect pkCols opCtx fld =
withPathK "selectionSet" $
fromConnectionField (RS.FromTable qt) pkCols permFilter permLimit fld
@ -746,7 +748,7 @@ convertConnectionFuncQuery
, Has OrdByCtx r
, Has SQLGenCtx r
)
=> NonEmpty PGColumnInfo -> FuncQOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
=> PrimaryKeyColumns -> FuncQOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
convertConnectionFuncQuery pkCols funcOpCtx fld =
withPathK "selectionSet" $ fieldAsPath fld $ do
selectFrom <- makeFunctionSelectFrom qf argSeq fld
@ -754,32 +756,33 @@ convertConnectionFuncQuery pkCols funcOpCtx fld =
where
FuncQOpCtx qf argSeq _ _ permFilter permLimit = funcOpCtx
throwInvalidNodeId :: MonadError QErr m => Text -> m a
throwInvalidNodeId t = throwVE $ "the node id is invalid: " <> t
resolveNodeId
:: forall m. ( MonadError QErr m
, MonadReusability m
)
=> Field -> m NodeIdData
:: ( MonadError QErr m
, MonadReusability m
)
=> Field -> m NodeId
resolveNodeId field =
withPathK "selectionSet" $ fieldAsPath field $ do
nodeIdText <- asPGColText =<< getArg (_fArguments field) "id"
either (const throwInvalidNodeId) pure $
J.eitherDecode $ base64Decode nodeIdText
where
throwInvalidNodeId = throwVE "the node id is invalid"
withPathK "selectionSet" $ fieldAsPath field $
withArg (_fArguments field) "id" $ asPGColText >=>
either (throwInvalidNodeId . T.pack) pure . J.eitherDecode . base64Decode
convertNodeSelect
:: ( MonadReusability m
, MonadError QErr m
, MonadReader r m
, Has FieldMap r
, Has OrdByCtx r
, Has SQLGenCtx r
)
:: forall m r. ( MonadReusability m
, MonadError QErr m
, MonadReader r m
, Has FieldMap r
, Has OrdByCtx r
, Has SQLGenCtx r
)
=> SelOpCtx
-> Map.HashMap PGCol J.Value
-> PrimaryKeyColumns
-> NESeq.NESeq J.Value
-> Field
-> m (RS.AnnSimpleSelG UnresolvedVal)
convertNodeSelect selOpCtx pkeyColumnValues field =
convertNodeSelect selOpCtx pkeyColumns columnValues field =
withPathK "selectionSet" $ fieldAsPath field $ do
-- Parse selection set as interface
ifaceSelectionSet <- asInterfaceSelectionSet $ _fSelSet field
@ -790,11 +793,15 @@ convertNodeSelect selOpCtx pkeyColumnValues field =
-- Resolve the table selection set
annFields <- processTableSelectionSet tableObjectType selSet
-- Resolve the Node id primary key column values
pkeyColumnValues <- alignPkeyColumnValues
unresolvedPkeyValues <- flip Map.traverseWithKey pkeyColumnValues $
\pgColumn jsonValue -> case Map.lookup pgColumn pgColumnMap of
Nothing -> throwVE $ "column " <> pgColumn <<> " not found"
Just columnInfo -> (,columnInfo) . UVPG . AnnPGVal Nothing False <$>
parsePGScalarValue (pgiType columnInfo) jsonValue
\columnInfo jsonValue ->
let modifyErrFn t = "value of column " <> pgiColumn columnInfo
<<> " in node id: " <> t
in modifyErr modifyErrFn $
(,columnInfo) . UVPG . AnnPGVal Nothing False <$>
parsePGScalarValue (pgiType columnInfo) jsonValue
-- Generate the bool expression from the primary key column values
let pkeyBoolExp = BoolAnd $ flip map (Map.elems unresolvedPkeyValues) $
\(unresolvedValue, columnInfo) -> (BoolFld . AVCol columnInfo) [AEQ True unresolvedValue]
@ -802,5 +809,19 @@ convertNodeSelect selOpCtx pkeyColumnValues field =
strfyNum <- stringifyNum <$> asks getter
pure $ RS.AnnSelectG annFields (RS.FromTable table) tablePerm selectArgs strfyNum
where
SelOpCtx table _ allColumns permFilter permLimit = selOpCtx
pgColumnMap = mapFromL pgiColumn $ Map.elems allColumns
SelOpCtx table _ _ permFilter permLimit = selOpCtx
alignPkeyColumnValues :: m (Map.HashMap PGColumnInfo J.Value)
alignPkeyColumnValues = do
let NESeq.NESeq (firstPkColumn, remainingPkColumns) = pkeyColumns
NESeq.NESeq (firstColumnValue, remainingColumns) = columnValues
(nonAlignedPkColumns, nonAlignedColumnValues, alignedTuples) =
partitionThese $ toList $ align remainingPkColumns remainingColumns
when (not $ null nonAlignedPkColumns) $ throwInvalidNodeId $
"primary key columns " <> dquoteList (map pgiColumn nonAlignedPkColumns) <> " are missing"
when (not $ null nonAlignedColumnValues) $ throwInvalidNodeId $
"unexpected column values " <> J.encodeToStrictText nonAlignedColumnValues
pure $ Map.fromList $ (firstPkColumn, firstColumnValue):alignedTuples
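
alignPkeyColumnValues above leans on align (from Semialign) and partitionThese (from Data.These, newly re-exported through Hasura.Prelude in this diff) to pair primary key columns with the values carried in the node id and to detect leftovers on either side. A standalone sketch with plain lists in place of Seq:

import Data.Align (align)
import Data.These (These (..), partitionThese)

pkColumns :: [String]
pkColumns = ["first_name", "last_name"]

idValues :: [Int]                -- values decoded from the node id
idValues = [1]

aligned :: [These String Int]
aligned = align pkColumns idValues
-- [These "first_name" 1, This "last_name"]

missingCols :: [String]          -- -> "primary key columns ... are missing"
extraVals   :: [Int]             -- -> "unexpected column values ..."
pairs       :: [(String, Int)]
(missingCols, extraVals, pairs) = partitionThese aligned
-- (["last_name"], [], [("first_name", 1)])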

View File

@ -8,10 +8,9 @@ import Control.Lens.TH
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Sequence.NonEmpty as NESeq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
@ -31,17 +30,17 @@ import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
type NodeSelectMap = Map.HashMap G.NamedType SelOpCtx
type NodeSelectMap = Map.HashMap G.NamedType (SelOpCtx, PrimaryKeyColumns)
data QueryCtx
= QCNodeSelect !NodeSelectMap
| QCSelect !SelOpCtx
| QCSelectConnection !(NonEmpty PGColumnInfo) !SelOpCtx
| QCSelectConnection !PrimaryKeyColumns !SelOpCtx
| QCSelectPkey !SelPkOpCtx
| QCSelectAgg !SelOpCtx
| QCFuncQuery !FuncQOpCtx
| QCFuncAggQuery !FuncQOpCtx
| QCFuncConnection !(NonEmpty PGColumnInfo) !FuncQOpCtx
| QCFuncConnection !PrimaryKeyColumns !FuncQOpCtx
| QCAsyncActionFetch !ActionSelectOpContext
| QCAction !ActionExecutionContext
deriving (Show, Eq)
@ -144,13 +143,13 @@ type PGColGNameMap = Map.HashMap G.Name PGColumnInfo
data RelationshipFieldKind
= RFKAggregate
| RFKSimple
| RFKConnection !(NonEmpty PGColumnInfo)
| RFKConnection !PrimaryKeyColumns
deriving (Show, Eq)
data RelationshipField
= RelationshipField
{ _rfInfo :: !RelInfo
, _rfIsAgg :: !RelationshipFieldKind
, _rfKind :: !RelationshipFieldKind
, _rfCols :: !PGColGNameMap
, _rfPermFilter :: !AnnBoolExpPartialSQL
, _rfPermLimit :: !(Maybe Int)
@ -184,7 +183,7 @@ data ResolveField
| RFRelationship !RelationshipField
| RFComputedField !ComputedField
| RFRemoteRelationship !RemoteFieldInfo
| RFNodeId !QualifiedTable !(NonEmpty PGColumnInfo)
| RFNodeId !QualifiedTable !PrimaryKeyColumns
deriving (Show, Eq)
type FieldMap = Map.HashMap (G.NamedType, G.Name) ResolveField
@ -262,12 +261,74 @@ data InputFunctionArgument
| IFAUnknown !FunctionArgItem -- ^ Unknown value, need to be parsed
deriving (Show, Eq)
data NodeIdData
= NodeIdData
{- Note [Relay Node Id]
~~~~~~~~~~~~~~~~~~~~~~~
The 'Node' interface in Relay schema has exactly one field which returns
a non-null 'ID' value. Each table object type in Relay schema should implement
'Node' interface to provide global object identification.
See https://relay.dev/graphql/objectidentification.htm for more details.
To identify each row in a table, we need to encode the table information
(schema and name) and primary key column values in the 'Node' id.
Node id data:
-------------
We use the JSON format for encoding and decoding the node id. The JSON
schema looks like the following
'[<version-integer>, "<table-schema>", "<table-name>", "column-1", "column-2", ... "column-n"]'
It is represented by the type @'NodeId'. The 'version-integer' records the JSON
schema version, so that backward compatibility can be maintained if the format changes in future versions.
The stringified JSON is base64 encoded and sent to the client. The same
base64 encoded JSON string is accepted as the 'node' field resolver's 'id' input.
-}
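
Concretely, per the note above, a node id decodes in two steps: base64, then JSON. A standalone sketch using only aeson values:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Aeson as J
import qualified Data.ByteString.Base64.Lazy as B64

decodedNodeId :: Either String [J.Value]
decodedNodeId =
  J.eitherDecode =<< B64.decode "WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDFd"
-- Right [Number 1.0, String "public", String "article", Number 1.0]
--        ^ version    ^ schema        ^ table           ^ pk value(s)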
data NodeIdVersion
= NIVersion1
deriving (Show, Eq)
nodeIdVersionInt :: NodeIdVersion -> Int
nodeIdVersionInt NIVersion1 = 1
currentNodeIdVersion :: NodeIdVersion
currentNodeIdVersion = NIVersion1
instance J.FromJSON NodeIdVersion where
parseJSON v = do
versionInt :: Int <- J.parseJSON v
case versionInt of
1 -> pure NIVersion1
_ -> fail $ "expecting version 1 for node id, but got " <> show versionInt
data V1NodeId
= V1NodeId
{ _nidTable :: !QualifiedTable
, _nidColumns :: !(Map.HashMap PGCol J.Value)
, _nidColumns :: !(NESeq.NESeq J.Value)
} deriving (Show, Eq)
$(J.deriveFromJSON (J.aesonDrop 4 J.snakeCase) ''NodeIdData)
-- | The Relay 'Node' interface's 'id' field value.
-- See Note [Relay Node Id].
data NodeId
= NodeIdV1 !V1NodeId
deriving (Show, Eq)
instance J.FromJSON NodeId where
parseJSON v = do
valueList <- J.parseJSON v
case valueList of
[] -> fail "unexpected GUID format, found empty list"
J.Number 1:rest -> NodeIdV1 <$> parseNodeIdV1 rest
J.Number n:_ -> fail $ "unsupported GUID version: " <> show n
_ -> fail "unexpected GUID format, needs to start with a version number"
where
parseNodeIdV1 (schemaValue:(nameValue:(firstColumn:remainingColumns))) =
V1NodeId
<$> (QualifiedObject <$> J.parseJSON schemaValue <*> J.parseJSON nameValue)
<*> pure (NESeq.NESeq (firstColumn, Seq.fromList remainingColumns))
parseNodeIdV1 _ = fail "GUID version 1: expecting schema name, table name and at least one column value"
-- template haskell related
$(makePrisms ''ResolveField)

View File

@ -526,7 +526,8 @@ getSelPerm tableCache fields roleName selPermInfo = do
FIRelationship relInfo -> do
remTableInfo <- getTabInfo tableCache $ riRTable relInfo
let remTableSelPermM = getSelPermission remTableInfo roleName
remTableFlds = _tciFieldInfoMap $ _tiCoreInfo remTableInfo
remTableCoreInfo = _tiCoreInfo remTableInfo
remTableFlds = _tciFieldInfoMap remTableCoreInfo
remTableColGNameMap =
mkPGColGNameMap $ getValidCols remTableFlds
return $ flip fmap remTableSelPermM $
@ -536,8 +537,7 @@ getSelPerm tableCache fields roleName selPermInfo = do
, _rfiColumns = remTableColGNameMap
, _rfiPermFilter = spiFilter rmSelPermM
, _rfiPermLimit = spiLimit rmSelPermM
, _rfiPrimaryKeyColumns = _pkColumns <$>
_tciPrimaryKey (_tiCoreInfo remTableInfo)
, _rfiPrimaryKeyColumns = _pkColumns <$> _tciPrimaryKey remTableCoreInfo
, _rfiIsNullable = isRelNullable fields relInfo
}
FIComputedField info -> do
@ -913,7 +913,6 @@ mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap =
, TIObj <$> mutRootM
, TIObj <$> subRootM
, TIEnum <$> ordByEnumTyM
, Just $ TIObj mkPageInfoObj
] <>
scalarTys <> compTys <> defaultTypes <> wiredInGeoInputTypes
<> wiredInRastInputTypes

View File

@ -57,7 +57,7 @@ data RelationshipFieldInfo
, _rfiColumns :: !PGColGNameMap
, _rfiPermFilter :: !AnnBoolExpPartialSQL
, _rfiPermLimit :: !(Maybe Int)
, _rfiPrimaryKeyColumns :: !(Maybe (NonEmpty PGColumnInfo))
, _rfiPrimaryKeyColumns :: !(Maybe PrimaryKeyColumns)
, _rfiIsNullable :: !Bool
} deriving (Show, Eq)

View File

@ -6,7 +6,7 @@ module Hasura.GraphQL.Schema.Select
, mkTableAggregateFieldsObj
, mkTableColAggregateFieldsObj
, mkTableEdgeObj
, mkPageInfoObj
, pageInfoObj
, mkTableConnectionObj
, mkTableConnectionTy
@ -166,36 +166,44 @@ object_relationship: remote_table
-}
mkRelationshipField
:: Bool
-> RelInfo
-> Bool
-> Maybe (NonEmpty PGColumnInfo)
-> Bool
-> [ObjFldInfo]
mkRelationshipField allowAgg (RelInfo rn rTy _ remTab isManual) isRelay maybePkCols isNullable =
case rTy of
ArrRel -> bool [arrRelFld] ([arrRelFld, aggArrRelFld] <> connFields) allowAgg
ObjRel -> [objRelFld]
:: Bool -> RelationshipFieldInfo -> [ObjFldInfo]
mkRelationshipField isRelay fieldInfo =
if | not isRelay -> mkFields False
| isRelay && isJust maybePkey -> mkFields True
| otherwise -> []
where
objRelFld = mkHsraObjFldInfo (Just "An object relationship")
(mkRelName rn) Map.empty objRelTy
objRelTy = bool (G.toGT $ G.toNT relTabTy) (G.toGT relTabTy) isObjRelNullable
isObjRelNullable = isManual || isNullable
relTabTy = mkTableTy remTab
mkFields includeConnField =
let boolGuard a = bool Nothing (Just a)
in case relType of
ArrRel -> arrRelFld : catMaybes
[ boolGuard aggArrRelFld allowAgg
, boolGuard arrConnFld includeConnField
]
ObjRel -> [objRelFld]
RelationshipFieldInfo relInfo allowAgg _ _ _ maybePkey isNullable = fieldInfo
RelInfo relName relType _ remoteTable isManual = relInfo
remTabTy = mkTableTy remoteTable
objRelFld =
mkHsraObjFldInfo (Just "An object relationship")
(mkRelName relName) Map.empty $
bool (G.toGT . G.toNT) G.toGT (isManual || isNullable) remTabTy
arrRelFld =
mkHsraObjFldInfo (Just "An array relationship") (mkRelName rn)
(fromInpValL $ mkSelArgs remTab) $
G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy remTab
mkHsraObjFldInfo (Just "An array relationship") (mkRelName relName)
(fromInpValL $ mkSelArgs remoteTable) $
G.toGT $ G.toNT $ G.toLT $ G.toNT remTabTy
connFields = if isNothing maybePkCols || not isRelay then [] else pure $
mkHsraObjFldInfo Nothing (mkConnectionRelName rn)
(fromInpValL $ mkConnectionArgs remTab) $
G.toGT $ G.toNT $ mkTableConnectionTy remTab
arrConnFld =
mkHsraObjFldInfo (Just "An array relationship connection") (mkConnectionRelName relName)
(fromInpValL $ mkConnectionArgs remoteTable) $
G.toGT $ G.toNT $ mkTableConnectionTy remoteTable
aggArrRelFld = mkHsraObjFldInfo (Just "An aggregated array relationship")
(mkAggRelName rn) (fromInpValL $ mkSelArgs remTab) $
G.toGT $ G.toNT $ mkTableAggTy remTab
(mkAggRelName relName) (fromInpValL $ mkSelArgs remoteTable) $
G.toGT $ G.toNT $ mkTableAggTy remoteTable
mkTableObjectDescription :: QualifiedTable -> Maybe PGDescription -> G.Description
mkTableObjectDescription tn pgDescription =
@ -205,12 +213,9 @@ mkTableObjectFields :: Bool -> [SelField] -> [ObjFldInfo]
mkTableObjectFields isRelay =
concatMap \case
SFPGColumn info -> pure $ mkPGColFld info
SFRelationship info -> mkRelationshipField' info
SFRelationship info -> mkRelationshipField isRelay info
SFComputedField info -> pure $ mkComputedFieldFld info
SFRemoteRelationship info -> pure $ mkRemoteRelationshipFld info
where
mkRelationshipField' (RelationshipFieldInfo relInfo allowAgg _ _ _ maybePkCols isNullable) =
mkRelationshipField allowAgg relInfo isRelay maybePkCols isNullable
{-
type table {
@ -420,8 +425,8 @@ type PageInfo {
endCursor: String!
}
-}
mkPageInfoObj :: ObjTyInfo
mkPageInfoObj =
pageInfoObj :: ObjTyInfo
pageInfoObj =
mkHsraObjTyInfo Nothing pageInfoTy Set.empty $ mapFromL _fiName
[hasNextPage, hasPreviousPage, startCursor, endCursor]
where
@ -437,7 +442,7 @@ mkPageInfoObj =
{-
type tableConnection {
cursor: String!
node: table
node: table!
}
-}
mkTableEdgeObj
@ -448,8 +453,8 @@ mkTableEdgeObj tn =
where
cursor = mkHsraObjFldInfo Nothing "cursor" Map.empty $
G.toGT $ G.toNT stringScalar
node = mkHsraObjFldInfo Nothing "node" Map.empty $ G.toGT $
mkTableTy tn
node = mkHsraObjFldInfo Nothing "node" Map.empty $
G.toGT $ G.toNT $ mkTableTy tn
{-
table_by_pk(

View File

@ -57,7 +57,7 @@ import Data.Sequence as M (Seq)
import Data.String as M (IsString)
import Data.Text as M (Text)
import Data.These as M (These (..), fromThese, mergeThese,
mergeTheseWith, these)
mergeTheseWith, partitionThese, these)
import Data.Time.Clock.Units
import Data.Traversable as M (for)
import Data.Word as M (Word64)

View File

@ -21,6 +21,8 @@ import Data.List (delete)
import qualified Data.HashMap.Strict as M
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Sequence.NonEmpty as NESeq
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
@ -51,7 +53,7 @@ resolveEnumReferences enumTables =
resolveEnumReference foreignKey = do
[(localColumn, foreignColumn)] <- pure $ M.toList (_fkColumnMapping foreignKey)
(primaryKey, enumValues) <- M.lookup (_fkForeignTable foreignKey) enumTables
guard (_pkColumns primaryKey == foreignColumn:|[])
guard (_pkColumns primaryKey == NESeq.NESeq (foreignColumn, Seq.Empty))
pure (localColumn, EnumReference (_fkForeignTable foreignKey) enumValues)
data EnumTableIntegrityError
@ -84,7 +86,7 @@ fetchAndValidateEnumValues tableName maybePrimaryKey columnInfos =
validatePrimaryKey = case maybePrimaryKey of
Nothing -> refute [EnumTableMissingPrimaryKey]
Just primaryKey -> case _pkColumns primaryKey of
column :| [] -> case prciType column of
NESeq.NESeq (column, Seq.Empty) -> case prciType column of
PGText -> pure column
_ -> refute [EnumTableNonTextualPrimaryKey column]
columns -> refute [EnumTableMultiColumnPrimaryKey $ map prciName (toList columns)]

View File

@ -6,13 +6,14 @@ module Hasura.RQL.DML.Select.Internal
)
where
import Control.Lens hiding (op)
import Control.Lens hiding (op)
import Control.Monad.Writer.Strict
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import Hasura.GraphQL.Resolve.Types
import Hasura.Prelude
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Select.Types
@ -21,7 +22,7 @@ import Hasura.RQL.Types
import Hasura.SQL.Rewrite
import Hasura.SQL.Types
import qualified Hasura.SQL.DML as S
import qualified Hasura.SQL.DML as S
-- Conversion of SelectQ happens in 2 Stages.
-- Stage 1 : Convert input query into an annotated AST
@ -805,18 +806,18 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields = do
Nothing -> sqlExp
Just (ColumnOp opText cExp) -> S.mkSQLOpExp opText sqlExp cExp
mkNodeId :: QualifiedTable -> NonEmpty PGColumnInfo -> S.SQLExp
mkNodeId :: QualifiedTable -> PrimaryKeyColumns -> S.SQLExp
mkNodeId (QualifiedObject tableSchema tableName) pkeyColumns =
let tableObjectExp = S.applyJsonBuildObj
[ S.SELit "schema"
, S.SELit (getSchemaTxt tableSchema)
, S.SELit "name"
, S.SELit (toTxt tableName)
]
in encodeBase64 $ flip S.SETyAnn S.textTypeAnn $ S.applyJsonBuildObj
[ S.SELit "table", tableObjectExp
, S.SELit "columns", mkPrimaryKeyColumnsObjectExp sourcePrefix pkeyColumns
]
let columnInfoToSQLExp pgColumnInfo =
toJSONableExp False (pgiType pgColumnInfo) False $
S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ pgiColumn pgColumnInfo
-- See Note [Relay Node Id].
in encodeBase64 $ flip S.SETyAnn S.textTypeAnn $ S.applyJsonBuildArray $
[ S.intToSQLExp $ nodeIdVersionInt currentNodeIdVersion
, S.SELit (getSchemaTxt tableSchema)
, S.SELit (toTxt tableName)
] <> map columnInfoToSQLExp (toList pkeyColumns)
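
For orientation, the SQL this produces should look roughly like the following, given that encodeBase64 (further down in this file) is removeNewline . bytesToBase64Text . convertToBytes; the concrete Postgres function names and the "base" alias are an illustrative reconstruction, not captured engine output:

-- Assumed mapping: convertToBytes ~ convert_to, bytesToBase64Text ~ encode,
-- removeNewline ~ replace; the base-table alias is hypothetical.
nodeIdSqlSketch :: String
nodeIdSqlSketch =
  "replace(encode(convert_to(\
  \(json_build_array(1, 'public', 'article', \"base\".\"id\"))::text\
  \, 'UTF8'), 'base64'), E'\\n', '')"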
injectJoinCond :: S.BoolExp -- ^ Join condition
-> S.BoolExp -- ^ Where condition
@ -1013,15 +1014,6 @@ pageInfoSelectAliasIden = Iden "__page_info"
cursorsSelectAliasIden :: Iden
cursorsSelectAliasIden = Iden "__cursors_select"
mkPrimaryKeyColumnsObjectExp :: Iden -> NonEmpty PGColumnInfo -> S.SQLExp
mkPrimaryKeyColumnsObjectExp sourcePrefix primaryKeyColumns =
S.applyJsonBuildObj $ flip concatMap (toList primaryKeyColumns) $
\pgColumnInfo ->
[ S.SELit $ getPGColTxt $ pgiColumn pgColumnInfo
, toJSONableExp False (pgiType pgColumnInfo) False $
S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ pgiColumn pgColumnInfo
]
encodeBase64 :: S.SQLExp -> S.SQLExp
encodeBase64 =
removeNewline . bytesToBase64Text . convertToBytes
@ -1058,7 +1050,7 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection
Nothing ->
-- Extract primary key columns from base select along with cursor expression.
-- Those columns are required to perform connection split via a WHERE clause.
mkCursorExtractor (mkPrimaryKeyColumnsObjectExp thisPrefix primaryKeyColumns) : primaryKeyColumnExtractors
mkCursorExtractor primaryKeyColumnsObjectExp : primaryKeyColumnExtractors
orderByExp = _ssOrderBy selectSource
(topExtractorExp, exps) <- flip runStateT [] $ processFields orderByExp
let topExtractor = S.Extractor topExtractorExp $ Just $ S.Alias fieldIden
@ -1076,6 +1068,14 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection
thisPrefix = _pfThis sourcePrefixes
permLimitSubQuery = PLSQNotRequired
primaryKeyColumnsObjectExp =
S.applyJsonBuildObj $ flip concatMap (toList primaryKeyColumns) $
\pgColumnInfo ->
[ S.SELit $ getPGColTxt $ pgiColumn pgColumnInfo
, toJSONableExp False (pgiType pgColumnInfo) False $
S.mkQIdenExp (mkBaseTableAlias thisPrefix) $ pgiColumn pgColumnInfo
]
primaryKeyColumnExtractors =
flip map (toList primaryKeyColumns) $
\pgColumnInfo ->

View File

@ -194,7 +194,7 @@ data AnnFieldG v
| AFArrayRelation !(ArraySelectG v)
| AFComputedField !(ComputedFieldSelect v)
| AFRemote !RemoteSelect
| AFNodeId !QualifiedTable !(NonEmpty PGColumnInfo)
| AFNodeId !QualifiedTable !(PrimaryKeyColumns)
| AFExpression !T.Text
deriving (Show, Eq)
@ -446,7 +446,7 @@ traverseConnectionSplit f (ConnectionSplit k v ob) =
data ConnectionSelect v
= ConnectionSelect
{ _csPrimaryKeyColumns :: !(NE.NonEmpty PGColumnInfo)
{ _csPrimaryKeyColumns :: !PrimaryKeyColumns
, _csSplit :: !(Maybe (NE.NonEmpty (ConnectionSplit v)))
, _csSlice :: !(Maybe ConnectionSlice)
, _csSelect :: !(AnnSelectG (ConnectionFields v) v)

View File

@ -16,6 +16,7 @@ module Hasura.RQL.Types.Column
, PGColumnInfo(..)
, PGRawColumnInfo(..)
, PrimaryKeyColumns
, getColInfos
, EnumReference(..)
@ -34,6 +35,7 @@ import Control.Lens.TH
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Sequence.NonEmpty
import Language.Haskell.TH.Syntax (Lift)
import Hasura.Incremental (Cacheable)
@ -170,6 +172,8 @@ instance Cacheable PGColumnInfo
instance Hashable PGColumnInfo
$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColumnInfo)
type PrimaryKeyColumns = NESeq PGColumnInfo
onlyIntCols :: [PGColumnInfo] -> [PGColumnInfo]
onlyIntCols = filter (isScalarColumnWhere isIntegerType . pgiType)

View File

@ -46,15 +46,16 @@ module Hasura.RQL.Types.Common
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.SQL.Types
import Hasura.RQL.Types.Error
import Hasura.RQL.DDL.Headers ()
import Hasura.RQL.Types.Error
import Hasura.SQL.Types
import Control.Lens (makeLenses)
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Sequence.NonEmpty
import Data.URL.Template
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift, Q, TExp)
@ -226,7 +227,7 @@ $(deriveJSON (aesonDrop 2 snakeCase) ''Constraint)
data PrimaryKey a
= PrimaryKey
{ _pkConstraint :: !Constraint
, _pkColumns :: !(NonEmpty a)
, _pkColumns :: !(NESeq a)
} deriving (Show, Eq, Generic, Foldable)
instance (NFData a) => NFData (PrimaryKey a)
instance (Cacheable a) => Cacheable (PrimaryKey a)
@ -279,7 +280,7 @@ newtype NonNegativeDiffTime = NonNegativeDiffTime { unNonNegativeDiffTime :: Dif
instance FromJSON NonNegativeDiffTime where
parseJSON = withScientific "NonNegativeDiffTime" $ \t -> do
case (t > 0) of
True -> return $ NonNegativeDiffTime . realToFrac $ t
True -> return $ NonNegativeDiffTime . realToFrac $ t
False -> fail "negative value not allowed"
newtype ResolvedWebhook

View File

@ -127,11 +127,11 @@ import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.Metadata
import Hasura.RQL.Types.QueryCollection
import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.ScheduledTrigger
import Hasura.RQL.Types.SchemaCacheTypes
import Hasura.RQL.Types.Table
@ -143,11 +143,11 @@ import Data.Aeson.Casing
import Data.Aeson.TH
import System.Cron.Types
import qualified Hasura.GraphQL.Validate.Types as VT
import qualified Hasura.GraphQL.Context as GC
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Validate.Types as VT
reportSchemaObjs :: [SchemaObjId] -> T.Text
reportSchemaObjs = T.intercalate ", " . sort . map reportSchemaObj
@ -217,7 +217,7 @@ data SchemaCache
, scCustomTypes :: !(NonObjectTypeMap, AnnotatedObjects)
, scGCtxMap :: !GC.GCtxMap
, scDefaultRemoteGCtx :: !GC.GCtx
, scRelayGCtxMap :: !GC.GCtxMap
, scRelayGCtxMap :: !GC.RelayGCtxMap
, scDepMap :: !DepMap
, scInconsistentObjs :: ![InconsistentMetadata]
, scCronTriggers :: !(M.HashMap TriggerName CronTriggerInfo)

View File

@ -425,6 +425,10 @@ applyJsonBuildObj :: [SQLExp] -> SQLExp
applyJsonBuildObj args =
SEFnApp "json_build_object" args Nothing
applyJsonBuildArray :: [SQLExp] -> SQLExp
applyJsonBuildArray args =
SEFnApp "json_build_array" args Nothing
applyRowToJson :: [Extractor] -> SQLExp
applyRowToJson extrs =
SEFnApp "row_to_json" [mkRowExp extrs] Nothing

View File

@ -38,7 +38,7 @@ response:
edges:
- cursor: eyJpZCIgOiAxfQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAxfX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDFd
node:
title: Article 1
content: Sample article content 1
@ -46,7 +46,7 @@ response:
name: Author 1
- cursor: eyJpZCIgOiAyfQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAyfX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDJd
node:
title: Article 2
content: Sample article content 2
@ -54,7 +54,7 @@ response:
name: Author 1
- cursor: eyJpZCIgOiAzfQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDNd
node:
title: Article 3
content: Sample article content 3
@ -62,7 +62,7 @@ response:
name: Author 1
- cursor: eyJpZCIgOiA0fQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA0fX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDRd
node:
title: Article 4
content: Sample article content 4
@ -70,7 +70,7 @@ response:
name: Author 2
- cursor: eyJpZCIgOiA1fQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA1fX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDVd
node:
title: Article 5
content: Sample article content 5
@ -78,7 +78,7 @@ response:
name: Author 2
- cursor: eyJpZCIgOiA2fQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA2fX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDZd
node:
title: Article 6
content: Sample article content 6

View File

@ -40,16 +40,16 @@ response:
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDF9fQ==
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgMV0=
name: Author 1
articles:
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAxfX0=
- id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDFd
title: Article 1
content: Sample article content 1
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAyfX0=
- id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDJd
title: Article 2
content: Sample article content 2
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
- id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDNd
title: Article 3
content: Sample article content 3
articles_aggregate:
@ -59,28 +59,28 @@ response:
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAxfX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDFd
title: Article 1
content: Sample article content 1
- cursor: eyJpZCIgOiAyfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAyfX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDJd
title: Article 2
content: Sample article content 2
- cursor: eyJpZCIgOiAzfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDNd
title: Article 3
content: Sample article content 3
- cursor: eyJpZCIgOiAyfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDJ9fQ==
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgMl0=
name: Author 2
articles:
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA0fX0=
- id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDRd
title: Article 4
content: Sample article content 4
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA1fX0=
- id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDVd
title: Article 5
content: Sample article content 5
articles_aggregate:
@ -90,20 +90,20 @@ response:
edges:
- cursor: eyJpZCIgOiA0fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA0fX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDRd
title: Article 4
content: Sample article content 4
- cursor: eyJpZCIgOiA1fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA1fX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDVd
title: Article 5
content: Sample article content 5
- cursor: eyJpZCIgOiAzfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDN9fQ==
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgM10=
name: Author 3
articles:
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA2fX0=
- id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDZd
title: Article 6
content: Sample article content 6
articles_aggregate:
@ -113,12 +113,12 @@ response:
edges:
- cursor: eyJpZCIgOiA2fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA2fX0=
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDZd
title: Article 6
content: Sample article content 6
- cursor: eyJpZCIgOiA0fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDR9fQ==
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgNF0=
name: Author 4
articles: []
articles_aggregate:

View File

@ -0,0 +1,43 @@
# https://github.com/hasura/graphql-engine/issues/5020
description: Query author connection with edges
url: /v1/relay
status: 200
query:
query: |
query {
author_connection{
edges{
cursor
node{
id
name
articles{
id
title
content
}
articles_aggregate{
aggregate{
count
}
}
articles_view_connection{
edges{
cursor
node{
id
title
content
}
}
}
}
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.author_connection.selectionSet.edges.selectionSet.node.selectionSet.articles_view_connection
code: validation-failed
message: "field \"articles_view_connection\" not found in type: 'author'"

View File

@ -3,10 +3,11 @@ url: /v1/relay
status: 200
query:
variables:
author_id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDJ9fQ==
article_id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
author_id: WzEsICJwdWJsaWMiLCJhdXRob3IiLDJdCg==
article_id: WzEsInB1YmxpYyIsImFydGljbGUiLDNdCg==
non_exist_article_id: WzEsInB1YmxpYyIsImFydGljbGUiLDEwMF0K
query: |
query nodeQuery($author_id: ID!, $article_id: ID!){
query nodeQuery($author_id: ID!, $article_id: ID!, $non_exist_article_id: ID!){
author_node: node(id: $author_id){
__typename
... on author{
@ -41,6 +42,12 @@ query:
}
}
}
non_exist_article: node(id: $non_exist_article_id){
... on article{
title
content
}
}
}
response:
data:
@ -58,10 +65,11 @@ response:
title: Article 5
content: Sample article content 5
article_node_with_author_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDJ9fQ==
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgMl0=
article_node:
__typename: article
title: Article 3
content: Sample article content 3
author:
name: Author 1
non_exist_article: null

View File

@ -0,0 +1,22 @@
description: Query node interface with insufficient items in node id JSON array
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "WzEsICJwdWJsaWMiLCAidXNlciJdCg=="){
... on user{
first_name
last_name
age
address
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.node.args.id
code: validation-failed
message: 'the node id is invalid: Error in $: GUID version 1: expecting schema
name, table name and at least one column value'

View File

@ -0,0 +1,22 @@
description: Query node interface with an invalid column value in node id JSON array
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "WzEsICJwdWJsaWMiLCAidXNlciIsIDEsICJsYXN0XzIiXQo="){
... on user{
first_name
last_name
age
address
}
}
}
response:
errors:
- extensions:
path: "$.selectionSet.node"
code: parse-failed
message: 'value of column "first_name" in node id: parsing Text failed, expected
String, but encountered Number'

View File

@ -0,0 +1,22 @@
description: Query node interface with an invalid node id
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "random"){
... on user{
first_name
last_name
age
address
}
}
}
response:
errors:
- extensions:
path: "$.selectionSet.node.args.id"
code: validation-failed
message: 'the node id is invalid: Error in $: Failed reading: not a valid json value
at ''­©Ý¢'''

View File

@ -0,0 +1,19 @@
description: Query node interface with invalid node id version
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "WzIsICJwdWJsaWMiLCAiYXV0aG9yIiwgMV0K"){
__typename
... on author{
name
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.node.args.id
code: validation-failed
message: 'the node id is invalid: Error in $: unsupported GUID version: 2.0'

View File

@ -0,0 +1,21 @@
description: Query node interface with a missing column value in node id JSON array
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "WzEsICJwdWJsaWMiLCAidXNlciIsICJmaXJzdF8yIl0K"){
... on user{
first_name
last_name
age
address
}
}
}
response:
errors:
- extensions:
path: "$.selectionSet.node"
code: validation-failed
message: 'the node id is invalid: primary key columns "last_name" are missing'

View File

@ -1,4 +1,4 @@
description: Query node interface with invalid node id
description: Query node interface with a non-array JSON string as id
url: /v1/relay
status: 200
query:
@ -14,6 +14,7 @@ query:
response:
errors:
- extensions:
path: "$.selectionSet.node"
path: $.selectionSet.node.args.id
code: validation-failed
message: the node id is invalid
message: 'the node id is invalid: Error in $: parsing [] failed, expected Array,
but encountered Object'

View File

@ -0,0 +1,22 @@
description: Query node interface with a non-integer GUID version
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "WyIxIiwgInB1YmxpYyIsICJ1c2VyIiwgImZpcnN0XzIiLCAibGFzdF8yIiwgMl0K"){
... on user{
first_name
last_name
age
address
}
}
}
response:
errors:
- extensions:
path: $.selectionSet.node.args.id
code: validation-failed
message: 'the node id is invalid: Error in $: unexpected GUID format, needs to
start with a version number'

View File

@ -0,0 +1,21 @@
description: Query node interface with an unexpected column value in node id JSON array
url: /v1/relay
status: 200
query:
query: |
query {
node(id: "WzEsICJwdWJsaWMiLCAidXNlciIsICJmaXJzdF8yIiwgImxhc3RfMiIsIDJdCg=="){
... on user{
first_name
last_name
age
address
}
}
}
response:
errors:
- extensions:
path: "$.selectionSet.node"
code: validation-failed
message: 'the node id is invalid: unexpected column values [2]'
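
For readability, the base64 node id fixtures used across these node_id_errors tests decode as follows (the trailing newline in several ids is a fixture-generation artifact that the JSON parser tolerates as whitespace); a standalone sketch:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as B8

fixtures :: [B8.ByteString]
fixtures =
  [ "WzEsICJwdWJsaWMiLCAidXNlciJdCg=="                 -- [1, "public", "user"]              : too few items
  , "WzEsICJwdWJsaWMiLCAidXNlciIsIDEsICJsYXN0XzIiXQo=" -- [1, "public", "user", 1, "last_2"] : number where text expected
  , "WzIsICJwdWJsaWMiLCAiYXV0aG9yIiwgMV0K"             -- [2, "public", "author", 1]         : unsupported version
  , "WzEsICJwdWJsaWMiLCAidXNlciIsICJmaXJzdF8yIl0K"     -- [1, "public", "user", "first_2"]   : "last_name" missing
  , "WyIxIiwgInB1YmxpYyIsICJ1c2VyIiwgImZpcnN0XzIiLCAibGFzdF8yIiwgMl0K"
    -- ["1", "public", "user", "first_2", "last_2", 2] : non-integer version
  , "WzEsICJwdWJsaWMiLCAidXNlciIsICJmaXJzdF8yIiwgImxhc3RfMiIsIDJdCg=="
    -- [1, "public", "user", "first_2", "last_2", 2]   : unexpected extra value
  ]

main :: IO ()
main = mapM_ (print . B64.decode) fixtures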

View File

@ -0,0 +1,33 @@
description: Query search_articles function with edges
url: /v1/relay
status: 200
query:
query: |
query {
search_articles_connection(args: {search: "6"}){
edges{
cursor
node{
id
title
content
author{
id
name
}
}
}
}
}
response:
data:
search_articles_connection:
edges:
- cursor: eyJpZCIgOiA2fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDZd
title: Article 6
content: Sample article content 6
author:
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgM10=
name: Author 3

View File

@ -0,0 +1,133 @@
type: bulk
args:
- type: run_sql
args:
sql: |
CREATE TABLE author(
id SERIAL PRIMARY KEY,
name TEXT UNIQUE NOT NULL
);
INSERT INTO author (name)
VALUES ('Author 1'), ('Author 2'), ('Author 3'), ('Author 4');
CREATE TABLE article (
id SERIAL PRIMARY KEY,
title TEXT,
content TEXT,
author_id INTEGER REFERENCES author(id)
);
INSERT INTO article (title, content, author_id)
VALUES
(
'Article 1',
'Sample article content 1',
1
),
(
'Article 2',
'Sample article content 2',
1
),
(
'Article 3',
'Sample article content 3',
1
),
(
'Article 4',
'Sample article content 4',
2
),
(
'Article 5',
'Sample article content 5',
2
),
(
'Article 6',
'Sample article content 6',
3
);
-- Create article view for testing https://github.com/hasura/graphql-engine/issues/5020
CREATE VIEW article_view AS (
SELECT * FROM article
);
CREATE FUNCTION search_articles(search text)
RETURNS SETOF article AS $$
SELECT *
FROM article
WHERE
title ilike ('%' || search || '%') OR
content ilike ('%' || search || '%')
$$ LANGUAGE SQL STABLE;
-- Table with two primary key columns
CREATE TABLE "user"(
first_name TEXT not null,
last_name TEXT not null,
age INTEGER,
address TEXT,
PRIMARY KEY (first_name, last_name)
);
INSERT INTO "user"(first_name, last_name, age, address)
VALUES
('first_1', 'last_1', 24, null),
('first_2', 'last_2', null, 'Bangalore');
# Track tables and define relationships
- type: track_table
args:
name: author
schema: public
- type: track_table
args:
name: article
schema: public
- type: track_table
args:
name: article_view
schema: public
- type: create_object_relationship
args:
table: article
name: author
using:
foreign_key_constraint_on: author_id
- type: create_array_relationship
args:
table: author
name: articles
using:
foreign_key_constraint_on:
table: article
column: author_id
- type: create_array_relationship
args:
table: author
name: articles_view
using:
manual_configuration:
remote_table: article_view
column_mapping:
id: author_id
- type: track_function
version: 2
args:
function: search_articles
- type: track_table
args:
name: user
schema: public

View File

@ -0,0 +1,10 @@
type: bulk
args:
- type: run_sql
args:
cascade: true
sql: |
DROP VIEW article_view;
DROP TABLE article CASCADE;
DROP TABLE author;
DROP TABLE "user";

View File

@ -0,0 +1,60 @@
description: Get last page of articles with 3 items
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '2'
query:
query: |
query {
article_connection(
last: 3
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
id
title
content
is_published
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiA0fQ==
endCursor: eyJpZCIgOiA2fQ==
hasPreviousPage: true
hasNextPage: false
edges:
- cursor: eyJpZCIgOiA0fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDRd
title: Article 4
content: Sample article content 4
is_published: true
author_id: 2
- cursor: eyJpZCIgOiA1fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDVd
title: Article 5
content: Sample article content 5
is_published: false
author_id: 2
- cursor: eyJpZCIgOiA2fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDZd
title: Article 6
content: Sample article content 6
is_published: true
author_id: 3

View File

@ -0,0 +1,54 @@
description: Get last page of articles with 2 items before 'Article 4'
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '2'
query:
query: |
query {
article_connection(
last: 3
before: "eyJpZCIgOiA0fQ=="
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
id
title
content
is_published
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiAxfQ==
endCursor: eyJpZCIgOiAyfQ==
hasPreviousPage: false
hasNextPage: true
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDFd
title: Article 1
content: Sample article content 1
is_published: true
author_id: 1
- cursor: eyJpZCIgOiAyfQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDJd
title: Article 2
content: Sample article content 2
is_published: true
author_id: 1

View File

@ -0,0 +1,60 @@
description: Get 1st page of articles with 3 items as user role
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '2'
query:
query: |
query {
article_connection(
first: 3
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
id
title
content
is_published
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiAxfQ==
endCursor: eyJpZCIgOiA0fQ==
hasPreviousPage: false
hasNextPage: true
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDFd
title: Article 1
content: Sample article content 1
is_published: true
author_id: 1
- cursor: eyJpZCIgOiAyfQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDJd
title: Article 2
content: Sample article content 2
is_published: true
author_id: 1
- cursor: eyJpZCIgOiA0fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDRd
title: Article 4
content: Sample article content 4
is_published: true
author_id: 2

View File

@ -0,0 +1,54 @@
description: Get 2nd page of articles with 3 items as user role
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '2'
query:
query: |
query {
article_connection(
first: 3
after: "eyJpZCIgOiA0fQ=="
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
id
title
content
is_published
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiA1fQ==
endCursor: eyJpZCIgOiA2fQ==
hasPreviousPage: true
hasNextPage: false
edges:
- cursor: eyJpZCIgOiA1fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDVd
title: Article 5
content: Sample article content 5
is_published: false
author_id: 2
- cursor: eyJpZCIgOiA2fQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXJ0aWNsZSIsIDZd
title: Article 6
content: Sample article content 6
is_published: true
author_id: 3

View File

@ -0,0 +1,38 @@
description: Query author connection with edges and pageInfo as user role
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '2'
query:
query: |
query {
author_connection{
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
id
name
}
}
}
}
response:
data:
author_connection:
pageInfo:
startCursor: eyJpZCIgOiAyfQ==
endCursor: eyJpZCIgOiAyfQ==
hasPreviousPage: false
hasNextPage: false
edges:
- cursor: eyJpZCIgOiAyfQ==
node:
id: WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgMl0=
name: Author 2

View File

@ -0,0 +1,19 @@
description: Query author node with an id belonging to the same user id
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '2'
query:
query: |
query {
node(id: "WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgMl0K"){
... on author{
name
}
}
}
response:
data:
node:
name: Author 2

View File

@ -0,0 +1,18 @@
description: Query author node with an id belonging to another user id
url: /v1/relay
status: 200
headers:
X-Hasura-Role: user
X-Hasura-User-Id: '1'
query:
query: |
query {
node(id: "WzEsICJwdWJsaWMiLCAiYXV0aG9yIiwgMl0K"){
... on author{
name
}
}
}
response:
data:
node: null

View File

@ -15,40 +15,47 @@ args:
id SERIAL PRIMARY KEY,
title TEXT,
content TEXT,
author_id INTEGER REFERENCES author(id)
author_id INTEGER REFERENCES author(id),
is_published BOOLEAN NOT NULL DEFAULT false
);
INSERT INTO article (title, content, author_id)
INSERT INTO article (title, content, author_id, is_published)
VALUES
(
'Article 1',
'Sample article content 1',
1
1,
true
),
(
'Article 2',
'Sample article content 2',
1
1,
true
),
(
'Article 3',
'Sample article content 3',
1
1,
false
),
(
'Article 4',
'Sample article content 4',
2
2,
true
),
(
'Article 5',
'Sample article content 5',
2
2,
false
),
(
'Article 6',
'Sample article content 6',
3
3,
true
);
# Track tables and define relationships
@ -77,3 +84,28 @@ args:
foreign_key_constraint_on:
table: article
column: author_id
# Define permissions
- type: create_select_permission
args:
table: author
role: user
permission:
columns: '*'
filter:
id: x-hasura-user-id
- type: create_select_permission
args:
table: article
role: user
permission:
columns:
- title
- content
- is_published
- author_id
filter:
_or:
- author_id: x-hasura-user-id
- is_published: true

View File

@ -4,5 +4,5 @@ args:
args:
cascade: true
sql: |
DROP TABLE article;
DROP TABLE article CASCADE;
DROP TABLE author;

View File

@ -653,27 +653,29 @@ class TestGraphQLExplain:
@pytest.mark.parametrize('transport', ['http', 'websocket'])
@usefixtures('per_class_tests_db_state')
class TestRelayQueries:
class TestRelayQueriesBasic:
@classmethod
def dir(cls):
return 'queries/graphql_query/relay'
return 'queries/graphql_query/relay/basic'
# Basic queries
def test_article_connection(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/basic/article_connection.yaml', transport)
check_query_f(hge_ctx, self.dir() + '/article_connection.yaml', transport)
def test_author_connection(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/basic/author_connection.yaml', transport)
check_query_f(hge_ctx, self.dir() + '/author_connection.yaml', transport)
def test_author_with_articles_view_connection(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/author_with_articles_view_connection.yaml', transport)
def test_search_articles_connection(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/search_articles_connection.yaml', transport)
def test_node(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/basic/node.yaml', transport)
def test_invalid_node(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/basic/invalid_node_id.yaml', transport)
check_query_f(hge_ctx, self.dir() + '/node.yaml', transport)
def test_only_pageinfo(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/basic/only_pageinfo.yaml', transport)
check_query_f(hge_ctx, self.dir() + '/only_pageinfo.yaml', transport)
# Articles forward pagination
def test_article_no_orderby_forward_pagination(self, hge_ctx, transport):
@ -698,6 +700,56 @@ class TestRelayQueries:
def test_after_and_before_fail(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + "/pagination_errors/after_and_before.yaml", transport)
# Node id errors
def test_insufficient_data(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/insufficient_data.yaml', transport)
def test_invalid_column_value(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/invalid_column_value.yaml', transport)
def test_invalid_id(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/invalid_id.yaml', transport)
def test_missing_columns(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/missing_columns.yaml', transport)
def test_non_array_id(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/non_array_id.yaml', transport)
def test_unexpected_columns(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/unexpected_columns.yaml', transport)
def test_invalid_node_id_version(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/invalid_node_id_version.yaml', transport)
def test_non_integer_version(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/node_id_errors/non_integer_version.yaml', transport)
@pytest.mark.parametrize('transport', ['http', 'websocket'])
@usefixtures('per_class_tests_db_state')
class TestRelayQueriesPermissions:
@classmethod
def dir(cls):
return 'queries/graphql_query/relay/permissions'
def test_author_connection(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/author_connection.yaml', transport)
def test_author_node(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/author_node.yaml', transport)
def test_author_node_null(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/author_node_null.yaml', transport)
# Article forward pagination
def test_article_pagination_forward(self, hge_ctx, transport):
_test_relay_pagination(hge_ctx, transport, self.dir() + '/article_pagination/forward', 2)
def test_article_pagination_backward(self, hge_ctx, transport):
_test_relay_pagination(hge_ctx, transport, self.dir() + '/article_pagination/backward', 2)
def _test_relay_pagination(hge_ctx, transport, test_file_prefix, no_of_pages):
for i in range(no_of_pages):
page_no = i + 1