use only required session variables in multiplexed queries for subscriptions (#4329)

* use only required session variables in multiplexed queries for subscriptions

This reduces the load on Postgres when the result of a subscription does not
depend on the session variables of the request.

* add DerivingVia to the project wide extension list

* expose a more specific function to filter session variables

* improve documentation of session variables of a cohort

Co-Authored-By: Alexis King <lexi.lambda@gmail.com>

* fix bad rebase

* add test for checking only required session variables are used to make query

Co-authored-by: Alexis King <lexi.lambda@gmail.com>
Co-authored-by: Karthikeyan Chinnakonda <karthikeyan@hasura.io>
Vamshi Surabhi 2020-11-03 13:45:22 +05:30 committed by GitHub
parent 25892ac70c
commit 98c853f4f7
9 changed files with 262 additions and 133 deletions
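
The gist of the change, as a minimal standalone sketch (the names below are illustrative stand-ins, not the engine's actual types or API): before a request's session variables become part of a cohort's identity, they are pruned to the set the generated SQL actually references, so requests that differ only in unused variables can be multiplexed into the same cohort.

import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet        as Set
import           Data.Text           (Text)

-- Illustrative stand-ins for the engine's session-variable types.
type SessionVariable  = Text
type SessionVariables = Map.HashMap SessionVariable Text

-- Keep only the session variables the subscription's SQL actually refers to.
pruneToRequired :: Set.HashSet SessionVariable -> SessionVariables -> SessionVariables
pruneToRequired required = Map.filterWithKey (\k _ -> k `Set.member` required)

-- If the query's permissions reference no session variables at all, two
-- requests with different user ids prune to the same (empty) value and can
-- therefore share a cohort:
--
-- >>> pruneToRequired Set.empty (Map.fromList [("x-hasura-user-id", "1")])
-- fromList []
-- >>> pruneToRequired Set.empty (Map.fromList [("x-hasura-user-id", "2")])
-- fromList []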

View File

@ -261,6 +261,8 @@ library
-- scheduled triggers
, cron >= 0.6.2
-- needed for deriving via
, semigroups >= 0.19
, random
, mmorph
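
For context on the new semigroups bound: the plan module below derives Semigroup and Monoid for its accumulator record via GenericSemigroupMonoid. A minimal sketch of the pattern in isolation (the Acc record is illustrative and not part of this change):

{-# LANGUAGE DeriveGeneric, DerivingStrategies, DerivingVia #-}

import Data.Semigroup.Generic (GenericSemigroupMonoid)
import GHC.Generics           (Generic)

-- Fieldwise (<>) and mempty are derived from the fields' own instances.
data Acc = Acc
  { accNames  :: [String]
  , accCounts :: [Int]
  }
  deriving (Generic)
  deriving (Semigroup, Monoid) via (GenericSemigroupMonoid Acc)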

View File

@ -1,7 +1,7 @@
{-# LANGUAGE UndecidableInstances #-}
-- | Construction of multiplexed live query plans; see "Hasura.GraphQL.Execute.LiveQuery" for
-- details.
-- | Construction of multiplexed live query plans; see
-- "Hasura.GraphQL.Execute.LiveQuery" for details.
module Hasura.GraphQL.Execute.LiveQuery.Plan
( MultiplexedQuery
, mkMultiplexedQuery
@ -28,20 +28,21 @@ import Hasura.Prelude
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.Extended as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString as B
import qualified Data.HashSet as Set
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.UUID.V4 as UUID
import qualified Database.PG.Query as Q
import qualified Database.PG.Query.PTI as PTI
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Database.PG.Query.PTI as PTI
import qualified PostgreSQL.Binary.Encoding as PE
import Control.Lens
import Data.UUID (UUID)
import Data.Semigroup.Generic
import qualified Hasura.Backends.Postgres.Execute.RemoteJoin as RR
import qualified Hasura.Backends.Postgres.SQL.DML as S
@ -112,35 +113,6 @@ mkMultiplexedQuery rootFields = MultiplexedQuery . Q.fromBuilder . toSQL $ S.mkS
mkQualifiedIdentifier prefix = S.SEQIdentifier . S.QIdentifier (S.QualifiedIdentifier prefix Nothing) -- TODO fix this Nothing of course
aliasToIdentifier = Identifier . G.unName
-- TODO fix this comment
-- | Resolves an 'GR.UnresolvedVal' by converting 'GR.UVPG' values to SQL expressions that refer to
-- the @result_vars@ input object, collecting variable values along the way.
resolveMultiplexedValue
:: (MonadState (HashMap G.Name PGColumnValue, Seq PGColumnValue) m)
=> UnpreparedValue -> m S.SQLExp
resolveMultiplexedValue = \case
UVParameter colVal varM -> do
varJsonPath <- case fmap PS.getName varM of
Just varName -> do
modifying _1 $ Map.insert varName colVal
pure ["query", G.unName varName]
Nothing -> do
syntheticVarIndex <- gets (length . snd)
modifying _2 (|> colVal)
pure ["synthetic", T.pack $ show syntheticVarIndex]
pure $ fromResVars (PGTypeScalar $ pstType $ pcvValue colVal) varJsonPath
UVSessionVar ty sessVar -> pure $ fromResVars ty ["session", sessionVariableToText sessVar]
UVLiteral sqlExp -> pure sqlExp
UVSession -> pure $ fromResVars (PGTypeScalar PGJSON) ["session"]
where
fromResVars pgType jPath = addTypeAnnotation pgType $ S.SEOpApp (S.SQLOp "#>>")
[ S.SEQIdentifier $ S.QIdentifier (S.QualifiedIdentifier (Identifier "_subs") Nothing) (Identifier "result_vars")
, S.SEArray $ map S.SELit jPath
]
addTypeAnnotation pgType = flip S.SETyAnn (S.mkTypeAnn pgType) . case pgType of
PGTypeScalar scalarType -> withConstructorFn scalarType
PGTypeArray _ -> id
newtype CohortId = CohortId { unCohortId :: UUID }
deriving (Show, Eq, Hashable, J.ToJSON, Q.FromCol)
@ -150,11 +122,35 @@ newCohortId = CohortId <$> liftIO UUID.nextRandom
data CohortVariables
= CohortVariables
{ _cvSessionVariables :: !SessionVariables
-- ^ A set of session variables, pruned to the minimal set actually used by
-- this query. To illustrate the need for this pruning, suppose we have the
-- following query:
--
-- > query {
-- > articles {
-- > id
-- > title
-- > }
-- > }
--
-- If the select permission on @articles@ is just @{"is_public": true}@, we
-- just generate the SQL query
--
-- > SELECT id, title FROM articles WHERE is_public = true
--
-- which doesn't use any session variables at all. Therefore, we ought to be
-- able to multiplex all queries of this shape into a single cohort, for quite
-- good performance! But if we don't prune the session variables, we'll
-- needlessly split subscribers into several cohorts simply because they have
-- different values for, say, @X-Hasura-User-Id@.
--
-- The 'mkCohortVariables' smart constructor handles pruning the session
-- variables to a minimal set, avoiding this pessimization.
, _cvQueryVariables :: !ValidatedQueryVariables
, _cvSyntheticVariables :: !ValidatedSyntheticVariables
-- ^ To allow more queries to be multiplexed together, we introduce “synthetic” variables for
-- /all/ SQL literals in a query, even if they don't correspond to any GraphQL variable. For
-- example, the query
-- ^ To allow more queries to be multiplexed together, we introduce “synthetic”
-- variables for /all/ SQL literals in a query, even if they don't correspond to
-- any GraphQL variable. For example, the query
--
-- > subscription latest_tracks($condition: tracks_bool_exp!) {
-- > tracks(where: $tracks_bool_exp) {
@ -163,30 +159,50 @@ data CohortVariables
-- > }
-- > }
--
-- might be executed with similar values for @$condition@, such as @{"album_id": {"_eq": "1"}}@
-- and @{"album_id": {"_eq": "2"}}@.
-- might be executed with similar values for @$condition@, such as @{"album_id":
-- {"_eq": "1"}}@ and @{"album_id": {"_eq": "2"}}@.
--
-- Normally, we wouldn't bother parameterizing over the @1@ and @2@ literals in the resulting
-- query because we can't cache that query plan (since different @$condition@ values could lead to
-- different SQL). However, for live queries, we can still take advantage of the similarity
-- between the two queries by multiplexing them together, so we replace them with references to
-- synthetic variables.
-- Normally, we wouldn't bother parameterizing over the @1@ and @2@ literals in the
-- resulting query because we can't cache that query plan (since different
-- @$condition@ values could lead to different SQL). However, for live queries, we
-- can still take advantage of the similarity between the two queries by
-- multiplexing them together, so we replace them with references to synthetic
-- variables.
} deriving (Show, Eq, Generic)
instance Hashable CohortVariables
-- | Builds a cohort's variables using only the session variables that are
-- required for the subscription.
mkCohortVariables
:: Set.HashSet SessionVariable
-> SessionVariables
-> ValidatedQueryVariables
-> ValidatedSyntheticVariables
-> CohortVariables
mkCohortVariables requiredSessionVariables sessionVariableValues =
CohortVariables $ filterSessionVariables (\k _ -> Set.member k requiredSessionVariables)
sessionVariableValues
instance J.ToJSON CohortVariables where
toJSON (CohortVariables sessionVars queryVars syntheticVars) =
J.object ["session" J..= sessionVars, "query" J..= queryVars, "synthetic" J..= syntheticVars]
J.object [ "session" J..= sessionVars
, "query" J..= queryVars
, "synthetic" J..= syntheticVars
]
-- These types exist only to use the Postgres array encoding.
newtype CohortIdArray = CohortIdArray { unCohortIdArray :: [CohortId] }
deriving (Show, Eq)
instance Q.ToPrepArg CohortIdArray where
toPrepVal (CohortIdArray l) = Q.toPrepValHelper PTI.unknown encoder $ map unCohortId l
where
encoder = PE.array 2950 . PE.dimensionArray foldl' (PE.encodingArray . PE.uuid)
newtype CohortVariablesArray = CohortVariablesArray { unCohortVariablesArray :: [CohortVariables] }
newtype CohortVariablesArray
= CohortVariablesArray { unCohortVariablesArray :: [CohortVariables] }
deriving (Show, Eq)
instance Q.ToPrepArg CohortVariablesArray where
toPrepVal (CohortVariablesArray l) =
Q.toPrepValHelper PTI.unknown encoder (map J.toJSON l)
@ -194,23 +210,26 @@ instance Q.ToPrepArg CohortVariablesArray where
encoder = PE.array 114 . PE.dimensionArray foldl' (PE.encodingArray . PE.json_ast)
executeMultiplexedQuery
:: (MonadTx m) => MultiplexedQuery -> [(CohortId, CohortVariables)] -> m [(CohortId, B.ByteString)]
:: (MonadTx m)
=> MultiplexedQuery -> [(CohortId, CohortVariables)] -> m [(CohortId, B.ByteString)]
executeMultiplexedQuery (MultiplexedQuery query) = executeQuery query
-- | Internal; used by both 'executeMultiplexedQuery' and 'explainLiveQueryPlan'.
executeQuery :: (MonadTx m, Q.FromRow a) => Q.Query -> [(CohortId, CohortVariables)] -> m [a]
executeQuery
:: (MonadTx m, Q.FromRow a)
=> Q.Query -> [(CohortId, CohortVariables)] -> m [a]
executeQuery query cohorts =
let (cohortIds, cohortVars) = unzip cohorts
preparedArgs = (CohortIdArray cohortIds, CohortVariablesArray cohortVars)
in liftTx $ Q.listQE defaultTxErrorHandler query preparedArgs True
-- -------------------------------------------------------------------------------------------------
-- ---------------------------------------------------------------------------
-- Variable validation
-- | When running multiplexed queries, we have to be especially careful about user input, since
-- invalid values will cause the query to fail, causing collateral damage for anyone else
-- multiplexed into the same query. Therefore, we pre-validate variables against Postgres by
-- executing a no-op query of the shape
-- | When running multiplexed queries, we have to be especially careful about user
-- input, since invalid values will cause the query to fail, causing collateral
-- damage for anyone else multiplexed into the same query. Therefore, we
-- pre-validate variables against Postgres by executing a no-op query of the shape
--
-- > SELECT 'v1'::t1, 'v2'::t2, ..., 'vn'::tn
--
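As a side note, the shape of that no-op query can be sketched with plain string assembly (illustrative only — the real validateVariables goes through the SQL AST and prepared-argument machinery rather than concatenating text):

{-# LANGUAGE OverloadedStrings #-}

import           Data.Text (Text)
import qualified Data.Text as T

-- Build "SELECT 'v1'::t1, 'v2'::t2, ..." from already-escaped literal/type pairs.
mkValidationSql :: [(Text, Text)] -> Text
mkValidationSql pairs =
  "SELECT " <> T.intercalate ", " [ "'" <> v <> "'::" <> t | (v, t) <- pairs ]

-- >>> mkValidationSql [("1", "integer"), ("2020-11-03", "date")]
-- "SELECT '1'::integer, '2020-11-03'::date"
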
@ -225,7 +244,7 @@ type ValidatedQueryVariables = ValidatedVariables (Map.HashMap G.Name)
type ValidatedSyntheticVariables = ValidatedVariables []
-- | Checks if the provided arguments are valid values for their corresponding types.
-- Generates SQL of the format "select 'v1'::t1, 'v2'::t2 ..."
-- | Generates SQL of the format "select 'v1'::t1, 'v2'::t2 ..."
validateVariables
:: (Traversable f, MonadError QErr m, MonadIO m)
=> PGExecCtx
@ -244,15 +263,62 @@ validateVariables pgExecCtx variableValues = do
res <- liftIO $ runExceptT (runQueryTx pgExecCtx tx)
liftEither res
-- Explicitly look for the class of errors raised when the format of a value provided
-- for a type is incorrect.
-- Explicitly look for the class of errors raised when the format of a value
-- provided for a type is incorrect.
dataExnErrHandler = mkTxErrorHandler (has _PGDataException)
-- -------------------------------------------------------------------------------------------------
-- | Internal: Used to collect information about various parameters
-- of a subscription field's AST as we resolve them to SQL expressions.
data QueryParametersInfo
= QueryParametersInfo
{ _qpiReusableVariableValues :: !(HashMap G.Name PGColumnValue)
, _qpiSyntheticVariableValues :: !(Seq PGColumnValue)
, _qpiReferencedSessionVariables :: !(Set.HashSet SessionVariable)
-- ^ The session variables that are referenced in the query root field's AST.
-- This information is used to determine a cohort's required session
-- variables
} deriving (Generic)
deriving (Semigroup, Monoid) via (GenericSemigroupMonoid QueryParametersInfo)
makeLenses ''QueryParametersInfo
-- | Resolves an 'GR.UnresolvedVal' by converting 'GR.UVPG' values to SQL
-- expressions that refer to the @result_vars@ input object, collecting information
-- about various parameters of the query along the way.
resolveMultiplexedValue
:: (MonadState QueryParametersInfo m)
=> UnpreparedValue -> m S.SQLExp
resolveMultiplexedValue = \case
UVParameter colVal varM -> do
varJsonPath <- case fmap PS.getName varM of
Just varName -> do
modifying qpiReusableVariableValues $ Map.insert varName colVal
pure ["query", G.unName varName]
Nothing -> do
syntheticVarIndex <- use (qpiSyntheticVariableValues . to length)
modifying qpiSyntheticVariableValues (|> colVal)
pure ["synthetic", T.pack $ show syntheticVarIndex]
pure $ fromResVars (PGTypeScalar $ pstType $ pcvValue colVal) varJsonPath
UVSessionVar ty sessVar -> do
modifying qpiReferencedSessionVariables (Set.insert sessVar)
pure $ fromResVars ty ["session", sessionVariableToText sessVar]
UVLiteral sqlExp -> pure sqlExp
UVSession -> pure $ fromResVars (PGTypeScalar PGJSON) ["session"]
where
fromResVars pgType jPath = addTypeAnnotation pgType $ S.SEOpApp (S.SQLOp "#>>")
[ S.SEQIdentifier $ S.QIdentifier (S.QualifiedIdentifier (Identifier "_subs") Nothing) (Identifier "result_vars")
, S.SEArray $ map S.SELit jPath
]
addTypeAnnotation pgType = flip S.SETyAnn (S.mkTypeAnn pgType) . case pgType of
PGTypeScalar scalarType -> withConstructorFn scalarType
PGTypeArray _ -> id
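The collection idea, reduced to a standalone sketch (types and names here are illustrative, not the engine's): each time a session variable is resolved to a SQL expression, it is also recorded in state, so the cohort can later be keyed on exactly those variables.

{-# LANGUAGE OverloadedStrings #-}

import           Control.Monad.State (State, modify)
import qualified Data.HashSet        as Set
import           Data.Text           (Text)

type SessionVariable = Text  -- stand-in for the engine's type

resolveSessionVar :: SessionVariable -> State (Set.HashSet SessionVariable) Text
resolveSessionVar var = do
  modify (Set.insert var)  -- remember that the query needs this variable
  -- the real code builds a SQL AST; this just shows the JSON-path shape
  pure $ "(_subs.result_vars #>> '{session," <> var <> "}')"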
-- -----------------------------------------------------------------------------------
-- Live query plans
-- | A self-contained, ready-to-execute live query plan. Contains enough information to find an
-- existing poller that this can be added to /or/ to create a new poller if necessary.
-- | A self-contained, ready-to-execute live query plan. Contains enough information
-- to find an existing poller that this can be added to /or/ to create a new poller
-- if necessary.
data LiveQueryPlan
= LiveQueryPlan
{ _lqpParameterizedPlan :: !ParameterizedLiveQueryPlan
@ -268,17 +334,15 @@ $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ParameterizedLiveQueryPlan)
data ReusableLiveQueryPlan
= ReusableLiveQueryPlan
{ _rlqpParameterizedPlan :: !ParameterizedLiveQueryPlan
, _rlqpSyntheticVariableValues :: !ValidatedSyntheticVariables
, _rlqpQueryVariableTypes :: HashMap G.Name PGColumnType
{ _rlqpParameterizedPlan :: !ParameterizedLiveQueryPlan
, _rlqpRequiredSessionVariables :: !(Set.HashSet SessionVariable)
, _rlqpSyntheticVariableValues :: !ValidatedSyntheticVariables
, _rlqpQueryVariableTypes :: HashMap G.Name PGColumnType
} deriving (Show)
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ReusableLiveQueryPlan)
-- | Constructs a new execution plan for a live query and returns a reusable version of the plan if
-- possible.
-- NOTE: This function has a 'MonadTrace' constraint in master, but we don't need it
-- here. We should evaluate if we need it here.
-- | Constructs a new execution plan for a live query and returns a reusable version
-- of the plan if possible.
buildLiveQueryPlan
:: ( MonadError QErr m
, MonadIO m
@ -288,29 +352,7 @@ buildLiveQueryPlan
-> InsOrdHashMap G.Name (SubscriptionRootField UnpreparedValue)
-> m (LiveQueryPlan, Maybe ReusableLiveQueryPlan)
buildLiveQueryPlan pgExecCtx userInfo unpreparedAST = do
-- ((resolvedASTs, (queryVariableValues, syntheticVariableValues)), finalReusability) <-
-- GV.runReusabilityTWith initialReusability . flip runStateT mempty $
-- fmap Map.fromList . for (toList fields) $ \field -> case GV._fName field of
-- "__typename" -> throwVE "you cannot create a subscription on '__typename' field"
-- _ -> do
-- unresolvedAST <- GR.queryFldToPGAST field actionExecutioner
-- resolvedAST <- GR.traverseQueryRootFldAST resolveMultiplexedValue unresolvedAST
-- let (_, remoteJoins) = GR.toPGQuery resolvedAST
-- -- Reject remote relationships in subscription live query
-- when (remoteJoins /= mempty) $
-- throw400 NotSupported
-- "Remote relationships are not allowed in subscriptions"
-- pure (GV._fAlias field, resolvedAST)
-- Transform the RQL AST into a prepared SQL query
{- preparedAST <- for unpreparedAST \unpreparedQuery -> do
(preparedQuery, PlanningSt _ planVars planVals)
<- flip runStateT initPlanningSt
$ traverseSubscriptionRootField prepareWithPlan unpreparedQuery
pure $! irToRootFieldPlan planVars planVals preparedQuery
-}
(preparedAST, (queryVariableValues, querySyntheticVariableValues)) <- flip runStateT (mempty, Seq.empty) $
(preparedAST, QueryParametersInfo{..}) <- flip runStateT mempty $
for unpreparedAST \unpreparedQuery -> do
resolvedRootField <- traverseQueryRootField resolveMultiplexedValue unpreparedQuery
case resolvedRootField of
@ -332,56 +374,34 @@ buildLiveQueryPlan pgExecCtx userInfo unpreparedAST = do
-- We need to ensure that the values provided for variables are correct according to Postgres.
-- Without this check an invalid value for a variable for one instance of the subscription will
-- take down the entire multiplexed query.
validatedQueryVars <- validateVariables pgExecCtx $ fmap pcvValue queryVariableValues
validatedSyntheticVars <- validateVariables pgExecCtx $ map pcvValue $ toList querySyntheticVariableValues
validatedQueryVars <- validateVariables pgExecCtx $ fmap pcvValue _qpiReusableVariableValues
validatedSyntheticVars <- validateVariables pgExecCtx $ map pcvValue $ toList _qpiSyntheticVariableValues
let -- TODO validatedQueryVars validatedSyntheticVars
cohortVariables = CohortVariables (_uiSession userInfo) validatedQueryVars validatedSyntheticVars
cohortVariables = mkCohortVariables _qpiReferencedSessionVariables
(_uiSession userInfo) validatedQueryVars validatedSyntheticVars
plan = LiveQueryPlan parameterizedPlan cohortVariables
-- See Note [Temporarily disabling query plan caching]
-- varTypes = finalReusability ^? GV._Reusable
reusablePlan = ReusableLiveQueryPlan parameterizedPlan validatedSyntheticVars mempty {- <$> _varTypes -}
reusablePlan =
ReusableLiveQueryPlan parameterizedPlan
_qpiReferencedSessionVariables
validatedSyntheticVars
mempty {- <$> _varTypes -}
pure (plan, Just reusablePlan)
-- (astResolved, (queryVariableValues, syntheticVariableValues)) <- flip runStateT mempty $
-- GEQ.traverseSubscriptionRootField resolveMultiplexedValue _astUnresolved
-- let pgQuery = mkMultiplexedQuery $ _toPGQuery astResolved
-- parameterizedPlan = ParameterizedLiveQueryPlan (userRole userInfo) fieldAlias pgQuery
-- -- We need to ensure that the values provided for variables
-- -- are correct according to Postgres. Without this check
-- -- an invalid value for a variable for one instance of the
-- -- subscription will take down the entire multiplexed query
-- validatedQueryVars <- validateVariables pgExecCtx queryVariableValues
-- validatedSyntheticVars <- validateVariables pgExecCtx (toList syntheticVariableValues)
-- let cohortVariables = CohortVariables (userVars userInfo) validatedQueryVars validatedSyntheticVars
-- plan = LiveQueryPlan parameterizedPlan cohortVariables
-- reusablePlan = ReusableLiveQueryPlan parameterizedPlan validatedSyntheticVars <$> _varTypes
-- pure (plan, reusablePlan)
-- See Note [Temporarily disabling query plan caching]
-- reuseLiveQueryPlan
-- :: (MonadError QErr m, MonadIO m)
-- => PGExecCtx
-- -> SessionVariables
-- -> Maybe GH.VariableValues
-- -> ReusableLiveQueryPlan
-- -> m LiveQueryPlan
-- reuseLiveQueryPlan pgExecCtx sessionVars queryVars reusablePlan = do
-- let ReusableLiveQueryPlan parameterizedPlan syntheticVars queryVarTypes = reusablePlan
-- annVarVals <- _validateVariablesForReuse queryVarTypes queryVars
-- validatedVars <- validateVariables pgExecCtx annVarVals
-- pure $ LiveQueryPlan parameterizedPlan (CohortVariables sessionVars validatedVars syntheticVars)
data LiveQueryPlanExplanation
= LiveQueryPlanExplanation
{ _lqpeSql :: !Text
, _lqpePlan :: ![Text]
{ _lqpeSql :: !Text
, _lqpePlan :: ![Text]
, _lqpeVariables :: !CohortVariables
} deriving (Show)
$(J.deriveToJSON (J.aesonDrop 5 J.snakeCase) ''LiveQueryPlanExplanation)
explainLiveQueryPlan :: (MonadTx m, MonadIO m) => LiveQueryPlan -> m LiveQueryPlanExplanation
explainLiveQueryPlan
:: (MonadTx m, MonadIO m)
=> LiveQueryPlan -> m LiveQueryPlanExplanation
explainLiveQueryPlan plan = do
let parameterizedPlan = _lqpParameterizedPlan plan
queryText = Q.getQueryText . unMultiplexedQuery $ _plqpQuery parameterizedPlan
@ -389,5 +409,6 @@ explainLiveQueryPlan plan = do
-- query, maybe resulting in privilege escalation:
explainQuery = Q.fromText $ "EXPLAIN (FORMAT TEXT) " <> queryText
cohortId <- newCohortId
explanationLines <- map runIdentity <$> executeQuery explainQuery [(cohortId, _lqpVariables plan)]
pure $ LiveQueryPlanExplanation queryText explanationLines
explanationLines <- map runIdentity <$> executeQuery explainQuery
[(cohortId, _lqpVariables plan)]
pure $ LiveQueryPlanExplanation queryText explanationLines $ _lqpVariables plan

View File

@ -7,6 +7,7 @@ module Hasura.Session
, SessionVariable
, mkSessionVariable
, SessionVariables
, filterSessionVariables
, SessionVariableValue
, sessionVariableToText
, mkSessionVariablesText
@ -100,6 +101,11 @@ newtype SessionVariables =
SessionVariables { unSessionVariables :: Map.HashMap SessionVariable SessionVariableValue}
deriving (Show, Eq, Hashable, Semigroup, Monoid)
filterSessionVariables
:: (SessionVariable -> SessionVariableValue -> Bool)
-> SessionVariables -> SessionVariables
filterSessionVariables f = SessionVariables . Map.filterWithKey f . unSessionVariables
instance ToJSON SessionVariables where
toJSON (SessionVariables varMap) =
toJSON $ Map.fromList $ map (first sessionVariableToText) $ Map.toList varMap
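
A quick usage note: this is the helper that mkCohortVariables in the plan module uses to prune a request's variables. Restricting to a known required set looks like this (illustrative wrapper, assuming Hasura.Session is imported):

import qualified Data.HashSet   as Set
import           Hasura.Session (SessionVariable, SessionVariables, filterSessionVariables)

-- Drop every session variable the subscription's SQL does not reference.
requiredOnly :: Set.HashSet SessionVariable -> SessionVariables -> SessionVariables
requiredOnly required = filterSessionVariables (\k _ -> k `Set.member` required)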

View File

@ -11,3 +11,28 @@ args:
args:
schema: hge_tests
name: test_t2
- type: run_sql
args:
sql: |
create table hge_tests.articles(
id serial primary key,
content text,
title text,
is_public bool default false
);
- type: track_table
args:
schema: hge_tests
name: articles
- type: create_select_permission
args:
table:
schema: hge_tests
name: articles
role: public
permission:
columns:
- title
- content
filter:
is_public: true

View File

@ -3,4 +3,5 @@ args:
- type: run_sql
args:
sql: |
drop table hge_tests.test_t2
drop table hge_tests.test_t2;
drop table hge_tests.articles;

View File

@ -0,0 +1,7 @@
query: |
subscription {
articles {
title
content
}
}

View File

@ -6,3 +6,41 @@ args:
- type: track_table
args:
name: test
- type: run_sql
args:
sql: |
create table articles(
id serial primary key,
user_id int,
content text,
title text,
is_public bool default false
);
- type: track_table
args:
name: articles
- type: create_select_permission
args:
table:
name: articles
role: public
permission:
columns:
- title
- content
filter:
is_public: true
- type: create_select_permission
args:
table:
name: articles
role: user
permission:
columns:
- user_id
- title
- content
- is_public
filter:
id:
_eq: X-Hasura-User-Id

View File

@ -2,4 +2,6 @@ type: bulk
args:
- type: run_sql
args:
sql: DROP TABLE test;
sql: |
DROP TABLE test;
DROP TABLE articles;

View File

@ -272,16 +272,43 @@ class TestSubscriptionMultiplexing:
different_sql = self.get_parameterized_sql(hge_ctx, query, vars)
assert different_sql != representative_sql, (representative_sql, different_sql)
def get_parameterized_sql(self, hge_ctx, query, variables):
def test_extraneous_session_variables_are_discarded_from_query(self, hge_ctx):
with open(self.dir() + '/articles_query.yaml') as c:
config = yaml.safe_load(c)
query = config['query']
session_variables = {
"X-Hasura-Role":"public",
"X-Hasura-User-Id":"1" # extraneous session variable
}
response = self.get_explain_graphql_query_response(hge_ctx, query, {}, session_variables)
# The input session variables should be discarded because the `public` role's
# permission only checks that `is_public` is `true`
assert response["variables"]["session"] == {}, response["variables"]
session_variables = {
"X-Hasura-Role":"user",
"X-Hasura-User-Id":"1",
"X-Hasura-Allowed-Ids":"{1,3,4}" # extraneous session variable
}
response = self.get_explain_graphql_query_response(hge_ctx, query, {}, session_variables)
# The input session variable should not be ignored because the `user` role can only
# select those rows where `user_id = X-Hasura-User-Id`
assert response["variables"]["session"] == {'x-hasura-user-id':"1"}, response["variables"]
def get_explain_graphql_query_response(self, hge_ctx, query, variables, user_headers = {}):
admin_secret = hge_ctx.hge_key
headers = {}
if admin_secret is not None:
headers['X-Hasura-Admin-Secret'] = admin_secret
request = { 'query': { 'query': query, 'variables': variables }, 'user': {} }
request = { 'query': { 'query': query, 'variables': variables }, 'user': user_headers }
status_code, response, _ = hge_ctx.anyq('/v1/graphql/explain', request, headers)
assert status_code == 200, (request, status_code, response)
return response
def get_parameterized_sql(self, hge_ctx, query, variables):
response = self.get_explain_graphql_query_response(hge_ctx, query, variables)
sql = response['sql']
assert isinstance(sql, str), response
return sql