mirror of https://github.com/hasura/graphql-engine.git
synced 2024-12-16 09:51:59 +03:00

Merge branch 'master' into issue-3969
commit b2c4fa2918

@@ -25,7 +25,7 @@

### Other changes

- console: disable editing action relationships
- console and cli-ext: fix parsing of wrapped types in SDL
- cli: fix typo in cli example for squash (fix #4047) (#4049)
- console: fix run_sql migration modal messaging (close #4020) (#4060)
- docs: add note on pg versions for actions (#4034)

@@ -88,4 +88,8 @@

- auto-include `__typename` field in custom types' objects (fix #4063)
- squash some potential space leaks (#3937)
- docs: bump MarkupSafe version (#4102)
- console: add design system base components (#3866)
- server: validate action webhook response to conform to action output type (fix #3977)
- server: preserve cookie headers from sync action webhook (close #4021)
- server: add 'ID' to default scalars in custom types (fix #4061)
- console: add design system base components (#3866)
- docs: add docs for redeliver_event API

@@ -17,9 +17,6 @@ const Editor = ({ mode, ...props }) => {
      tabSize={2}
      setOptions={{
        showLineNumbers: true,
        enableBasicAutocompletion: true,
        enableSnippets: true,
        behavioursEnabled: true,
      }}
      {...props}
    />

@@ -76,7 +76,10 @@ const Headers = ({ headers, setHeaders }) => {
    };

    return (
      <div className={`${styles.display_flex} ${styles.add_mar_bottom_mid}`}>
      <div
        className={`${styles.display_flex} ${styles.add_mar_bottom_mid}`}
        key={i}
      >
        {getHeaderNameInput()}
        {getHeaderValueInput()}
        {getRemoveButton()}

@@ -28,11 +28,7 @@
}

.ReactTable .rt-thead .rt-resizable-header-content {
  /* font-weight: bold; */
  /* Newly added */
  font-size: 16px;
  font-weight: 600;
  color: #454236;
  font-weight: bold;
}

.ReactTable .rt-thead .rt-resizable-header .rt-resizer {

@@ -69,9 +65,6 @@
.ReactTable .rt-table .rt-thead .rt-td {
  padding-left: 20px !important;
  padding-right: 20px !important;
  /* Newly added */
  padding-top: 14px !important;
  padding-bottom: 14px !important;
  border-right: 1px solid rgba(0, 0, 0, 0.1);
}

@@ -86,15 +79,6 @@
  padding: 4px 8px;
  border-right: 1px solid rgba(0, 0, 0, 0.1);
  word-wrap: break-word;
  /* padding: 8px; */
  /* Newly added */
  padding-left: 20px !important;
  padding-right: 20px !important;
  padding-top: 14px !important;
  padding-bottom: 14px !important;
  font-size: 16px;
  font-weight: 400;
  color: #303030;
}
.ReactTable .rt-th,
.ReactTable .rt-td {

@@ -120,12 +104,10 @@
}

.ReactTable .rt-table .rt-thead .rt-tr .rt-th {
  /* background-color: #f2f2f2 !important; */
  /* Newly added */
  background-color: #ededed !important;
  background-color: #f2f2f2 !important;
  color: #4d4d4d;
  font-weight: 600 !important;
  border-bottom: 0px solid #ddd;
  border-bottom: 2px solid #ddd;
  text-align: left;
}

@@ -159,31 +141,6 @@
  border-bottom: solid 1px rgba(0, 0, 0, 0.05);
}

/* Newly added */
.ReactTable .rt-thead .rt-tr {
  border-left: 4px solid #ededed;
  box-shadow: 0 3px 6px 0 rgba(0, 0, 0, 0.16);
  z-index: 100;
}
/* Newly added */
.ReactTable .rt-table .rt-tbody .rt-tr-group .rt-tr.-even,
.ReactTable .rt-table .rt-tbody .rt-tr-group .rt-tr.-odd {
  background-color: #fff;
  border-left: 4px solid transparent;
}
.ReactTable .rt-table .rt-tbody .rt-tr-group .rt-tr.-even:hover {
  /* background-color: #ebf7de; */
  /* Newly added */
  box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.16);
  border-left: 4px solid #f8d721;
}
.ReactTable .rt-table .rt-tbody .rt-tr-group .rt-tr.-odd:hover {
  /* background-color: #ebf7de; */
  /* Newly added */
  box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.16);
  border-left: 4px solid #f8d721;
}

.ReactTable .rt-table .rt-tbody {
  overflow-x: unset !important;
  min-width: fit-content !important;

@@ -50,10 +50,10 @@ const CodeTabs = ({
    );
  }

  const files = codegenFiles.map(({ name, content }) => {
  const files = codegenFiles.map(({ name, content }, i) => {
    const getFileTab = (component, filename) => {
      return (
        <Tab eventKey={filename} title={filename}>
        <Tab eventKey={filename} title={filename} key={i}>
          {component}
        </Tab>
      );

@@ -73,7 +73,7 @@ const CodeTabs = ({
    }
  });

  return <Tabs id="uncontrolled-tab-example">{files} </Tabs>;
  return <Tabs id="codegen-files-tabs">{files} </Tabs>;
};

export default CodeTabs;

@@ -40,7 +40,6 @@ export const getAllCodegenFrameworks = () => {
};

export const getCodegenFunc = framework => {
  process.hrtime = () => null;
  return fetch(getCodegenFilePath(framework))
    .then(r => r.text())
    .then(rawJsString => {

@@ -39,6 +39,7 @@ const HandlerEditor = ({
        <input
          type="checkbox"
          checked={forwardClientHeaders}
          readOnly
          className={`${styles.add_mar_right_small}`}
        />
        Forward client headers to webhook

@@ -35,6 +35,7 @@ const HandlerEditor = ({ value, onChange, className }) => {
        <input
          type="radio"
          checked={value === 'synchronous'}
          readOnly
          className={styles.add_mar_right_small}
        />
        Synchronous

@@ -45,6 +46,7 @@ const HandlerEditor = ({ value, onChange, className }) => {
      >
        <input
          type="radio"
          readOnly
          checked={value === 'asynchronous'}
          className={styles.add_mar_right_small}
        />

@@ -138,7 +138,6 @@ const RelationshipEditor = ({
        className={`${styles.select} form-control ${styles.add_pad_left}`}
        placeholder="Enter relationship name"
        data-test="rel-name"
        disabled={isDisabled}
        title={relNameInputTitle}
        value={name}
      />

@@ -371,7 +370,7 @@ const RelationshipEditor = ({
};

const RelEditor = props => {
  const { dispatch, relConfig, objectType } = props;
  const { dispatch, relConfig, objectType, isNew } = props;

  const [relConfigState, setRelConfigState] = React.useState(null);

@@ -382,7 +381,7 @@ const RelEditor = props => {
      <div>
        <b>{relConfig.name}</b>
        <div className={tableStyles.relationshipTopPadding}>
          {getRelDef(relConfig)}
          {getRelDef({ ...relConfig, typename: objectType.name })}
        </div>
      </div>
    );

@@ -418,20 +417,24 @@ const RelEditor = props => {
      );
    }
    dispatch(
      addActionRel({ ...relConfigState, typename: objectType.name }, toggle)
      addActionRel(
        { ...relConfigState, typename: objectType.name },
        toggle,
        isNew ? null : relConfig
      )
    );
  };

  // function to remove the relationship
  let removeFunc;
  if (relConfig) {
  if (!isNew) {
    removeFunc = toggle => {
      dispatch(removeActionRel(relConfig.name, objectType.name, toggle));
    };
  }

  const expandButtonText = relConfig ? 'Edit' : 'Add a relationship';
  const collapseButtonText = relConfig ? 'Close' : 'Cancel';
  const expandButtonText = isNew ? 'Add a relationship' : 'Edit';
  const collapseButtonText = isNew ? 'Cancel' : 'Close';

  return (
    <ExpandableEditor

@@ -34,6 +34,7 @@ const Relationships = ({
          typename={objectType.name}
          allTables={allTables}
          schemaList={schemaList}
          isNew
        />
      </div>
    );

@@ -76,7 +76,7 @@ export const getRelDef = relMeta => {
      ? `${relMeta.remote_table.schema}.${relMeta.remote_table.name}`
      : relMeta.remote_table;

  return `${lcol} → ${tableLabel} . ${rcol}`;
  return `${relMeta.typename} . ${lcol} → ${tableLabel} . ${rcol}`;
};

export const removeTypeRelationship = (types, typename, relName) => {

@@ -90,3 +90,15 @@ export const removeTypeRelationship = (types, typename, relName) => {
    return t;
  });
};

export const validateRelTypename = (types, typename, relname) => {
  for (let i = types.length - 1; i >= 0; i--) {
    const type = types[i];
    if (type.kind === 'object' && type.name === typename) {
      if ((type.relationships || []).some(r => r.name === relname)) {
        return `Relationship with name "${relname}" already exists.`;
      }
    }
  }
  return null;
};

@@ -22,6 +22,7 @@ import {
import {
  injectTypeRelationship,
  removeTypeRelationship,
  validateRelTypename,
} from './Relationships/utils';
import { getConfirmation } from '../../Common/utils/jsUtils';
import {

@@ -406,11 +407,52 @@ export const deleteAction = currentAction => (dispatch, getState) => {
  );
};

export const addActionRel = (relConfig, successCb) => (dispatch, getState) => {
export const addActionRel = (relConfig, successCb, existingRelConfig) => (
  dispatch,
  getState
) => {
  const { types: existingTypes } = getState().types;

  const typesWithRels = injectTypeRelationship(
    existingTypes,
  let typesWithRels = [...existingTypes];

  let validationError;

  if (existingRelConfig) {
    // modifying existing relationship
    // if the relationship is being renamed
    if (existingRelConfig.name !== relConfig.name) {
      // validate the new name
      validationError = validateRelTypename(
        existingTypes,
        relConfig.typename,
        relConfig.name
      );
      // remove old relationship from types
      typesWithRels = removeTypeRelationship(
        existingTypes,
        relConfig.typename,
        existingRelConfig.name
      );
    }
  } else {
    // creating a new relationship

    // validate the relationship name
    validationError = validateRelTypename(
      existingTypes,
      relConfig.typename,
      relConfig.name
    );
  }

  const errorMsg = 'Saving relationship failed';
  if (validationError) {
    return dispatch(showErrorNotification(errorMsg, validationError));
  }

  // add modified relationship to types
  typesWithRels = injectTypeRelationship(
    typesWithRels,
    relConfig.typename,
    relConfig
  );

@@ -426,10 +468,9 @@ export const addActionRel = (relConfig, successCb) => (dispatch, getState) => {
  const upQueries = [customTypesQueryUp];
  const downQueries = [customTypesQueryDown];

  const migrationName = 'add_action_rel'; // TODO: better migration name
  const requestMsg = 'Adding relationship...';
  const successMsg = 'Relationship added successfully';
  const errorMsg = 'Adding relationship failed';
  const migrationName = `save_rel_${relConfig.name}_on_${relConfig.typename}`;
  const requestMsg = 'Saving relationship...';
  const successMsg = 'Relationship saved successfully';
  const customOnSuccess = () => {
    // dispatch(createActionRequestComplete());
    dispatch(fetchCustomTypes());

@@ -35,7 +35,7 @@ export const unwrapType = wrappedTypename => {
};

export const getAstTypeMetadata = type => {
  let _t = { type };
  let _t = { ...type };
  const typewraps = [];
  while (_t.kind !== 'NamedType') {
    if (_t.kind === 'ListType') {

@@ -74,7 +74,7 @@ export const getSchemaTypeMetadata = type => {

export const wrapTypename = (name, wrapperStack) => {
  let wrappedTypename = name;
  wrapperStack.forEach(w => {
  wrapperStack.reverse().forEach(w => {
    if (w === 'l') {
      wrappedTypename = `[${wrappedTypename}]`;
    }

@@ -157,6 +157,7 @@ module.exports = {
    // set global consts
    new webpack.DefinePlugin({
      CONSOLE_ASSET_VERSION: Date.now().toString(),
      'process.hrtime': () => null,
    }),
    webpackIsomorphicToolsPlugin.development(),
    new ForkTsCheckerWebpackPlugin({

@@ -196,6 +196,7 @@ module.exports = {
        NODE_ENV: JSON.stringify('production'),
      },
      CONSOLE_ASSET_VERSION: Date.now().toString(),
      'process.hrtime': () => null,
    }),
    new ForkTsCheckerWebpackPlugin({
      compilerOptions: {

@@ -143,12 +143,53 @@ Args syntax
     - TriggerName_
     - Name of the event trigger


.. _redeliver_event:

redeliver_event
---------------

``redeliver_event`` is used to redeliver an existing event. For example, if an event is marked as error
(say, it did not succeed after retries), you can redeliver it using this API. Note that this will reset
the count of retries so far. If the event fails to deliver, it will be retried automatically according
to its ``retry_conf``.

.. code-block:: http

   POST /v1/query HTTP/1.1
   Content-Type: application/json
   X-Hasura-Role: admin

   {
       "type" : "redeliver_event",
       "args" : {
           "event_id": "ad4f698f-a14e-4a6d-a01b-38cd252dd8bf"
       }
   }
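
For instance, the same request can be issued from the command line with ``curl``. This is only a sketch: it assumes a locally running instance on port 8080, and the endpoint URL, admin secret, and event ID are placeholders to adjust for your deployment.

.. code-block:: bash

   # Redeliver a failed event by its UUID; adjust the URL and secret for your setup.
   curl -X POST http://localhost:8080/v1/query \
     -H 'Content-Type: application/json' \
     -H 'X-Hasura-Admin-Secret: <admin-secret>' \
     -d '{"type": "redeliver_event", "args": {"event_id": "ad4f698f-a14e-4a6d-a01b-38cd252dd8bf"}}'
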
.. _redeliver_event_syntax:

Args syntax
^^^^^^^^^^^

.. list-table::
   :header-rows: 1

   * - Key
     - Required
     - Schema
     - Description
   * - event_id
     - true
     - String
     - UUID of the event


.. _invoke_event_trigger:

invoke_event_trigger
--------------------

``invoke_event_trigger`` is used to invoke an event trigger manually.
``invoke_event_trigger`` is used to invoke an event trigger with custom payload.

.. code-block:: http

@@ -211,16 +211,21 @@ The various types of queries are listed in the following table:
     - 1
     - Create or replace an event trigger

   * - :ref:`invoke_event_trigger`
     - :ref:`invoke_event_trigger_args <invoke_event_trigger_syntax>`
     - 1
     - Invoke a trigger manually

   * - :ref:`delete_event_trigger`
     - :ref:`delete_event_trigger_args <delete_event_trigger_syntax>`
     - 1
     - Delete an existing event trigger

   * - :ref:`redeliver_event`
     - :ref:`redeliver_event_args <redeliver_event_syntax>`
     - 1
     - Redeliver an existing event

   * - :ref:`invoke_event_trigger`
     - :ref:`invoke_event_trigger_args <invoke_event_trigger_syntax>`
     - 1
     - Invoke a trigger with custom payload

   * - :ref:`add_remote_schema`
     - :ref:`add_remote_schema_args <add_remote_schema_syntax>`
     - 1

@@ -17,6 +17,11 @@ The last two prerequisites can be installed on Debian with:

    $ sudo apt install libpq-dev python3 python3-pip python3-venv

Additionally, you will need a way to run a Postgres database server. The `dev.sh` script (described below) can set up a Postgres instance for you via [Docker](https://www.docker.com), but if you want to run it yourself, you’ll need:

- [PostgreSQL](https://www.postgresql.org) >= 9.5
- [postgis](https://postgis.net)
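
If you already have Docker, one quick way to get a database with both of the above is sketched here (the image and password are illustrative, not a project requirement):

    $ docker run -d -p 5432:5432 -e POSTGRES_PASSWORD=postgrespassword postgis/postgis
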
### Upgrading npm

If your npm is too old (>= 5.7 required):

@@ -47,7 +47,8 @@ import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types
import Hasura.Server.Context
import Hasura.Server.Utils (RequestId, mkClientHeadersForward)
import Hasura.Server.Utils (RequestId, mkClientHeadersForward,
                            mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)

import qualified Hasura.GraphQL.Execute.LiveQuery as EL

@@ -170,12 +171,11 @@ getExecPlanPartial userInfo sc enableAL req = do
-- to be executed
data ExecOp
  = ExOpQuery !LazyRespTx !(Maybe EQ.GeneratedSqlMap)
  | ExOpMutation !LazyRespTx
  | ExOpMutation !N.ResponseHeaders !LazyRespTx
  | ExOpSubs !EL.LiveQueryPlan

-- The graphql query is resolved into an execution operation
type ExecPlanResolved
  = GQExecPlan ExecOp
type ExecPlanResolved = GQExecPlan ExecOp

getResolvedExecPlan
  :: (HasVersion, MonadError QErr m, MonadIO m)

@@ -215,8 +215,9 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
    getExecPlanPartial userInfo sc enableAL req
  forM partialExecPlan $ \(gCtx, rootSelSet) ->
    case rootSelSet of
      VQ.RMutation selSet ->
        ExOpMutation <$> getMutOp gCtx sqlGenCtx userInfo httpManager reqHeaders selSet
      VQ.RMutation selSet -> do
        (tx, respHeaders) <- getMutOp gCtx sqlGenCtx userInfo httpManager reqHeaders selSet
        pure $ ExOpMutation respHeaders tx
      VQ.RQuery selSet -> do
        (queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx userInfo queryReusability selSet
        traverse_ (addPlanToCache . EP.RPQuery) plan

@@ -286,16 +287,16 @@ resolveMutSelSet
     , MonadIO m
     )
  => VQ.SelSet
  -> m LazyRespTx
  -> m (LazyRespTx, N.ResponseHeaders)
resolveMutSelSet fields = do
  aliasedTxs <- forM (toList fields) $ \fld -> do
    fldRespTx <- case VQ._fName fld of
      "__typename" -> return $ return $ encJFromJValue mutationRootName
      _ -> fmap liftTx . evalReusabilityT $ GR.mutFldToTx fld
      "__typename" -> return (return $ encJFromJValue mutationRootName, [])
      _ -> evalReusabilityT $ GR.mutFldToTx fld
    return (G.unName $ G.unAlias $ VQ._fAlias fld, fldRespTx)

  -- combines all transactions into a single transaction
  return $ liftTx $ toSingleTx aliasedTxs
  return (liftTx $ toSingleTx aliasedTxs, concatMap (snd . snd) aliasedTxs)
  where
    -- A list of aliased transactions for eg
    -- [("f1", Tx r1), ("f2", Tx r2)]

@@ -304,7 +305,7 @@ resolveMutSelSet fields = do
    -- toSingleTx :: [(Text, LazyRespTx)] -> LazyRespTx
    toSingleTx aliasedTxs =
      fmap encJFromAssocList $
        forM aliasedTxs $ \(al, tx) -> (,) al <$> tx
        forM aliasedTxs $ \(al, (tx, _)) -> (,) al <$> tx

getMutOp
  :: (HasVersion, MonadError QErr m, MonadIO m)

@@ -314,17 +315,16 @@ getMutOp
  -> HTTP.Manager
  -> [N.Header]
  -> VQ.SelSet
  -> m LazyRespTx
  -> m (LazyRespTx, N.ResponseHeaders)
getMutOp ctx sqlGenCtx userInfo manager reqHeaders selSet =
  runE_ $ resolveMutSelSet selSet
  peelReaderT $ resolveMutSelSet selSet
  where
    runE_ action = do
      res <- runExceptT $ runReaderT action
    peelReaderT action =
      runReaderT action
        ( userInfo, queryCtxMap, mutationCtxMap
        , typeMap, fldMap, ordByCtx, insCtxMap, sqlGenCtx
        , manager, reqHeaders
        )
      either throwError return res
      where
        queryCtxMap = _gQueryCtxMap ctx
        mutationCtxMap = _gMutationCtxMap ctx

@@ -414,9 +414,7 @@ execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
  L.unLogger logger $ QueryLog q Nothing reqId
  (time, res) <- withElapsedTime $ liftIO $ try $ HTTP.httpLbs req manager
  resp <- either httpThrow return res
  let cookieHdrs = getCookieHdr (resp ^.. Wreq.responseHeader "Set-Cookie")
      respHdrs = Just $ mkRespHeaders cookieHdrs
      !httpResp = HttpResponse (encJFromLBS $ resp ^. Wreq.responseBody) respHdrs
  let !httpResp = HttpResponse (encJFromLBS $ resp ^. Wreq.responseBody) $ mkSetCookieHeaders resp
  return (time, httpResp)

  where

@@ -428,7 +426,3 @@ execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do

    userInfoToHdrs = map (\(k, v) -> (CI.mk $ CS.cs k, CS.cs v)) $
                     userInfoToList userInfo

    getCookieHdr = fmap (\h -> ("Set-Cookie", h))

    mkRespHeaders = map (\(k, v) -> Header (bsToTxt $ CI.original k, bsToTxt v))

@@ -1,5 +1,6 @@
module Hasura.GraphQL.Resolve
  ( mutFldToTx

  , queryFldToPGAST
  , traverseQueryRootFldAST
  , UnresolvedVal(..)

@@ -120,29 +121,30 @@ mutFldToTx
     , MonadIO m
     )
  => V.Field
  -> m RespTx
  -> m (RespTx, HTTP.ResponseHeaders)
mutFldToTx fld = do
  userInfo <- asks getter
  opCtx <- getOpCtx $ V._fName fld
  let noRespHeaders = fmap (,[])
  case opCtx of
    MCInsert ctx -> do
      validateHdrs userInfo (_iocHeaders ctx)
      RI.convertInsert (userRole userInfo) (_iocTable ctx) fld
      noRespHeaders $ RI.convertInsert (userRole userInfo) (_iocTable ctx) fld
    MCInsertOne ctx -> do
      validateHdrs userInfo (_iocHeaders ctx)
      RI.convertInsertOne (userRole userInfo) (_iocTable ctx) fld
      noRespHeaders $ RI.convertInsertOne (userRole userInfo) (_iocTable ctx) fld
    MCUpdate ctx -> do
      validateHdrs userInfo (_uocHeaders ctx)
      RM.convertUpdate ctx fld
      noRespHeaders $ RM.convertUpdate ctx fld
    MCUpdateByPk ctx -> do
      validateHdrs userInfo (_uocHeaders ctx)
      RM.convertUpdateByPk ctx fld
      noRespHeaders $ RM.convertUpdateByPk ctx fld
    MCDelete ctx -> do
      validateHdrs userInfo (_docHeaders ctx)
      RM.convertDelete ctx fld
      noRespHeaders $ RM.convertDelete ctx fld
    MCDeleteByPk ctx -> do
      validateHdrs userInfo (_docHeaders ctx)
      RM.convertDeleteByPk ctx fld
      noRespHeaders $ RM.convertDeleteByPk ctx fld
    MCAction ctx ->
      RA.resolveActionMutation fld ctx (userVars userInfo)

@@ -41,7 +41,7 @@ import Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.Types
import Hasura.RQL.Types.Run
import Hasura.Server.Utils (mkClientHeadersForward)
import Hasura.Server.Utils (mkClientHeadersForward, mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value (PGScalarValue (..), pgScalarValueToJson,

@@ -97,13 +97,13 @@ resolveActionMutation
  => Field
  -> ActionExecutionContext
  -> UserVars
  -> m RespTx
  -> m (RespTx, HTTP.ResponseHeaders)
resolveActionMutation field executionContext sessionVariables =
  case executionContext of
    ActionExecutionSyncWebhook executionContextSync ->
      resolveActionMutationSync field executionContextSync sessionVariables
    ActionExecutionAsync ->
      resolveActionMutationAsync field sessionVariables
      (,[]) <$> resolveActionMutationAsync field sessionVariables

-- | Synchronously execute webhook handler and resolve response to action "output"
resolveActionMutationSync

@@ -121,14 +121,15 @@ resolveActionMutationSync
  => Field
  -> SyncActionExecutionContext
  -> UserVars
  -> m RespTx
  -> m (RespTx, HTTP.ResponseHeaders)
resolveActionMutationSync field executionContext sessionVariables = do
  let inputArgs = J.toJSON $ fmap annInpValueToJson $ _fArguments field
      actionContext = ActionContext actionName
      handlerPayload = ActionWebhookPayload actionContext sessionVariables inputArgs
  manager <- asks getter
  reqHeaders <- asks getter
  webhookRes <- callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resolvedWebhook handlerPayload
  (webhookRes, respHeaders) <- callWebhook manager outputType outputFields reqHeaders confHeaders
                               forwardClientHeaders resolvedWebhook handlerPayload
  let webhookResponseExpression = RS.AEInput $ UVSQL $
        toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
  selectAstUnresolved <-

@@ -136,9 +137,9 @@ resolveActionMutationSync field executionContext sessionVariables = do
    (_fType field) $ _fSelSet field
  astResolved <- RS.traverseAnnSimpleSel resolveValTxt selectAstUnresolved
  let jsonAggType = mkJsonAggSelect outputType
  return $ asSingleRowJsonResp (RS.selectQuerySQL jsonAggType astResolved) []
  return $ (,respHeaders) $ asSingleRowJsonResp (RS.selectQuerySQL jsonAggType astResolved) []
  where
    SyncActionExecutionContext actionName outputType definitionList resolvedWebhook confHeaders
    SyncActionExecutionContext actionName outputType outputFields definitionList resolvedWebhook confHeaders
      forwardClientHeaders = executionContext

{- Note: [Async action architecture]

@@ -281,9 +282,6 @@ asyncActionsProcessor cacheRef pgPool httpManager = forever $ do
  A.mapConcurrently_ (callHandler actionCache) asyncInvocations
  threadDelay (1 * 1000 * 1000)
  where
    getActionDefinition actionCache actionName =
      _aiDefinition <$> Map.lookup actionName actionCache

    runTx :: (Monoid a) => Q.TxE QErr a -> IO a
    runTx q = do
      res <- runExceptT $ Q.runTx' pgPool q

@@ -293,20 +291,23 @@ asyncActionsProcessor cacheRef pgPool httpManager = forever $ do
    callHandler actionCache actionLogItem = do
      let ActionLogItem actionId actionName reqHeaders
            sessionVariables inputPayload = actionLogItem
      case getActionDefinition actionCache actionName of
      case Map.lookup actionName actionCache of
        Nothing -> return ()
        Just definition -> do
          let webhookUrl = _adHandler definition
        Just actionInfo -> do
          let definition = _aiDefinition actionInfo
              outputFields = _aiOutputFields actionInfo
              webhookUrl = _adHandler definition
              forwardClientHeaders = _adForwardClientHeaders definition
              confHeaders = _adHeaders definition
              outputType = _adOutputType definition
              actionContext = ActionContext actionName
          eitherRes <- runExceptT $ callWebhook httpManager outputType reqHeaders confHeaders
                       forwardClientHeaders webhookUrl $
                       ActionWebhookPayload actionContext sessionVariables inputPayload
          eitherRes <- runExceptT $
                       callWebhook httpManager outputType outputFields reqHeaders confHeaders
                         forwardClientHeaders webhookUrl $
                         ActionWebhookPayload actionContext sessionVariables inputPayload
          case eitherRes of
            Left e -> setError actionId e
            Right responsePayload -> setCompleted actionId $ J.toJSON responsePayload
            Left e -> setError actionId e
            Right (responsePayload, _) -> setCompleted actionId $ J.toJSON responsePayload

    setError :: UUID.UUID -> QErr -> IO ()
    setError actionId e =

@@ -361,13 +362,15 @@ callWebhook
  :: (HasVersion, MonadIO m, MonadError QErr m)
  => HTTP.Manager
  -> GraphQLType
  -> ActionOutputFields
  -> [HTTP.Header]
  -> [HeaderConf]
  -> Bool
  -> ResolvedWebhook
  -> ActionWebhookPayload
  -> m ActionWebhookResponse
callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resolvedWebhook actionWebhookPayload = do
  -> m (ActionWebhookResponse, HTTP.ResponseHeaders)
callWebhook manager outputType outputFields reqHeaders confHeaders
  forwardClientHeaders resolvedWebhook actionWebhookPayload = do
  resolvedConfHeaders <- makeHeadersFromConf confHeaders
  let clientHeaders = if forwardClientHeaders then mkClientHeadersForward reqHeaders else []
      contentType = ("Content-Type", "application/json")

@@ -396,14 +399,19 @@ callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resol
  if | HTTP.statusIsSuccessful responseStatus -> do
         let expectingArray = isListType outputType
             addInternalToErr e = e{qeInternal = Just webhookResponseObject}
             throw400Detail t = throwError $ addInternalToErr $ err400 Unexpected t
         webhookResponse <- modifyQErr addInternalToErr $ decodeValue responseValue
         case webhookResponse of
           AWRArray{} -> when (not expectingArray) $
             throw400Detail "expecting object for action webhook response but got array"
           AWRObject{} -> when expectingArray $
             throw400Detail "expecting array for action webhook response but got object"
         pure webhookResponse
         -- In case of any error, add the webhook response in internal
         modifyQErr addInternalToErr $ do
           webhookResponse <- decodeValue responseValue
           case webhookResponse of
             AWRArray objs -> do
               when (not expectingArray) $
                 throwUnexpected "expecting object for action webhook response but got array"
               mapM_ validateResponseObject objs
             AWRObject obj -> do
               when expectingArray $
                 throwUnexpected "expecting array for action webhook response but got object"
               validateResponseObject obj
           pure (webhookResponse, mkSetCookieHeaders responseWreq)

     | HTTP.statusIsClientError responseStatus -> do
         ActionWebhookErrorResponse message maybeCode <-

@@ -414,6 +422,23 @@ callWebhook manager outputType reqHeaders confHeaders forwardClientHeaders resol

     | otherwise ->
         throw500WithDetail "internal error" webhookResponseObject
  where
    throwUnexpected = throw400 Unexpected

    -- The webhook response object should conform to the action's output fields
    validateResponseObject obj = do
      -- Fields not specified in the output type shouldn't be present in the response
      let extraFields = filter (not . flip Map.member outputFields) $ map G.Name $ Map.keys obj
      when (not $ null extraFields) $ throwUnexpected $
        "unexpected fields in webhook response: " <> showNames extraFields

      void $ flip Map.traverseWithKey outputFields $ \fieldName fieldTy ->
        -- If a field is non-nullable, it has to be present in the response with a non-null value
        when (not $ G.isNullable fieldTy) $ case Map.lookup (G.unName fieldName) obj of
          Nothing -> throwUnexpected $
            "field " <> fieldName <<> " expected in webhook response, but not found"
          Just v -> when (v == J.Null) $ throwUnexpected $
            "expecting not null value for field " <>> fieldName

annInpValueToJson :: AnnInpVal -> J.Value
annInpValueToJson annInpValue =

@@ -107,6 +107,7 @@ data SyncActionExecutionContext
  = SyncActionExecutionContext
  { _saecName           :: !ActionName
  , _saecOutputType     :: !GraphQLType
  , _saecOutputFields   :: !ActionOutputFields
  , _saecDefinitionList :: ![(PGCol, PGScalarType)]
  , _saecWebhook        :: !ResolvedWebhook
  , _saecHeaders        :: ![HeaderConf]

@@ -68,6 +68,7 @@ mkMutationField actionName actionInfo definitionList =
      ActionSynchronous ->
        ActionExecutionSyncWebhook $ SyncActionExecutionContext actionName
        (_adOutputType definition)
        (_aiOutputFields actionInfo)
        definitionList
        (_adHandler definition)
        (_adHeaders definition)

@@ -20,6 +20,7 @@ import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.Logging as L
import qualified Hasura.Server.Telemetry.Counters as Telem
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Types as HTTP

runGQ
  :: ( HasVersion

@@ -41,8 +42,8 @@ runGQ reqId userInfo reqHdrs req = do
       userInfo sqlGenCtx enableAL sc scVer httpManager reqHdrs req
  case execPlan of
    E.GExPHasura resolvedOp -> do
      (telemTimeIO, telemQueryType, resp) <- runHasuraGQ reqId req userInfo resolvedOp
      return (telemCacheHit, Telem.Local, (telemTimeIO, telemQueryType, HttpResponse resp Nothing))
      (telemTimeIO, telemQueryType, respHdrs, resp) <- runHasuraGQ reqId req userInfo resolvedOp
      return (telemCacheHit, Telem.Local, (telemTimeIO, telemQueryType, HttpResponse resp respHdrs))
    E.GExPRemote rsi opDef -> do
      let telemQueryType | G._todType opDef == G.OperationTypeMutation = Telem.Mutation
                         | otherwise = Telem.Query

@@ -73,7 +74,7 @@ runGQBatched reqId userInfo reqHdrs reqs =
      -- responses with distinct headers, so just do the simplest thing
      -- in this case, and don't forward any.
      let removeHeaders =
            flip HttpResponse Nothing
            flip HttpResponse []
              . encJFromList
              . map (either (encJFromJValue . encodeGQErr False) _hrBody)
          try = flip catchError (pure . Left) . fmap Right

@@ -89,7 +90,7 @@ runHasuraGQ
  -> GQLReqUnparsed
  -> UserInfo
  -> E.ExecOp
  -> m (DiffTime, Telem.QueryType, EncJSON)
  -> m (DiffTime, Telem.QueryType, HTTP.ResponseHeaders, EncJSON)
  -- ^ Also return 'Mutation' when the operation was a mutation, and the time
  -- spent in the PG query; for telemetry.
runHasuraGQ reqId query userInfo resolvedOp = do

@@ -98,15 +99,15 @@ runHasuraGQ reqId query userInfo resolvedOp = do
    E.ExOpQuery tx genSql -> do
      -- log the generated SQL and the graphql query
      L.unLogger logger $ QueryLog query genSql reqId
      runLazyTx' pgExecCtx tx
    E.ExOpMutation tx -> do
      ([],) <$> runLazyTx' pgExecCtx tx
    E.ExOpMutation respHeaders tx -> do
      -- log the graphql query
      L.unLogger logger $ QueryLog query Nothing reqId
      runLazyTx pgExecCtx Q.ReadWrite $ withUserInfo userInfo tx
      (respHeaders,) <$> runLazyTx pgExecCtx Q.ReadWrite (withUserInfo userInfo tx)
    E.ExOpSubs _ ->
      throw400 UnexpectedPayload
        "subscriptions are not supported over HTTP, use websockets instead"
  resp <- liftEither respE
  (respHdrs, resp) <- liftEither respE
  let !json = encodeGQResp $ GQSuccess $ encJToLBS resp
      telemQueryType = case resolvedOp of E.ExOpMutation{} -> Telem.Mutation ; _ -> Telem.Query
  return (telemTimeIO, telemQueryType, json)
  return (telemTimeIO, telemQueryType, respHdrs, json)

@@ -63,7 +63,7 @@ import qualified Hasura.Server.Telemetry.Counters as Telem
-- this to track a connection's operations so we can remove them from 'LiveQueryState', and
-- log.
--
-- NOTE!: This must be kept consistent with the global 'LiveQueryState', in 'onClose'
-- NOTE!: This must be kept consistent with the global 'LiveQueryState', in 'onClose'
-- and 'onStart'.
type OperationMap
  = STMMap.Map OperationId (LQ.LiveQueryId, Maybe OperationName)

@@ -325,7 +325,8 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
    runHasuraGQ timerTot telemCacheHit reqId query userInfo = \case
      E.ExOpQuery opTx genSql ->
        execQueryOrMut Telem.Query genSql $ runLazyTx' pgExecCtx opTx
      E.ExOpMutation opTx ->
      -- Response headers discarded over websockets
      E.ExOpMutation _ opTx ->
        execQueryOrMut Telem.Mutation Nothing $
          runLazyTx pgExecCtx Q.ReadWrite $ withUserInfo userInfo opTx
      E.ExOpSubs lqOp -> do

@@ -81,7 +81,7 @@ resolveAction
  :: (QErrM m, MonadIO m)
  => (NonObjectTypeMap, AnnotatedObjects)
  -> ActionDefinitionInput
  -> m ResolvedActionDefinition
  -> m (ResolvedActionDefinition, ActionOutputFields)
resolveAction customTypes actionDefinition = do
  let responseType = unGraphQLType $ _adOutputType actionDefinition
      responseBaseType = G.getBaseType responseType

@@ -96,8 +96,10 @@ resolveAction customTypes actionDefinition = do
      <> showNamedTy argumentBaseType <>
      " should be a scalar/enum/input_object"
  -- Check if the response type is an object
  getObjectTypeInfo responseBaseType
  traverse resolveWebhook actionDefinition
  annFields <- _aotAnnotatedFields <$> getObjectTypeInfo responseBaseType
  let outputFields = Map.fromList $ map (unObjectFieldName *** fst) $ Map.toList annFields
  resolvedDef <- traverse resolveWebhook actionDefinition
  pure (resolvedDef, outputFields)
  where
    getNonObjectTypeInfo typeName = do
      let nonObjectTypeMap = unNonObjectTypeMap $ fst $ customTypes

@@ -48,8 +48,7 @@ validateCustomTypeDefinitions tableCache customTypes = do
    enumTypes =
      Set.fromList $ map (unEnumTypeName . _etdName) enumDefinitions

    -- TODO, clean it up maybe?
    defaultScalars = map G.NamedType ["Int", "Float", "String", "Boolean"]
    defaultScalars = map G.NamedType ["Int", "Float", "String", "Boolean", "ID"]

    validateEnum
      :: (MonadValidate [CustomTypeValidationError] m)

@@ -256,7 +256,7 @@ class (ToJSON a) => IsPerm a where
  getPermAcc2
    :: DropPerm a -> PermAccessor (PermInfo a)
  getPermAcc2 _ = permAccessor


addPermP2 :: (IsPerm a, MonadTx m, HasSystemDefined m) => QualifiedTable -> PermDef a -> m ()
addPermP2 tn pd = do
  let pt = permAccToType $ getPermAcc1 pd

@@ -266,10 +266,10 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
      addActionContext e = "in action " <> name <<> "; " <> e
  (| withRecordInconsistency (
     (| modifyErrA ( do
          resolvedDef <- bindErrorA -< resolveAction resolvedCustomTypes def
          (resolvedDef, outFields) <- bindErrorA -< resolveAction resolvedCustomTypes def
          let permissionInfos = map (ActionPermissionInfo . _apmRole) actionPermissions
              permissionMap = mapFromL _apiRole permissionInfos
          returnA -< ActionInfo name resolvedDef permissionMap comment
          returnA -< ActionInfo name outFields resolvedDef permissionMap comment
      )
     |) addActionContext)
   |) metadataObj)

@@ -13,8 +13,10 @@ module Hasura.RQL.Types.Action
  , ResolvedWebhook(..)
  , ResolvedActionDefinition

  , ActionOutputFields
  , ActionInfo(..)
  , aiName
  , aiOutputFields
  , aiDefinition
  , aiPermissions
  , aiComment

@@ -117,13 +119,15 @@ data ActionPermissionInfo
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''ActionPermissionInfo)

type ActionPermissionMap = Map.HashMap RoleName ActionPermissionInfo
type ActionOutputFields = Map.HashMap G.Name G.GType

data ActionInfo
  = ActionInfo
  { _aiName :: !ActionName
  , _aiDefinition :: !ResolvedActionDefinition
  , _aiPermissions :: !ActionPermissionMap
  , _aiComment :: !(Maybe Text)
  { _aiName         :: !ActionName
  , _aiOutputFields :: !ActionOutputFields
  , _aiDefinition   :: !ResolvedActionDefinition
  , _aiPermissions  :: !ActionPermissionMap
  , _aiComment      :: !(Maybe Text)
  } deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) ''ActionInfo)
$(makeLenses ''ActionInfo)

@@ -7,7 +7,7 @@ import Control.Concurrent.MVar.Lifted
import Control.Exception (IOException, try)
import Control.Lens (view, _2)
import Control.Monad.Stateless
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson hiding (json)
import Data.Either (isRight)
import Data.Int (Int64)

@@ -21,6 +22,7 @@ import Web.Spock.Core ((<//>))

import qualified Control.Concurrent.Async.Lifted.Safe as LA
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import qualified Data.Text as T

@@ -71,7 +72,7 @@ data SchemaCacheRef
  -- 1. Allow maximum throughput for serving requests (/v1/graphql) (as each
  --    request reads the current schemacache)
  -- 2. We don't want to process more than one request at any point of time
  --    which would modify the schema cache as such queries are expensive.
  --    which would modify the schema cache as such queries are expensive.
  --
  -- Another option is to consider removing this lock in place of `_scrCache ::
  -- MVar ...` if it's okay or in fact correct to block during schema update in

@@ -79,7 +80,7 @@ data SchemaCacheRef
  -- situation (in between building new schemacache and before writing it to
  -- the IORef) where we serve a request with a stale schemacache but I guess
  -- it is an okay trade-off to pay for a higher throughput (I remember doing a
  -- bunch of benchmarks to test this hypothesis).
  -- bunch of benchmarks to test this hypothesis).
  , _scrCache :: IORef (RebuildableSchemaCache Run, SchemaCacheVer)
  , _scrOnChange :: IO ()
  -- ^ an action to run when schemacache changes

@@ -143,7 +144,7 @@ withSCUpdate scr logger action = do
  (!res, !newSC) <- action
  liftIO $ do
    -- update schemacache in IO reference
    modifyIORef' cacheRef $ \(_, prevVer) ->
    modifyIORef' cacheRef $ \(_, prevVer) ->
      let !newVer = incSchemaCacheVer prevVer
      in (newSC, newVer)
    -- log any inconsistent objects

@@ -198,6 +199,10 @@ buildQCtx = do
  sqlGenCtx <- scSQLGenCtx . hcServerCtx <$> ask
  return $ QCtx userInfo cache sqlGenCtx

setHeader :: MonadIO m => HTTP.Header -> Spock.ActionT m ()
setHeader (headerName, headerValue) =
  Spock.setHeader (bsToTxt $ CI.original headerName) (bsToTxt headerValue)

-- | Typeclass representing the metadata API authorization effect
class MetadataApiAuthorization m where
  authorizeMetadataApi :: RQLQuery -> UserInfo -> Handler m ()

@@ -270,24 +275,22 @@ mkSpockAction serverCtx qErrEncoder qErrModifier apiHandler = do
  case result of
    JSONResp (HttpResponse encJson h) ->
      possiblyCompressedLazyBytes userInfo reqId req reqBody qTime (encJToLBS encJson)
        (pure jsonHeader <> mkHeaders h) reqHeaders
        (pure jsonHeader <> h) reqHeaders
    RawResp (HttpResponse rawBytes h) ->
      possiblyCompressedLazyBytes userInfo reqId req reqBody qTime rawBytes (mkHeaders h) reqHeaders
      possiblyCompressedLazyBytes userInfo reqId req reqBody qTime rawBytes h reqHeaders

possiblyCompressedLazyBytes userInfo reqId req reqBody qTime respBytes respHeaders reqHeaders = do
  let (compressedResp, mEncodingHeader, mCompressionType) =
        compressResponse (Wai.requestHeaders req) respBytes
      encodingHeader = maybe [] pure mEncodingHeader
      reqIdHeader = (requestIdHeader, unRequestId reqId)
      reqIdHeader = (requestIdHeader, txtToBs $ unRequestId reqId)
      allRespHeaders = pure reqIdHeader <> encodingHeader <> respHeaders
  lift $ logHttpSuccess logger userInfo reqId req reqBody respBytes compressedResp qTime mCompressionType reqHeaders
  mapM_ (uncurry Spock.setHeader) allRespHeaders
  mapM_ setHeader allRespHeaders
  Spock.lazyBytes compressedResp

mkHeaders = maybe [] (map unHeader)

v1QueryHandler
  :: (HasVersion, MonadIO m, MonadBaseControl IO m, MetadataApiAuthorization m)
v1QueryHandler
  :: (HasVersion, MonadIO m, MonadBaseControl IO m, MetadataApiAuthorization m)
  => RQLQuery -> Handler m (HttpResponse EncJSON)
v1QueryHandler query = do
  userInfo <- asks hcUser

@@ -296,7 +299,7 @@ v1QueryHandler query = do
  logger <- scLogger . hcServerCtx <$> ask
  res <- bool (fst <$> dbAction) (withSCUpdate scRef logger dbAction) $
         queryModifiesSchemaCache query
  return $ HttpResponse res Nothing
  return $ HttpResponse res []
  where
    -- Hit postgres
    dbAction = do

@@ -341,14 +344,14 @@ gqlExplainHandler query = do
  sqlGenCtx <- scSQLGenCtx . hcServerCtx <$> ask
  enableAL <- scEnableAllowlist . hcServerCtx <$> ask
  res <- GE.explainGQLQuery pgExecCtx sc sqlGenCtx enableAL query
  return $ HttpResponse res Nothing
  return $ HttpResponse res []

v1Alpha1PGDumpHandler :: (MonadIO m) => PGD.PGDumpReqBody -> Handler m APIResp
v1Alpha1PGDumpHandler b = do
  onlyAdmin
  ci <- scConnInfo . hcServerCtx <$> ask
  output <- PGD.execPGDump b ci
  return $ RawResp $ HttpResponse output (Just [Header sqlHeader])
  return $ RawResp $ HttpResponse output [sqlHeader]

consoleAssetsHandler
  :: (MonadIO m, HttpLog m)

@@ -366,7 +369,7 @@ consoleAssetsHandler logger dir path = do
  either (onError reqHeaders) onSuccess eFileContents
  where
    onSuccess c = do
      mapM_ (uncurry Spock.setHeader) headers
      mapM_ setHeader headers
      Spock.lazyBytes c
    onError :: (MonadIO m, HttpLog m) => [HTTP.Header] -> IOException -> Spock.ActionT m ()
    onError hdrs = raiseGenericApiError logger hdrs . err404 NotFound . T.pack . show

@@ -375,7 +378,7 @@ consoleAssetsHandler logger dir path = do
    (fileName, encHeader) = case T.stripSuffix ".gz" fn of
      Just v -> (v, [gzipHeader])
      Nothing -> (fn, [])
    mimeType = bsToTxt $ defaultMimeLookup fileName
    mimeType = defaultMimeLookup fileName
    headers = ("Content-Type", mimeType) : encHeader

class (Monad m) => ConsoleRenderer m where

@@ -552,7 +555,7 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
      else Spock.setStatus HTTP.status500 >> Spock.text "ERROR"

  Spock.get "v1/version" $ do
    uncurry Spock.setHeader jsonHeader
    setHeader jsonHeader
    Spock.lazyBytes $ encode $ object [ "version" .= currentVersion ]

  when enableMetadata $ do

@@ -578,7 +581,7 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
    mkGetHandler $ do
      onlyAdmin
      let res = encJFromJValue $ runGetConfig (scAuthMode serverCtx)
      return $ JSONResp $ HttpResponse res Nothing
      return $ JSONResp $ HttpResponse res []

  when enableGraphQL $ do
    Spock.post "v1alpha1/graphql" $ spockAction GH.encodeGQErr id $

@@ -592,22 +595,22 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
    mkGetHandler $ do
      onlyAdmin
      respJ <- liftIO $ EKG.sampleAll $ scEkgStore serverCtx
      return $ JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) Nothing
      return $ JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) []
  Spock.get "dev/plan_cache" $ spockAction encodeQErr id $
    mkGetHandler $ do
      onlyAdmin
      respJ <- liftIO $ E.dumpPlanCache $ scPlanCache serverCtx
      return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
      return $ JSONResp $ HttpResponse (encJFromJValue respJ) []
  Spock.get "dev/subscriptions" $ spockAction encodeQErr id $
    mkGetHandler $ do
      onlyAdmin
      respJ <- liftIO $ EL.dumpLiveQueriesState False $ scLQState serverCtx
      return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
      return $ JSONResp $ HttpResponse (encJFromJValue respJ) []
  Spock.get "dev/subscriptions/extended" $ spockAction encodeQErr id $
    mkGetHandler $ do
      onlyAdmin
      respJ <- liftIO $ EL.dumpLiveQueriesState True $ scLQState serverCtx
      return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
      return $ JSONResp $ HttpResponse (encJFromJValue respJ) []

  forM_ [Spock.GET, Spock.POST] $ \m -> Spock.hookAny m $ \_ -> do
    req <- Spock.request

@@ -672,6 +675,6 @@ raiseGenericApiError logger headers qErr = do
  reqBody <- liftIO $ Wai.strictRequestBody req
  reqId <- getRequestId $ Wai.requestHeaders req
  lift $ logHttpError logger Nothing reqId req (Left reqBody) qErr headers
  uncurry Spock.setHeader jsonHeader
  setHeader jsonHeader
  Spock.setStatus $ qeStatus qErr
  Spock.lazyBytes $ encode qErr

@@ -19,20 +19,20 @@ module Hasura.Server.Auth
  ) where

import Control.Concurrent.Extended (forkImmortal)
import Control.Exception (try)
import Control.Exception           (try)
import Control.Lens
import Data.Aeson
import Data.IORef (newIORef)
import Data.Time.Clock (UTCTime)
import Hasura.Server.Version (HasVersion)
import Data.IORef                  (newIORef)
import Data.Time.Clock             (UTCTime)
import Hasura.Server.Version       (HasVersion)

import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Network.HTTP.Client as H
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import qualified Data.Aeson           as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict  as Map
import qualified Data.Text            as T
import qualified Network.HTTP.Client  as H
import qualified Network.HTTP.Types   as N
import qualified Network.Wreq         as Wreq

import Hasura.HTTP
import Hasura.Logging

@@ -294,7 +294,7 @@ getUserInfoWithExpTime logger manager rawHeaders = \case

  userInfoWhenNoAdminSecret = \case
    Nothing -> throw401 $ adminSecretHeader <> "/"
      <> deprecatedAccessKeyHeader <> " required, but not found"
      <> deprecatedAccessKeyHeader <> " required, but not found"
    Just role -> return $ mkUserInfo role usrVars

  withNoExpTime a = (, Nothing) <$> a

@@ -29,7 +29,7 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Auth.JWT.Internal (parseHmacKey, parseRsaKey)
import Hasura.Server.Auth.JWT.Logging
import Hasura.Server.Utils (fmapL, userRoleHeader)
import Hasura.Server.Utils (fmapL, getRequestHeader, userRoleHeader)
import Hasura.Server.Version (HasVersion)

import qualified Control.Concurrent.Extended as C

@@ -297,8 +297,7 @@ processAuthZHeader jwtCtx headers authzHeader = do

  -- see if there is a x-hasura-role header, or else pick the default role
  getCurrentRole defaultRole =
    let userRoleHeaderB = CS.cs userRoleHeader
        mUserRole = snd <$> find (\h -> fst h == CI.mk userRoleHeaderB) headers
    let mUserRole = getRequestHeader userRoleHeader headers
    in maybe defaultRole RoleName $ mUserRole >>= mkNonEmptyText . bsToTxt

  decodeJSON val = case J.fromJSON val of

@@ -24,7 +24,7 @@ compressionTypeToTxt CTGZip = "gzip"
compressResponse
  :: NH.RequestHeaders
  -> BL.ByteString
  -> (BL.ByteString, Maybe (Text, Text), Maybe CompressionType)
  -> (BL.ByteString, Maybe NH.Header, Maybe CompressionType)
compressResponse reqHeaders unCompressedResp =
  let compressionTypeM = getRequestedCompression reqHeaders
      appendCompressionType (res, headerM) = (res, headerM, compressionTypeM)

@@ -1,20 +1,13 @@
module Hasura.Server.Context
  ( HttpResponse(..)
  , Header (..)
  , Headers
  )
  (HttpResponse(..))
  where

import Hasura.Prelude

newtype Header
  = Header { unHeader :: (Text, Text) }
  deriving (Show, Eq)

type Headers = [Header]
import qualified Network.HTTP.Types as HTTP

data HttpResponse a
  = HttpResponse
  { _hrBody    :: !a
  , _hrHeaders :: !(Maybe Headers)
  , _hrHeaders :: !HTTP.ResponseHeaders
  } deriving (Functor, Foldable, Traversable)

@@ -1,6 +1,7 @@
{-# LANGUAGE TypeApplications #-}
module Hasura.Server.Utils where

import Control.Lens ((^..))
import Data.Aeson
import Data.Char
import Data.List (find)

@@ -21,6 +22,7 @@ import qualified Data.UUID.V4 as UUID
import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.HTTP.Client as HC
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wreq as Wreq
import qualified Text.Regex.TDFA as TDFA
import qualified Text.Regex.TDFA.ByteString as TDFA

@@ -30,45 +32,42 @@ newtype RequestId
  = RequestId { unRequestId :: Text }
  deriving (Show, Eq, ToJSON, FromJSON)

jsonHeader :: (T.Text, T.Text)
jsonHeader :: HTTP.Header
jsonHeader = ("Content-Type", "application/json; charset=utf-8")

sqlHeader :: (T.Text, T.Text)
sqlHeader :: HTTP.Header
sqlHeader = ("Content-Type", "application/sql; charset=utf-8")

htmlHeader :: (T.Text, T.Text)
htmlHeader :: HTTP.Header
htmlHeader = ("Content-Type", "text/html; charset=utf-8")

gzipHeader :: (T.Text, T.Text)
gzipHeader :: HTTP.Header
gzipHeader = ("Content-Encoding", "gzip")

brHeader :: (T.Text, T.Text)
brHeader = ("Content-Encoding", "br")

userRoleHeader :: T.Text
userRoleHeader :: IsString a => a
userRoleHeader = "x-hasura-role"

deprecatedAccessKeyHeader :: T.Text
deprecatedAccessKeyHeader :: IsString a => a
deprecatedAccessKeyHeader = "x-hasura-access-key"

adminSecretHeader :: T.Text
adminSecretHeader :: IsString a => a
adminSecretHeader = "x-hasura-admin-secret"

userIdHeader :: T.Text
userIdHeader :: IsString a => a
userIdHeader = "x-hasura-user-id"

requestIdHeader :: T.Text
requestIdHeader :: IsString a => a
requestIdHeader = "x-request-id"

getRequestHeader :: B.ByteString -> [HTTP.Header] -> Maybe B.ByteString
getRequestHeader :: HTTP.HeaderName -> [HTTP.Header] -> Maybe B.ByteString
getRequestHeader hdrName hdrs = snd <$> mHeader
  where
    mHeader = find (\h -> fst h == CI.mk hdrName) hdrs
    mHeader = find (\h -> fst h == hdrName) hdrs

getRequestId :: (MonadIO m) => [HTTP.Header] -> m RequestId
getRequestId headers =
  -- generate a request id for every request if the client has not sent it
  case getRequestHeader (txtToBs requestIdHeader) headers of
  case getRequestHeader requestIdHeader headers of
    Nothing -> RequestId <$> liftIO generateFingerprint
    Just reqId -> return $ RequestId $ bsToTxt reqId

@@ -173,6 +172,12 @@ mkClientHeadersForward reqHeaders =
    "User-Agent" -> Just ("X-Forwarded-User-Agent", hdrValue)
    _ -> Nothing

mkSetCookieHeaders :: Wreq.Response a -> HTTP.ResponseHeaders
mkSetCookieHeaders resp =
  map (headerName,) $ resp ^.. Wreq.responseHeader headerName
  where
    headerName = "Set-Cookie"

filterRequestHeaders :: [HTTP.Header] -> [HTTP.Header]
filterRequestHeaders =
  filterHeaders $ Set.fromList commonClientHeadersIgnored

@ -257,7 +257,12 @@ def evts_webhook(request):
|
||||
web_server.join()
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def actions_webhook(hge_ctx):
|
||||
def actions_fixture(hge_ctx):
|
||||
pg_version = hge_ctx.pg_version
|
||||
if pg_version < 100000: # version less than 10.0
|
||||
pytest.skip('Actions are not supported on Postgres version < 10')
|
||||
|
||||
# Start actions' webhook server
|
||||
webhook_httpd = ActionsWebhookServer(hge_ctx, server_address=('127.0.0.1', 5593))
|
||||
web_server = threading.Thread(target=webhook_httpd.serve_forever)
|
||||
web_server.start()
|
||||
|
@ -186,6 +186,10 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
|
||||
elif req_path == "/invalid-response":
|
||||
self._send_response(HTTPStatus.OK, "some-string")
|
||||
|
||||
elif req_path == "/mirror-action":
|
||||
resp, status = self.mirror_action()
|
||||
self._send_response(status, resp)
|
||||
|
||||
else:
|
||||
self.send_response(HTTPStatus.NO_CONTENT)
|
||||
self.end_headers()
|
||||
@ -263,6 +267,11 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
        response = resp['data']['insert_user']['returning']
        return response, HTTPStatus.OK

    def mirror_action(self):
        response = self.req_json['input']['arg']
        return response, HTTPStatus.OK


    def check_email(self, email):
        regex = r'^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
        return re.search(regex, email)

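The mirror handler simply echoes input.arg back, which is what lets the tests make the webhook return any payload they like and exercise the new response validation. An illustrative request/response pair, with the payload shape abridged to the fields the handler actually reads:

    request_body = {
        "action": {"name": "mirror"},
        "input": {"arg": {"id": "some-id", "name": "Alice"}},
    }

    def mirror_action(req_json):
        # echo the action's argument back verbatim
        return req_json['input']['arg']

    assert mirror_action(request_body) == {"id": "some-id", "name": "Alice"}
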
@ -279,6 +288,7 @@ class ActionsWebhookHandler(http.server.BaseHTTPRequestHandler):
    def _send_response(self, status, body):
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Set-Cookie', 'abcd')
        self.end_headers()
        self.wfile.write(json.dumps(body).encode("utf-8"))

@ -333,7 +343,7 @@ class EvtsWebhookHandler(http.server.BaseHTTPRequestHandler):
                      "headers": req_headers})

# A very slightly more sane/performant http server.
# See: https://stackoverflow.com/a/14089457/176841
#
# TODO use this elsewhere, or better yet: use e.g. bottle + waitress
class ThreadedHTTPServer(ThreadingMixIn, http.server.HTTPServer):

@ -409,7 +419,7 @@ class HGECtx:

        self.ws_client = GQLWsClient(self, '/v1/graphql')


        # HGE version
        result = subprocess.run(['../../scripts/get-version.sh'], shell=False, stdout=subprocess.PIPE, check=True)
        env_version = os.getenv('VERSION')
        self.version = env_version if env_version else result.stdout.decode('utf-8').strip()
@ -421,6 +431,11 @@
            raise HGECtxError(repr(e))
        assert st_code == 200, resp

        # Postgres version
        pg_version_text = self.sql('show server_version_num').fetchone()['server_version_num']
        self.pg_version = int(pg_version_text)


    def reflect_tables(self):
        self.meta.reflect(bind=self.engine)

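Postgres reports server_version_num as a single integer, e.g. 110005 for 11.5, so the fixture's "pg_version < 100000" check reads as "before Postgres 10". A sketch of that skip condition:

    def supports_actions(server_version_num):
        # the actions tests are skipped on Postgres < 10
        return server_version_num >= 100000

    assert supports_actions(110005)       # 11.5
    assert not supports_actions(90624)    # 9.6.24
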
@ -0,0 +1,22 @@
description: Expected field not found in response
url: /v1/graphql
status: 200
response:
  errors:
  - extensions:
      internal:
        webhook_response:
          name: Alice
      path: $
      code: unexpected
    message: field "id" expected in webhook response, but not found
query:
  variables:
    name: Alice
  query: |
    mutation ($name: String) {
      mirror(arg: {name: $name}){
        id
        name
      }
    }

@ -0,0 +1,24 @@
description: Null value for non-null output field
url: /v1/graphql
status: 200
response:
  errors:
  - extensions:
      internal:
        webhook_response:
          name: Alice
          id: null
      path: $
      code: unexpected
    message: expecting not null value for field "id"
query:
  variables:
    id: null
    name: Alice
  query: |
    mutation ($id: ID, $name: String) {
      mirror(arg: {id: $id, name: $name}){
        id
        name
      }
    }

@ -0,0 +1,19 @@
description: A successful query to mirror action
url: /v1/graphql
status: 200
response:
  data:
    mirror:
      id: some-id
      name: Alice
query:
  variables:
    id: some-id
    name: Alice
  query: |
    mutation ($id: ID, $name: String) {
      mirror(arg: {id: $id, name: $name}){
        id
        name
      }
    }

@ -0,0 +1,26 @@
description: Unexpected extra field in response
url: /v1/graphql
status: 200
response:
  errors:
  - extensions:
      internal:
        webhook_response:
          age: 25
          name: Alice
          id: some-id
      path: $
      code: unexpected
    message: 'unexpected fields in webhook response: age'
query:
  variables:
    id: some-id
    name: Alice
    age: 25
  query: |
    mutation ($id: ID, $name: String, $age: Int) {
      mirror(arg: {id: $id, name: $name, age: $age}){
        id
        name
      }
    }

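Taken together, the four fixtures above pin down the validation introduced for #3977: a sync webhook response must contain every field of the action's output type, must not be null where the type is non-nullable, and must not carry extra fields. A sketch of those rules for an output type like OutObject ({id: ID!, name: String}), with the error strings matching the fixtures:

    SCHEMA = {'id': {'nullable': False}, 'name': {'nullable': True}}

    def validate(resp):
        errors = []
        for field, spec in SCHEMA.items():
            if field not in resp:
                errors.append('field "%s" expected in webhook response, but not found' % field)
            elif resp[field] is None and not spec['nullable']:
                errors.append('expecting not null value for field "%s"' % field)
        extra = set(resp) - set(SCHEMA)
        if extra:
            errors.append('unexpected fields in webhook response: %s' % ', '.join(sorted(extra)))
        return errors

    assert validate({'id': 'some-id', 'name': 'Alice'}) == []
    assert validate({'name': 'Alice'}) == ['field "id" expected in webhook response, but not found']
    assert validate({'id': None, 'name': 'Alice'}) == ['expecting not null value for field "id"']
    assert validate({'id': 'some-id', 'name': 'Alice', 'age': 25}) == ['unexpected fields in webhook response: age']
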
@ -25,6 +25,15 @@ args:
    - name: email
      type: String!

  - name: InObject
    fields:
    - name: id
      type: ID
    - name: name
      type: String
    - name: age
      type: Int

  objects:
  - name: UserId
    fields:

@ -37,6 +46,13 @@ args:
    field_mapping:
      id: id

  - name: OutObject
    fields:
    - name: id
      type: ID! # For issue https://github.com/hasura/graphql-engine/issues/4061
    - name: name
      type: String

- type: create_action
  args:
    name: create_user

@ -60,3 +76,14 @@ args:
        type: '[UserInput!]!'
    output_type: '[UserId]'
    handler: http://127.0.0.1:5593/create-users

- type: create_action
  args:
    name: mirror
    definition:
      kind: synchronous
      arguments:
      - name: arg
        type: 'InObject!'
      output_type: 'OutObject'
      handler: http://127.0.0.1:5593/mirror-action

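The test harness applies this metadata for you, but the same registration can be made as a one-off call against a running instance. A hypothetical sketch (the endpoint URL and admin secret below are illustrative, not from the test setup):

    import requests

    payload = {
        "type": "create_action",
        "args": {
            "name": "mirror",
            "definition": {
                "kind": "synchronous",
                "arguments": [{"name": "arg", "type": "InObject!"}],
                "output_type": "OutObject",
                "handler": "http://127.0.0.1:5593/mirror-action",
            },
        },
    }
    r = requests.post("http://localhost:8080/v1/query", json=payload,
                      headers={"X-Hasura-Admin-Secret": "mysecret"})
    r.raise_for_status()
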
@ -8,6 +8,10 @@ args:
  args:
    name: create_users
    clear_data: true
- type: drop_action
  args:
    name: mirror
    clear_data: true
# clear custom types
- type: set_custom_types
  args: {}

@ -64,11 +64,11 @@ args:
    sql: |
      INSERT INTO geom_table (type, geom_col)
      VALUES
      ('point', ST_GeomFromText('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ('point', ST_GeomFromEWKT('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ;
- type: run_sql
  args:

@ -86,7 +86,7 @@ args:
    sql: |
      INSERT INTO geog_as_geom_table (name, geom_col)
      VALUES
      ('London', ST_GeomFromText('SRID=4326;POINT(0.1278 51.5074)')),
      ('Paris', ST_GeomFromText('SRID=4326;POINT(2.3522 48.8566)')),
      ('Moscow', ST_GeomFromText('SRID=4326;POINT(37.6173 55.7558)')),
      ('New York', ST_GeomFromText('SRID=4326;POINT(-74.0060 40.7128)'));
      ('London', ST_GeomFromEWKT('SRID=4326;POINT(0.1278 51.5074)')),
      ('Paris', ST_GeomFromEWKT('SRID=4326;POINT(2.3522 48.8566)')),
      ('Moscow', ST_GeomFromEWKT('SRID=4326;POINT(37.6173 55.7558)')),
      ('New York', ST_GeomFromEWKT('SRID=4326;POINT(-74.0060 40.7128)'));

@ -19,8 +19,8 @@ args:
        rast raster
      );
      INSERT INTO dummy_rast (rast) values
      (ST_AsRaster(ST_Buffer(ST_GeomFromText('SRID=4326;POINT(1 2)'),2), 5, 5))
      , (ST_AsRaster(ST_Buffer(ST_GeomFromText('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)'), 2), 5, 5))
      (ST_AsRaster(ST_Buffer(ST_GeomFromEWKT('SRID=4326;POINT(1 2)'),2), 5, 5))
      , (ST_AsRaster(ST_Buffer(ST_GeomFromEWKT('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)'), 2), 5, 5))
      ;
- type: track_table
  args:

@ -370,11 +370,11 @@ args:
    sql: |
      INSERT INTO geom_table (type, geom_col)
      VALUES
      ('point', ST_GeomFromText('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ('point', ST_GeomFromEWKT('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ;


@ -66,11 +66,11 @@ args:
    sql: |
      INSERT INTO geom_table (type, geom_col)
      VALUES
      ('point', ST_GeomFromText('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ('point', ST_GeomFromEWKT('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ;
- type: run_sql
  args:

@ -88,7 +88,7 @@ args:
    sql: |
      INSERT INTO geog_as_geom_table (name, geom_col)
      VALUES
      ('London', ST_GeomFromText('SRID=4326;POINT(0.1278 51.5074)')),
      ('Paris', ST_GeomFromText('SRID=4326;POINT(2.3522 48.8566)')),
      ('Moscow', ST_GeomFromText('SRID=4326;POINT(37.6173 55.7558)')),
      ('New York', ST_GeomFromText('SRID=4326;POINT(-74.0060 40.7128)'));
      ('London', ST_GeomFromEWKT('SRID=4326;POINT(0.1278 51.5074)')),
      ('Paris', ST_GeomFromEWKT('SRID=4326;POINT(2.3522 48.8566)')),
      ('Moscow', ST_GeomFromEWKT('SRID=4326;POINT(37.6173 55.7558)')),
      ('New York', ST_GeomFromEWKT('SRID=4326;POINT(-74.0060 40.7128)'));

@ -256,11 +256,11 @@ args:
    sql: |
      INSERT INTO geom_table (type, geom_col)
      VALUES
      ('point', ST_GeomFromText('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromText('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromText('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ('point', ST_GeomFromEWKT('SRID=4326;POINT(1 2)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(0 0, 0.5 1, 1 2, 1.5 3)')),
      ('linestring', ST_GeomFromEWKT('SRID=4326;LINESTRING(1 0, 0.5 0.5, 0 1)')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')),
      ('polygon', ST_GeomFromEWKT('SRID=4326;POLYGON((2 0, 2 1, 3 1, 3 0, 2 0))'))
      ;

# Permission based on Geography columns

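ST_GeomFromEWKT is the PostGIS constructor for extended WKT, where the SRID rides along in the literal itself ('SRID=4326;POINT(1 2)'); ST_GeomFromText only documents plain WKT plus an optional SRID argument, so the fixtures' switch uses the constructor that actually matches these literals. A quick check of the round trip, assuming psycopg2 and a PostGIS-enabled database (the DSN is illustrative):

    import psycopg2

    conn = psycopg2.connect("dbname=test")
    with conn, conn.cursor() as cur:
        # the SRID embedded in the EWKT literal is preserved on the geometry
        cur.execute("SELECT ST_SRID(ST_GeomFromEWKT('SRID=4326;POINT(1 2)'))")
        assert cur.fetchone()[0] == 4326
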
@ -30,14 +30,27 @@ args:
        age INTEGER NOT NULL
      );


      INSERT INTO author (name) VALUES
      ('Author 1'),
      ('Author 2');

      INSERT INTO article (title, author_id) VALUES
      ('Lorem ipsum dolor sit amet', 1),
      ('lolcats: an anthology', 2),
      ('consectetur adipiscing elit', 1);

      INSERT INTO hge_tests.resident (name, age) VALUES
      ('Resident 1', 23),
      ('Resident 2', 31);


      CREATE MATERIALIZED VIEW articles AS
      SELECT article.title, author.name
      FROM article
      LEFT JOIN author ON author.id = article.author_id
      ;

      CREATE OR REPLACE FUNCTION test1()
      RETURNS SETOF test2 AS $$
      SELECT * FROM test2

@ -4,6 +4,7 @@ args:
- type: run_sql
  args:
    sql: |
      DROP MATERIALIZED VIEW articles;
      DROP TABLE hge_tests.resident;
      DROP TABLE article;
      DROP TABLE author;

@ -0,0 +1,77 @@
- description: Track materialized view
  url: /v1/query
  status: 200
  response:
    message: success
  query:
    type: track_table
    args:
      name: articles

- description: Track already tracked materialized view
  url: /v1/query
  status: 400
  response:
    path: $.args
    error: 'view/table already tracked : "articles"'
    code: already-tracked
  query:
    type: track_table
    args:
      name: articles

- description: Select query
  url: /v1/query
  status: 200
  response:
  - title: "Lorem ipsum dolor sit amet"
    name: Author 1
  - title: "lolcats: an anthology"
    name: Author 2
  - title: "consectetur adipiscing elit"
    name: Author 1
  query:
    type: select
    args:
      table: articles
      columns:
      - title
      - name

- description: Untrack materialized view
  url: /v1/query
  status: 200
  response:
    message: success
  query:
    type: untrack_table
    args:
      table: articles


- description: Untrack already untracked materialized view
  url: /v1/query
  status: 400
  response:
    path: $.args
    error: 'view/table already untracked : "articles"'
    code: already-untracked
  query:
    type: untrack_table
    args:
      table: articles

- description: Select query error
  url: /v1/query
  status: 400
  response:
    path: $.args.table
    error: table "articles" does not exist
    code: not-exists
  query:
    type: select
    args:
      table: articles
      columns:
      - title
      - name

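The fixture walks a materialized view through the full track/untrack lifecycle: tracking succeeds once, re-tracking returns already-tracked, selecting works only while tracked, and untracking is likewise idempotent-with-an-error. The same flow as a hypothetical direct call against a running instance (endpoint and admin secret below are illustrative):

    import requests

    HGE = "http://localhost:8080/v1/query"
    HEADERS = {"X-Hasura-Admin-Secret": "mysecret"}

    def query(body):
        return requests.post(HGE, json=body, headers=HEADERS)

    # first track succeeds; the second returns the already-tracked error above
    assert query({"type": "track_table", "args": {"name": "articles"}}).status_code == 200
    assert query({"type": "track_table", "args": {"name": "articles"}}).status_code == 400
    assert query({"type": "untrack_table", "args": {"table": "articles"}}).status_code == 200
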
@ -10,14 +10,14 @@ TODO:- Test Actions metadata
"""

use_action_fixtures = pytest.mark.usefixtures(
    "actions_webhook",
    "actions_fixture",
    'per_class_db_schema_for_mutation_tests',
    'per_method_db_data_for_mutation_tests'
)

@pytest.mark.parametrize("transport", ['http', 'websocket'])
@use_action_fixtures
class TestActionsSync:
class TestActionsSyncWebsocket:

    @classmethod
    def dir(cls):

@ -35,28 +35,74 @@ class TestActionsSync:
    def test_create_users_success(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/create_users_success.yaml', transport)

    def test_invalid_webhook_response(self, hge_ctx, transport):
@use_action_fixtures
class TestActionsSync:

    @classmethod
    def dir(cls):
        return 'queries/actions/sync'

    def test_invalid_webhook_response(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/invalid_webhook_response.yaml')

    def test_expecting_object_response(self, hge_ctx, transport):
    def test_expecting_object_response(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/expecting_object_response.yaml')

    def test_expecting_array_response(self, hge_ctx, transport):
    def test_expecting_array_response(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/expecting_array_response.yaml')

    # Webhook response validation tests. See https://github.com/hasura/graphql-engine/issues/3977
    def test_mirror_action_not_null(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/mirror_action_not_null.yaml')

    def test_mirror_action_unexpected_field(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/mirror_action_unexpected_field.yaml')

    def test_mirror_action_no_field(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/mirror_action_no_field.yaml')

    def test_mirror_action_success(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/mirror_action_success.yaml')

def mk_headers_with_secret(hge_ctx, headers={}):
    admin_secret = hge_ctx.hge_key
    if admin_secret:
        headers['X-Hasura-Admin-Secret'] = admin_secret
    return headers

@use_action_fixtures
class TestActionsSyncResponseHeaders:

    @classmethod
    def dir(cls):
        return 'queries/actions/sync'

    # See https://github.com/hasura/graphql-engine/issues/4021
    def test_set_cookie_header(self, hge_ctx):
        mutation = '''
          mutation {
            create_user(email: "clarke@gmail.com", name: "Clarke"){
              id
            }
          }
        '''
        query = {
            'query': mutation,
            'variables': {}
        }
        status, resp, resp_headers = hge_ctx.anyq('/v1/graphql', query, mk_headers_with_secret(hge_ctx))
        assert status == 200, resp
        assert 'data' in resp, resp
        assert ('Set-Cookie' in resp_headers and
                resp_headers['Set-Cookie'] == 'abcd'), resp_headers


@use_action_fixtures
class TestActionsAsync:
    @classmethod
    def dir(cls):
        return 'queries/actions/async'

    def mk_headers_with_secret(self, hge_ctx, headers={}):
        admin_secret = hge_ctx.hge_key
        if admin_secret:
            headers['X-Hasura-Admin-Secret'] = admin_secret
        return headers


    def test_create_user_fail(self, hge_ctx):
        graphql_mutation = '''
          mutation {

@ -67,11 +113,11 @@ class TestActionsAsync:
            'query': graphql_mutation,
            'variables': {}
        }
        status, resp, _ = hge_ctx.anyq('/v1/graphql', query, self.mk_headers_with_secret(hge_ctx))
        status, resp, _ = hge_ctx.anyq('/v1/graphql', query, mk_headers_with_secret(hge_ctx))
        assert status == 200, resp
        assert 'data' in resp
        action_id = resp['data']['create_user']
        time.sleep(2)
        time.sleep(3)

        query_async = '''
          query ($action_id: uuid!){

@ -118,11 +164,11 @@ class TestActionsAsync:
            'query': graphql_mutation,
            'variables': {}
        }
        status, resp, _ = hge_ctx.anyq('/v1/graphql', query, self.mk_headers_with_secret(hge_ctx))
        status, resp, _ = hge_ctx.anyq('/v1/graphql', query, mk_headers_with_secret(hge_ctx))
        assert status == 200, resp
        assert 'data' in resp
        action_id = resp['data']['create_user']
        time.sleep(2)
        time.sleep(3)

        query_async = '''
          query ($action_id: uuid!){

@ -185,7 +231,7 @@ class TestActionsAsync:
            'query': graphql_mutation,
            'variables': {}
        }
        headers_user_1 = self.mk_headers_with_secret(hge_ctx, {
        headers_user_1 = mk_headers_with_secret(hge_ctx, {
            'X-Hasura-Role': 'user',
            'X-Hasura-User-Id': '1'
        })

@ -194,7 +240,7 @@ class TestActionsAsync:
        assert status == 200, resp
        assert 'data' in resp
        action_id = resp['data']['create_user']
        time.sleep(2)
        time.sleep(3)

        query_async = '''
          query ($action_id: uuid!){

@ -213,7 +259,7 @@ class TestActionsAsync:
            }
        }

        headers_user_2 = self.mk_headers_with_secret(hge_ctx, {
        headers_user_2 = mk_headers_with_secret(hge_ctx, {
            'X-Hasura-Role': 'user',
            'X-Hasura-User-Id': '2'
        })

@ -629,6 +629,10 @@ class TestTrackTables:
        check_query_f(hge_ctx, self.dir() + '/track_untrack_table.yaml')
        hge_ctx.may_skip_test_teardown = True

    def test_track_untrack_materialized_view(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/track_untrack_materialized_view.yaml')
        hge_ctx.may_skip_test_teardown = True

    def test_track_untrack_table_with_deps(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/track_untrack_table_deps.yaml')
        hge_ctx.may_skip_test_teardown = True