Mirror of https://github.com/hasura/graphql-engine.git, synced 2024-11-10 10:29:12 +03:00
server, console, docs: add update_remote_schema API
https://github.com/hasura/graphql-engine-mono/pull/1546
GitOrigin-RevId: 142b0d0e0ffc35b2679c91c411868c45a8b8e221
This commit is contained in:
parent a924720a3a
commit 608e4fbb20
@ -7,9 +7,11 @@
|
||||
- server: Backends Citus, MSSQL, and BigQuery now all support the `set_table_customization` operation.
|
||||
- server: Adds caching support for queries using remote schema permissions
|
||||
- server: All Postgres boolean operators now support the null-collapsing behaviour described in [#704](https://github.com/hasura/graphql-engine/issues/704) and enabled via the `HASURA_GRAPHQL_V1_BOOLEAN_NULL_COLLAPSE` environment variable.
|
||||
- server: add `update_remote_schema` metadata query
|
||||
- console: add citus support
|
||||
- cli: add citus support
|
||||
- console: add support for `update_remote_schema` API while modifying remote schemas
|
||||
- cli: `metadata diff` will now only show the differences in metadata. The old behaviour is available behind a flag (`--type unified-common`) (#5487)
|
||||
- cli: allow `--skip-execution` to be used with `up` and `down` flags in `migrate apply`
|
||||
|
||||
## v2.0.0-beta.2
|
||||
|
@ -218,41 +218,12 @@ export const passWithRemoteSchemaHeader = () => {
|
||||
cy.wait(5000);
|
||||
};
|
||||
|
||||
export const passWithEditRemoteSchema = () => {
|
||||
cy.visit(
|
||||
`${baseUrl}/remote-schemas/manage/${getRemoteSchemaName(
|
||||
3,
|
||||
testName
|
||||
)}/modify`
|
||||
);
|
||||
cy.wait(3000);
|
||||
cy.get(getElementFromAlias('remote-schema-edit-modify-btn'))
|
||||
.should('exist')
|
||||
.click();
|
||||
cy.get(getElementFromAlias('remote-schema-schema-name'))
|
||||
.clear()
|
||||
.type(getRemoteSchemaName(5, testName));
|
||||
|
||||
cy.get(getElementFromAlias('remote-schema-edit-save-btn')).click();
|
||||
cy.wait(10000);
|
||||
validateRS(getRemoteSchemaName(5, testName), ResultType.SUCCESS);
|
||||
|
||||
cy.get(getElementFromAlias('remote-schemas-modify')).click();
|
||||
cy.get(getElementFromAlias('remote-schema-schema-name')).should(
|
||||
'have.attr',
|
||||
'value',
|
||||
getRemoteSchemaName(5, testName)
|
||||
);
|
||||
cy.get(getElementFromAlias('remote-schema-edit-modify-btn')).should('exist');
|
||||
cy.wait(7000);
|
||||
};
|
||||
|
||||
export const deleteRemoteSchema = () => {
|
||||
cy.visit(`remote-schemas/manage/${getRemoteSchemaName(5, testName)}/details`);
|
||||
cy.visit(`remote-schemas/manage/${getRemoteSchemaName(3, testName)}/details`);
|
||||
|
||||
cy.get(getElementFromAlias('remote-schemas-modify')).click();
|
||||
cy.wait(5000);
|
||||
setPromptValue(getRemoteSchemaName(5, testName));
|
||||
setPromptValue(getRemoteSchemaName(3, testName));
|
||||
cy.get(getElementFromAlias('remote-schema-edit-delete-btn')).click();
|
||||
cy.window().its('prompt').should('be.called');
|
||||
cy.wait(5000);
|
||||
@ -298,8 +269,35 @@ export const createSimpleRemoteSchemaPermission = () => {
|
||||
cy.wait(5000);
|
||||
};
|
||||
|
||||
// export const deleteRemoteSchemaPermission = () => {
|
||||
// cy.get(getElementFromAlias('delete-remote-schema-permissions'))
|
||||
// .click();
|
||||
export const passWithUpdateRemoteSchema = () => {
|
||||
cy.visit(
|
||||
`${baseUrl}/remote-schemas/manage/${getRemoteSchemaName(
|
||||
3,
|
||||
testName
|
||||
)}/modify`
|
||||
);
|
||||
cy.wait(3000);
|
||||
cy.get(getElementFromAlias('remote-schema-edit-modify-btn'))
|
||||
.should('exist')
|
||||
.click();
|
||||
cy.get(getElementFromAlias('remote-schema-schema-name')).should(
|
||||
'have.attr',
|
||||
'disabled'
|
||||
);
|
||||
cy.get(getElementFromAlias('remote-schema-comment'))
|
||||
.clear()
|
||||
.type("This is a new remote schema comment");
|
||||
|
||||
// }
|
||||
cy.get(getElementFromAlias('remote-schema-edit-save-btn')).click();
|
||||
cy.wait(5000);
|
||||
validateRS(getRemoteSchemaName(3, testName), ResultType.SUCCESS);
|
||||
|
||||
cy.get(getElementFromAlias('remote-schemas-modify')).click();
|
||||
cy.get(getElementFromAlias('remote-schema-schema-name')).should(
|
||||
'have.attr',
|
||||
'value',
|
||||
getRemoteSchemaName(3, testName)
|
||||
);
|
||||
cy.get(getElementFromAlias('remote-schema-edit-modify-btn')).should('exist');
|
||||
cy.wait(7000);
|
||||
};
|
||||
|
@ -14,10 +14,10 @@ import {
|
||||
failWithRemoteSchemaEnvUrl,
|
||||
failWithRemoteSchemaEnvHeader,
|
||||
passWithRemoteSchemaHeader,
|
||||
passWithEditRemoteSchema,
|
||||
deleteRemoteSchema,
|
||||
visitRemoteSchemaPermissionsTab,
|
||||
createSimpleRemoteSchemaPermission,
|
||||
passWithUpdateRemoteSchema,
|
||||
} from './spec';
|
||||
|
||||
const setup = () => {
|
||||
@ -70,7 +70,7 @@ export const runCreateRemoteSchemaTableTests = () => {
|
||||
failWithRemoteSchemaEnvHeader
|
||||
);
|
||||
it('Create remote schema with headers', passWithRemoteSchemaHeader);
|
||||
it('Edit remote schema with headers', passWithEditRemoteSchema);
|
||||
it('Update remote schema on Modify page', passWithUpdateRemoteSchema);
|
||||
it('Delete remote schema with headers', deleteRemoteSchema);
|
||||
});
|
||||
};
|
||||
|
@ -1,27 +1,27 @@
|
||||
/* defaultState */
|
||||
import { addState } from '../state';
|
||||
/* */
|
||||
|
||||
import { push } from 'react-router-redux';
|
||||
|
||||
import { generateHeaderSyms } from '../../../Common/Layout/ReusableHeader/HeaderReducer';
|
||||
import { makeRequest } from '../Actions';
|
||||
// import { UPDATE_MIGRATION_STATUS_ERROR } from '../../../Main/Actions';
|
||||
import { appPrefix } from '../constants';
|
||||
|
||||
import globals from '../../../../Globals';
|
||||
import { clearIntrospectionSchemaCache } from '../graphqlUtils';
|
||||
import { exportMetadata } from '../../../../metadata/actions';
|
||||
import { getRemoteSchemaSelector } from '../../../../metadata/selector';
|
||||
import Migration from '../../../../utils/migration/Migration';
|
||||
import { showErrorNotification } from '../../Common/Notification';
|
||||
import {
|
||||
addRemoteSchemaQuery,
|
||||
removeRemoteSchemaQuery,
|
||||
updateRemoteSchemaQuery,
|
||||
} from '../../../../metadata/queryUtils';
|
||||
import _push from '../../Data/push';
|
||||
|
||||
const prefixUrl = globals.urlPrefix + appPrefix;
|
||||
|
||||
/* */
|
||||
const MANUAL_URL_CHANGED = '@addRemoteSchema/MANUAL_URL_CHANGED';
|
||||
const ENV_URL_CHANGED = '@addRemoteSchema/ENV_URL_CHANGED';
|
||||
const NAME_CHANGED = '@addRemoteSchema/NAME_CHANGED';
|
||||
const TIMEOUT_CONF_CHANGED = '@addRemoteSchema/TIMEOUT_CONF_CHANGED';
|
||||
const COMMENT_CHANGED = '@addRemoteSchema/COMMENT_CHANGED';
|
||||
// const HEADER_CHANGED = '@addRemoteSchema/HEADER_CHANGED';
|
||||
const ADDING_REMOTE_SCHEMA = '@addRemoteSchema/ADDING_REMOTE_SCHEMA';
|
||||
const ADD_REMOTE_SCHEMA_FAIL = '@addRemoteSchema/ADD_REMOTE_SCHEMA_FAIL';
|
||||
@ -41,16 +41,14 @@ const MODIFYING_REMOTE_SCHEMA = '@addRemoteSchema/MODIFYING_REMOTE_SCHEMA';
|
||||
const UPDATE_FORWARD_CLIENT_HEADERS =
|
||||
'@addRemoteSchema/UPDATE_FORWARD_CLIENT_HEADERS';
|
||||
|
||||
/* */
|
||||
const TOGGLE_MODIFY = '@editRemoteSchema/TOGGLE_MODIFY';
|
||||
/* */
|
||||
/* */
|
||||
|
||||
const inputEventMap = {
|
||||
name: NAME_CHANGED,
|
||||
envName: ENV_URL_CHANGED,
|
||||
manualUrl: MANUAL_URL_CHANGED,
|
||||
timeoutConf: TIMEOUT_CONF_CHANGED,
|
||||
comment: COMMENT_CHANGED,
|
||||
};
|
||||
|
||||
/* Action creators */
|
||||
@ -87,7 +85,6 @@ const getReqHeader = headers => {
|
||||
const fetchRemoteSchema = remoteSchema => {
|
||||
return (dispatch, getState) => {
|
||||
const schema = getRemoteSchemaSelector(getState())(remoteSchema);
|
||||
|
||||
if (schema) {
|
||||
dispatch({ type: REMOTE_SCHEMA_FETCH_SUCCESS, data: schema });
|
||||
const headerObj = [];
|
||||
@ -108,281 +105,231 @@ const fetchRemoteSchema = remoteSchema => {
|
||||
data: headerObj,
|
||||
});
|
||||
} else {
|
||||
dispatch(push(`${prefixUrl}`));
|
||||
dispatch(_push(`${prefixUrl}`));
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const addRemoteSchema = () => {
|
||||
return (dispatch, getState) => {
|
||||
const currState = getState().remoteSchemas.addData;
|
||||
const addRemoteSchema = () => (dispatch, getState) => {
|
||||
const currState = getState().remoteSchemas.addData;
|
||||
|
||||
let timeoutSeconds = parseInt(currState.timeoutConf, 10);
|
||||
if (isNaN(timeoutSeconds)) timeoutSeconds = 60;
|
||||
let timeoutSeconds = parseInt(currState.timeoutConf, 10);
|
||||
if (isNaN(timeoutSeconds)) timeoutSeconds = 60;
|
||||
|
||||
const resolveObj = {
|
||||
name: currState.name.trim().replace(/ +/g, ''),
|
||||
definition: {
|
||||
url: currState.manualUrl?.trim(),
|
||||
url_from_env: currState.envName?.trim(),
|
||||
headers: [],
|
||||
timeout_seconds: timeoutSeconds,
|
||||
forward_client_headers: currState.forwardClientHeaders,
|
||||
},
|
||||
};
|
||||
const manualUrl = currState?.manualUrl?.trim();
|
||||
const envName = currState?.envName?.trim();
|
||||
const remoteSchemaName = currState.name.trim().replace(/ +/g, '');
|
||||
const remoteSchemaDef = {
|
||||
timeout_seconds: timeoutSeconds,
|
||||
forward_client_headers: currState.forwardClientHeaders,
|
||||
headers: getReqHeader(getState().remoteSchemas.headerData.headers),
|
||||
};
|
||||
const remoteSchemaComment = currState?.comment;
|
||||
|
||||
resolveObj.definition.headers = [
|
||||
...getReqHeader(getState().remoteSchemas.headerData.headers),
|
||||
];
|
||||
|
||||
if (resolveObj.definition.url) {
|
||||
delete resolveObj.definition.url_from_env;
|
||||
} else {
|
||||
delete resolveObj.definition.url;
|
||||
}
|
||||
/* TODO: Add mandatory fields validation */
|
||||
|
||||
const migrationName =
|
||||
'create_remote_schema_' + currState.name.trim().replace(/ +/g, '');
|
||||
|
||||
const payload = {
|
||||
type: 'add_remote_schema',
|
||||
args: {
|
||||
...resolveObj,
|
||||
},
|
||||
};
|
||||
|
||||
const downPayload = {
|
||||
type: 'remove_remote_schema',
|
||||
args: {
|
||||
name: currState.name,
|
||||
},
|
||||
};
|
||||
|
||||
const migration = new Migration();
|
||||
migration.add(payload, downPayload);
|
||||
|
||||
const requestMsg = 'Adding remote schema...';
|
||||
const successMsg = 'Remote schema added successfully';
|
||||
const errorMsg = 'Adding remote schema failed';
|
||||
|
||||
const customOnSuccess = data => {
|
||||
Promise.all([
|
||||
dispatch({ type: RESET }),
|
||||
dispatch(exportMetadata()).then(() => {
|
||||
dispatch(push(`${prefixUrl}/manage/${resolveObj.name}/details`));
|
||||
}),
|
||||
dispatch({ type: getHeaderEvents.RESET_HEADER, data: data }),
|
||||
]);
|
||||
};
|
||||
const customOnError = err => {
|
||||
console.error('Failed to create remote schema' + JSON.stringify(err));
|
||||
dispatch({ type: ADD_REMOTE_SCHEMA_FAIL, data: err });
|
||||
};
|
||||
dispatch({ type: ADDING_REMOTE_SCHEMA });
|
||||
|
||||
return dispatch(
|
||||
makeRequest(
|
||||
migration.upMigration,
|
||||
migration.downMigration,
|
||||
migrationName,
|
||||
customOnSuccess,
|
||||
customOnError,
|
||||
requestMsg,
|
||||
successMsg,
|
||||
errorMsg
|
||||
if (!manualUrl && !envName) {
|
||||
dispatch(
|
||||
showErrorNotification(
|
||||
'Error in adding remote schema...',
|
||||
'A valid GraphQL server URL is required'
|
||||
)
|
||||
);
|
||||
};
|
||||
};
|
||||
return;
|
||||
} else if (manualUrl) {
|
||||
remoteSchemaDef.url = manualUrl;
|
||||
} else if (envName) {
|
||||
remoteSchemaDef.url_from_env = envName;
|
||||
}
|
||||
|
||||
const deleteRemoteSchema = () => {
|
||||
return (dispatch, getState) => {
|
||||
const currState = getState().remoteSchemas.addData;
|
||||
const resolveObj = {
|
||||
name: currState.editState.originalName,
|
||||
};
|
||||
const migrationName =
|
||||
'remove_remote_schema_' + resolveObj.name.trim().replace(/ +/g, '');
|
||||
const payload = {
|
||||
type: 'remove_remote_schema',
|
||||
args: {
|
||||
name: currState.editState.originalName,
|
||||
},
|
||||
};
|
||||
const downPayload = {
|
||||
type: 'add_remote_schema',
|
||||
args: {
|
||||
name: currState.editState.originalName,
|
||||
definition: {
|
||||
url: currState.editState.originalUrl,
|
||||
url_from_env: currState.editState.originalEnvUrl,
|
||||
headers: [],
|
||||
forward_client_headers:
|
||||
currState.editState.originalForwardClientHeaders,
|
||||
},
|
||||
},
|
||||
};
|
||||
const migrationName = `create_remote_schema_${remoteSchemaName}`;
|
||||
const payload = addRemoteSchemaQuery(
|
||||
remoteSchemaName,
|
||||
remoteSchemaDef,
|
||||
remoteSchemaComment
|
||||
);
|
||||
const downPayload = removeRemoteSchemaQuery(remoteSchemaName);
|
||||
|
||||
downPayload.args.definition.headers = [
|
||||
...currState.editState.originalHeaders,
|
||||
];
|
||||
const requestMsg = 'Adding remote schema...';
|
||||
const successMsg = 'Remote schema added successfully';
|
||||
const errorMsg = 'Adding remote schema failed';
|
||||
|
||||
const migration = new Migration();
|
||||
migration.add(payload, downPayload);
|
||||
|
||||
const requestMsg = 'Deleting remote schema...';
|
||||
const successMsg = 'Remote schema deleted successfully';
|
||||
const errorMsg = 'Delete remote schema failed';
|
||||
|
||||
const customOnSuccess = () => {
|
||||
// dispatch({ type: REQUEST_SUCCESS });
|
||||
Promise.all([
|
||||
dispatch({ type: RESET }),
|
||||
dispatch(push(prefixUrl)),
|
||||
dispatch(exportMetadata()),
|
||||
]);
|
||||
clearIntrospectionSchemaCache();
|
||||
};
|
||||
const customOnError = error => {
|
||||
Promise.all([dispatch({ type: DELETE_REMOTE_SCHEMA_FAIL, data: error })]);
|
||||
};
|
||||
|
||||
dispatch({ type: DELETING_REMOTE_SCHEMA });
|
||||
return dispatch(
|
||||
makeRequest(
|
||||
migration.upMigration,
|
||||
migration.downMigration,
|
||||
migrationName,
|
||||
customOnSuccess,
|
||||
customOnError,
|
||||
requestMsg,
|
||||
successMsg,
|
||||
errorMsg
|
||||
)
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
const modifyRemoteSchema = () => {
|
||||
return (dispatch, getState) => {
|
||||
const currState = getState().remoteSchemas.addData;
|
||||
const remoteSchemaName = currState.name.trim().replace(/ +/g, '');
|
||||
// const url = Endpoints.getSchema;
|
||||
const migration = new Migration();
|
||||
const migrationName = 'update_remote_schema_' + remoteSchemaName;
|
||||
const deleteRemoteSchemaUp = {
|
||||
type: 'remove_remote_schema',
|
||||
args: {
|
||||
name: currState.editState.originalName,
|
||||
},
|
||||
};
|
||||
|
||||
let newTimeout = parseInt(currState.timeoutConf, 10);
|
||||
let oldTimeout = parseInt(currState.editState.originalTimeoutConf, 10);
|
||||
if (isNaN(newTimeout)) newTimeout = 60;
|
||||
if (isNaN(oldTimeout)) oldTimeout = 60;
|
||||
|
||||
const resolveObj = {
|
||||
name: remoteSchemaName,
|
||||
definition: {
|
||||
url: currState.manualUrl,
|
||||
url_from_env: currState.envName,
|
||||
timeout_seconds: newTimeout,
|
||||
forward_client_headers: currState.forwardClientHeaders,
|
||||
headers: [],
|
||||
},
|
||||
};
|
||||
|
||||
resolveObj.definition.headers = getReqHeader(
|
||||
getState().remoteSchemas.headerData.headers
|
||||
);
|
||||
|
||||
if (resolveObj.definition.url) {
|
||||
delete resolveObj.definition.url_from_env;
|
||||
} else {
|
||||
delete resolveObj.definition.url;
|
||||
}
|
||||
|
||||
const createRemoteSchemaUp = {
|
||||
type: 'add_remote_schema',
|
||||
args: {
|
||||
...resolveObj,
|
||||
},
|
||||
};
|
||||
|
||||
// Delete the new one and create the old one
|
||||
const deleteRemoteSchemaDown = {
|
||||
type: 'remove_remote_schema',
|
||||
args: {
|
||||
name: remoteSchemaName,
|
||||
},
|
||||
};
|
||||
|
||||
const resolveDownObj = {
|
||||
name: currState.editState.originalName,
|
||||
definition: {
|
||||
url: currState.editState.originalUrl,
|
||||
url_from_env: currState.editState.originalEnvUrl,
|
||||
timeout_seconds: oldTimeout,
|
||||
headers: [],
|
||||
forward_client_headers:
|
||||
currState.editState.originalForwardClientHeaders,
|
||||
},
|
||||
};
|
||||
|
||||
resolveDownObj.definition.headers = [
|
||||
...currState.editState.originalHeaders,
|
||||
];
|
||||
if (resolveDownObj.definition.url) {
|
||||
delete resolveDownObj.definition.url_from_env;
|
||||
} else {
|
||||
delete resolveDownObj.definition.url;
|
||||
}
|
||||
|
||||
const createRemoteSchemaDown = {
|
||||
type: 'add_remote_schema',
|
||||
args: {
|
||||
...resolveDownObj,
|
||||
},
|
||||
};
|
||||
// old schema
|
||||
migration.add(deleteRemoteSchemaUp, createRemoteSchemaDown);
|
||||
// new schema
|
||||
migration.add(createRemoteSchemaUp, deleteRemoteSchemaDown);
|
||||
// End of down
|
||||
|
||||
const requestMsg = 'Modifying remote schema...';
|
||||
const successMsg = 'Remote schema modified';
|
||||
const errorMsg = 'Modify remote schema failed';
|
||||
|
||||
const customOnSuccess = data => {
|
||||
dispatch({ type: RESET, data: data });
|
||||
dispatch(push(`${prefixUrl}/manage/schemas`)); // to avoid 404
|
||||
const customOnSuccess = data => {
|
||||
Promise.all([
|
||||
dispatch({ type: RESET }),
|
||||
dispatch(exportMetadata()).then(() => {
|
||||
dispatch(push(`${prefixUrl}/manage/${remoteSchemaName}/details`));
|
||||
dispatch(fetchRemoteSchema(remoteSchemaName));
|
||||
});
|
||||
clearIntrospectionSchemaCache();
|
||||
};
|
||||
const customOnError = error => {
|
||||
Promise.all([dispatch({ type: MODIFY_REMOTE_SCHEMA_FAIL, data: error })]);
|
||||
};
|
||||
dispatch(_push(`${prefixUrl}/manage/${remoteSchemaName}/details`));
|
||||
}),
|
||||
dispatch({ type: getHeaderEvents.RESET_HEADER, data: data }),
|
||||
]);
|
||||
};
|
||||
const customOnError = err => {
|
||||
console.error(`Failed to create remote schema ${JSON.stringify(err)}`);
|
||||
dispatch({ type: ADD_REMOTE_SCHEMA_FAIL, data: err });
|
||||
};
|
||||
dispatch({ type: ADDING_REMOTE_SCHEMA });
|
||||
|
||||
dispatch({ type: MODIFYING_REMOTE_SCHEMA });
|
||||
return dispatch(
|
||||
makeRequest(
|
||||
migration.upMigration,
|
||||
migration.downMigration,
|
||||
migrationName,
|
||||
customOnSuccess,
|
||||
customOnError,
|
||||
requestMsg,
|
||||
successMsg,
|
||||
errorMsg
|
||||
return dispatch(
|
||||
makeRequest(
|
||||
[payload],
|
||||
[downPayload],
|
||||
migrationName,
|
||||
customOnSuccess,
|
||||
customOnError,
|
||||
requestMsg,
|
||||
successMsg,
|
||||
errorMsg
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
const deleteRemoteSchema = () => (dispatch, getState) => {
|
||||
const currState = getState().remoteSchemas.addData;
|
||||
|
||||
const remoteSchemaName = currState.editState.originalName;
|
||||
const remoteSchemaDef = {
|
||||
headers: currState.editState.originalHeaders,
|
||||
forward_client_headers: currState.editState.originalForwardClientHeaders,
|
||||
timeout_seconds: currState.editState.originalTimeoutConf,
|
||||
};
|
||||
const remoteSchemaComment = currState.editState?.originalComment ?? '';
|
||||
|
||||
if (!currState.editState.originalUrl) {
|
||||
remoteSchemaDef.url_from_env = currState.editState.originalEnvUrl;
|
||||
} else if (!currState.editState.originalEnvUrl) {
|
||||
remoteSchemaDef.url = currState.editState.originalUrl;
|
||||
}
|
||||
|
||||
const migrationName = `remove_remote_schema_${remoteSchemaName
|
||||
.trim()
|
||||
.replace(/ +/g, '')}`;
|
||||
const payload = removeRemoteSchemaQuery(remoteSchemaName);
|
||||
const downPayload = addRemoteSchemaQuery(
|
||||
remoteSchemaName,
|
||||
remoteSchemaDef,
|
||||
remoteSchemaComment
|
||||
);
|
||||
|
||||
const requestMsg = 'Deleting remote schema...';
|
||||
const successMsg = 'Remote schema deleted successfully';
|
||||
const errorMsg = 'Delete remote schema failed';
|
||||
|
||||
const customOnSuccess = () => {
|
||||
Promise.all([
|
||||
dispatch({ type: RESET }),
|
||||
dispatch(_push(prefixUrl)),
|
||||
dispatch(exportMetadata()),
|
||||
]);
|
||||
clearIntrospectionSchemaCache();
|
||||
};
|
||||
const customOnError = error => {
|
||||
Promise.all([dispatch({ type: DELETE_REMOTE_SCHEMA_FAIL, data: error })]);
|
||||
};
|
||||
dispatch({ type: DELETING_REMOTE_SCHEMA });
|
||||
|
||||
return dispatch(
|
||||
makeRequest(
|
||||
[payload],
|
||||
[downPayload],
|
||||
migrationName,
|
||||
customOnSuccess,
|
||||
customOnError,
|
||||
requestMsg,
|
||||
successMsg,
|
||||
errorMsg
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
const modifyRemoteSchema = () => (dispatch, getState) => {
|
||||
const currState = getState().remoteSchemas.addData;
|
||||
|
||||
let timeoutSeconds = parseInt(currState.timeoutConf, 10);
|
||||
if (isNaN(timeoutSeconds)) timeoutSeconds = 60;
|
||||
|
||||
const manualUrl = currState?.manualUrl?.trim();
|
||||
const envName = currState?.envName?.trim();
|
||||
const remoteSchemaName = currState.name.trim().replace(/ +/g, '');
|
||||
const remoteSchemaDef = {
|
||||
timeout_seconds: timeoutSeconds,
|
||||
forward_client_headers: currState.forwardClientHeaders,
|
||||
headers: getReqHeader(getState().remoteSchemas.headerData.headers),
|
||||
};
|
||||
const remoteSchemaComment = currState?.comment;
|
||||
|
||||
if (!manualUrl && !envName) {
|
||||
dispatch(
|
||||
showErrorNotification(
|
||||
'Error in adding remote schema...',
|
||||
'A valid GraphQL server URL is required'
|
||||
)
|
||||
);
|
||||
return;
|
||||
} else if (manualUrl) {
|
||||
remoteSchemaDef.url = manualUrl;
|
||||
} else if (envName) {
|
||||
remoteSchemaDef.url_from_env = envName;
|
||||
}
|
||||
|
||||
const upQuery = updateRemoteSchemaQuery(
|
||||
remoteSchemaName,
|
||||
remoteSchemaDef,
|
||||
remoteSchemaComment
|
||||
);
|
||||
|
||||
let oldTimeout = parseInt(currState?.editState?.originalTimeoutConf, 10);
|
||||
if (isNaN(oldTimeout)) oldTimeout = 60;
|
||||
|
||||
const oldRemoteSchemaDef = {
|
||||
timeout_seconds: oldTimeout,
|
||||
headers: currState.editState.originalHeaders,
|
||||
forward_client_headers: currState.editState.originalForwardClientHeaders,
|
||||
};
|
||||
|
||||
if (!currState.editState.originalUrl) {
|
||||
oldRemoteSchemaDef.url_from_env = currState.editState.originalEnvUrl;
|
||||
} else if (!currState.editState.originalEnvUrl) {
|
||||
oldRemoteSchemaDef.url = currState.editState.originalUrl;
|
||||
}
|
||||
|
||||
const downQuery = updateRemoteSchemaQuery(
|
||||
remoteSchemaName,
|
||||
oldRemoteSchemaDef,
|
||||
currState.editState.originalComment
|
||||
);
|
||||
|
||||
const migration = new Migration();
|
||||
const migrationName = `update_remote_schema_${remoteSchemaName}`;
|
||||
migration.add(upQuery, downQuery);
|
||||
|
||||
const requestMsg = 'Modifying remote schema...';
|
||||
const successMsg = 'Remote schema modified';
|
||||
const errorMsg = 'Modify remote schema failed';
|
||||
|
||||
const customOnSuccess = data => {
|
||||
dispatch({ type: RESET, data: data });
|
||||
dispatch(_push(`${prefixUrl}/manage/schemas`)); // to avoid 404
|
||||
dispatch(exportMetadata()).then(() => {
|
||||
dispatch(_push(`${prefixUrl}/manage/${remoteSchemaName}/details`));
|
||||
dispatch(fetchRemoteSchema(remoteSchemaName));
|
||||
});
|
||||
clearIntrospectionSchemaCache();
|
||||
};
|
||||
const customOnError = error => {
|
||||
Promise.all([dispatch({ type: MODIFY_REMOTE_SCHEMA_FAIL, data: error })]);
|
||||
};
|
||||
|
||||
dispatch({ type: MODIFYING_REMOTE_SCHEMA });
|
||||
return dispatch(
|
||||
makeRequest(
|
||||
migration.upMigration,
|
||||
migration.downMigration,
|
||||
migrationName,
|
||||
customOnSuccess,
|
||||
customOnError,
|
||||
requestMsg,
|
||||
successMsg,
|
||||
errorMsg
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
const addRemoteSchemaReducer = (state = addState, action) => {
|
||||
@ -409,6 +356,11 @@ const addRemoteSchemaReducer = (state = addState, action) => {
|
||||
...state,
|
||||
timeoutConf: action.data,
|
||||
};
|
||||
case COMMENT_CHANGED:
|
||||
return {
|
||||
...state,
|
||||
comment: action.data,
|
||||
};
|
||||
case ADDING_REMOTE_SCHEMA:
|
||||
return {
|
||||
...state,
|
||||
@ -452,6 +404,7 @@ const addRemoteSchemaReducer = (state = addState, action) => {
|
||||
? action.data.definition.timeout_seconds.toString()
|
||||
: '60',
|
||||
forwardClientHeaders: action.data.definition.forward_client_headers,
|
||||
comment: action.data?.comment || '',
|
||||
editState: {
|
||||
...state,
|
||||
isModify: false,
|
||||
@ -461,6 +414,7 @@ const addRemoteSchemaReducer = (state = addState, action) => {
|
||||
originalEnvUrl: action.data.definition.url_from_env || null,
|
||||
originalForwardClientHeaders:
|
||||
action.data.definition.forward_client_headers || false,
|
||||
originalComment: action.data?.comment || '',
|
||||
},
|
||||
isFetching: false,
|
||||
isFetchError: null,
|
||||
|
@ -42,6 +42,7 @@ class Common extends React.Component {
|
||||
envName,
|
||||
timeoutConf,
|
||||
forwardClientHeaders,
|
||||
comment,
|
||||
isNew = false,
|
||||
} = this.props;
|
||||
const { isModify } = this.props.editState;
|
||||
@ -77,6 +78,11 @@ class Common extends React.Component {
|
||||
seconds.
|
||||
</Tooltip>
|
||||
),
|
||||
comment: (
|
||||
<Tooltip id="tooltip-cascade">
|
||||
A statement to help describe the remote schema in brief
|
||||
</Tooltip>
|
||||
),
|
||||
};
|
||||
|
||||
const getTimeoutSection = () => {
|
||||
@ -130,7 +136,7 @@ class Common extends React.Component {
|
||||
value={name}
|
||||
data-key="name"
|
||||
onChange={this.handleInputChange.bind(this)}
|
||||
disabled={isDisabled}
|
||||
disabled={!isNew}
|
||||
required
|
||||
data-test="remote-schema-schema-name"
|
||||
pattern="^[a-zA-Z0-9-_]*$"
|
||||
@ -225,6 +231,27 @@ class Common extends React.Component {
|
||||
/>
|
||||
<hr />
|
||||
{getTimeoutSection()}
|
||||
<hr />
|
||||
<div className={styles.subheading_text}>
|
||||
Comment
|
||||
<OverlayTrigger placement="right" overlay={tooltips.comment}>
|
||||
<i className="fa fa-question-circle" aria-hidden="true" />
|
||||
</OverlayTrigger>
|
||||
</div>
|
||||
<label
|
||||
className={`${styles.inputLabel} radio-inline ${styles.padd_left_remove}`}
|
||||
>
|
||||
<input
|
||||
className="form-control"
|
||||
type="text"
|
||||
placeholder="Comment"
|
||||
value={comment}
|
||||
data-key="comment"
|
||||
onChange={this.handleInputChange.bind(this)}
|
||||
disabled={isDisabled}
|
||||
data-test="remote-schema-comment"
|
||||
/>
|
||||
</label>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
@ -22,6 +22,7 @@ const addState: AddState = {
|
||||
timeoutConf: '',
|
||||
name: '',
|
||||
forwardClientHeaders: false,
|
||||
comment: '',
|
||||
...asyncState,
|
||||
editState: {
|
||||
id: -1,
|
||||
@ -32,6 +33,7 @@ const addState: AddState = {
|
||||
originalEnvUrl: '',
|
||||
originalTimeoutConf: '',
|
||||
originalForwardClientHeaders: false,
|
||||
originalComment: '',
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -27,6 +27,7 @@ export type EditState = {
|
||||
originalEnvUrl: string;
|
||||
originalTimeoutConf: string;
|
||||
originalForwardClientHeaders: boolean;
|
||||
originalComment?: string;
|
||||
};
|
||||
|
||||
export type AddState = AsyncState & {
|
||||
@ -37,6 +38,7 @@ export type AddState = AsyncState & {
|
||||
name: string;
|
||||
forwardClientHeaders: boolean;
|
||||
editState: EditState;
|
||||
comment?: string;
|
||||
};
|
||||
|
||||
export type ListState = AsyncState & {
|
||||
|
@ -7,6 +7,7 @@ import {
|
||||
HasuraMetadataV3,
|
||||
QualifiedFunction,
|
||||
RestEndpointEntry,
|
||||
RemoteSchemaDef,
|
||||
} from './types';
|
||||
import { transformHeaders } from '../components/Common/Headers/utils';
|
||||
import { LocalEventTriggerState } from '../components/Services/Events/EventTriggers/state';
|
||||
@ -54,6 +55,7 @@ export const metadataQueryTypes = [
|
||||
'get_inconsistent_metadata',
|
||||
'drop_inconsistent_metadata',
|
||||
'add_remote_schema',
|
||||
'update_remote_schema',
|
||||
'remove_remote_schema',
|
||||
'reload_remote_schema',
|
||||
'introspect_remote_schema',
|
||||
@ -829,3 +831,27 @@ export const dropRESTEndpointQuery = (name: string) => ({
|
||||
type: 'drop_rest_endpoint',
|
||||
args: { name },
|
||||
});
|
||||
|
||||
const getMetadataQueryForRemoteSchema = (queryName: 'add' | 'update') => (
|
||||
name: string,
|
||||
definition: RemoteSchemaDef,
|
||||
comment?: string
|
||||
) => ({
|
||||
type: `${queryName}_remote_schema` as MetadataQueryType,
|
||||
args: {
|
||||
name,
|
||||
definition,
|
||||
comment: comment ?? null,
|
||||
},
|
||||
});
|
||||
|
||||
export const addRemoteSchemaQuery = getMetadataQueryForRemoteSchema('add');
|
||||
|
||||
export const updateRemoteSchemaQuery = getMetadataQueryForRemoteSchema(
|
||||
'update'
|
||||
);
|
||||
|
||||
export const removeRemoteSchemaQuery = (name: string) => ({
|
||||
type: 'remove_remote_schema',
|
||||
args: { name },
|
||||
});
|
||||
|
@ -267,6 +267,11 @@ The various types of queries are listed in the following table:
|
||||
- 1
|
||||
- Add a remote GraphQL server as a remote schema
|
||||
|
||||
* - :ref:`metadata_update_remote_schema`
|
||||
- :ref:`update_remote_schema_args <metadata_update_remote_schema_syntax>`
|
||||
- 1
|
||||
- Update the details for a remote schema
|
||||
|
||||
* - :ref:`metadata_remove_remote_schema`
|
||||
- :ref:`remove_remote_schema_args <metadata_remove_remote_schema_syntax>`
|
||||
- 1
|
||||
|
@ -54,6 +54,60 @@ An example request as follows:
|
||||
|
||||
.. _metadata_add_remote_schema_syntax:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
* - Key
|
||||
- Required
|
||||
- Schema
|
||||
- Description
|
||||
* - name
|
||||
- true
|
||||
- :ref:`RemoteSchemaName`
|
||||
- Name of the remote schema
|
||||
* - definition
|
||||
- true
|
||||
- :ref:`RemoteSchemaDef`
|
||||
- Definition for the remote schema
|
||||
* - comment
|
||||
- false
|
||||
- Text
|
||||
- Comment for the remote schema
|
||||
|
||||
.. _metadata_update_remote_schema:
|
||||
|
||||
update_remote_schema
|
||||
-----------------
|
||||
|
||||
``update_remote_schema`` is used to update the configuration of a remote schema. If the remote schema URL has changed
|
||||
then it will perform a introspection as well. After introspection, if there are any inconsistencies detected with other
|
||||
metadata objects (like remote relationships or remote schema permissions) they will be reported as `inconsistent_metadata`.
|
||||
|
||||
An example request as follows:
|
||||
|
||||
.. code-block:: http
|
||||
|
||||
POST /v1/metadata HTTP/1.1
|
||||
Content-Type: application/json
|
||||
X-Hasura-Role: admin
|
||||
|
||||
{
|
||||
"type": "update_remote_schema",
|
||||
"args": {
|
||||
"name": "my remote schema",
|
||||
"definition": {
|
||||
"url": "https://remote-server.com/graphql",
|
||||
"headers": [{"name": "X-Server-Request-From", "value": "Hasura"}],
|
||||
"forward_client_headers": false,
|
||||
"timeout_seconds": 60
|
||||
},
|
||||
"comment": "some optional comment"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.. _metadata_update_remote_schema_syntax:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
|
@ -266,6 +266,11 @@ The various types of queries are listed in the following table:
|
||||
- 1
|
||||
- Add a remote GraphQL server as a remote schema
|
||||
|
||||
* - :ref:`update_remote_schema`
|
||||
- :ref:`update_remote_schema_args <update_remote_schema_syntax>`
|
||||
- 1
|
||||
- Update the details for a remote schema
|
||||
|
||||
* - :ref:`remove_remote_schema`
|
||||
- :ref:`remove_remote_schema_args <remove_remote_schema_syntax>`
|
||||
- 1
|
||||
|
@ -56,6 +56,60 @@ An example request as follows:
|
||||
|
||||
.. _add_remote_schema_syntax:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
* - Key
|
||||
- Required
|
||||
- Schema
|
||||
- Description
|
||||
* - name
|
||||
- true
|
||||
- :ref:`RemoteSchemaName`
|
||||
- Name of the remote schema
|
||||
* - definition
|
||||
- true
|
||||
- :ref:`RemoteSchemaDef`
|
||||
- Definition for the remote schema
|
||||
* - comment
|
||||
- false
|
||||
- Text
|
||||
- Comment for the remote schema
|
||||
|
||||
.. _update_remote_schema:
|
||||
|
||||
update_remote_schema
|
||||
-----------------
|
||||
|
||||
``update_remote_schema`` is used to update the configuration of a remote schema. If the remote schema URL has changed,
|
||||
it will also perform an introspection. After introspection, if any inconsistencies are detected with other
|
||||
metadata objects (such as remote relationships or remote schema permissions), they will be reported as ``inconsistent_metadata``.
|
||||
|
||||
An example request is as follows:
|
||||
|
||||
.. code-block:: http
|
||||
|
||||
POST /v1/query HTTP/1.1
|
||||
Content-Type: application/json
|
||||
X-Hasura-Role: admin
|
||||
|
||||
{
|
||||
"type": "update_remote_schema",
|
||||
"args": {
|
||||
"name": "my remote schema",
|
||||
"definition": {
|
||||
"url": "https://remote-server.com/graphql",
|
||||
"headers": [{"name": "X-Server-Request-From", "value": "Hasura"}],
|
||||
"forward_client_headers": false,
|
||||
"timeout_seconds": 60
|
||||
},
|
||||
"comment": "some optional comment"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.. _update_remote_schema_syntax:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
|
@ -9,6 +9,7 @@ module Hasura.RQL.DDL.RemoteSchema
|
||||
, dropRemoteSchemaPermissionInMetadata
|
||||
, runAddRemoteSchemaPermissions
|
||||
, runDropRemoteSchemaPermissions
|
||||
, runUpdateRemoteSchema
|
||||
) where
|
||||
|
||||
import Hasura.Prelude
|
||||
@ -31,7 +32,6 @@ import Hasura.RQL.Types
|
||||
import Hasura.Server.Version (HasVersion)
|
||||
import Hasura.Session
|
||||
|
||||
|
||||
runAddRemoteSchema
|
||||
:: ( HasVersion
|
||||
, QErrM m
|
||||
@ -109,8 +109,8 @@ addRemoteSchemaP1
|
||||
:: (QErrM m, CacheRM m)
|
||||
=> RemoteSchemaName -> m ()
|
||||
addRemoteSchemaP1 name = do
|
||||
remoteSchemaMap <- scRemoteSchemas <$> askSchemaCache
|
||||
onJust (Map.lookup name remoteSchemaMap) $ const $
|
||||
remoteSchemaNames <- getAllRemoteSchemas <$> askSchemaCache
|
||||
when (name `elem` remoteSchemaNames) $
|
||||
throw400 AlreadyExists $ "remote schema with name "
|
||||
<> name <<> " already exists"
|
||||
|
||||
@ -149,7 +149,7 @@ removeRemoteSchemaP1 rsn = do
|
||||
|
||||
-- we only report the non permission dependencies because we
|
||||
-- drop the related permissions
|
||||
when (nonPermDependentObjs /= []) $ reportDeps nonPermDependentObjs
|
||||
unless (null nonPermDependentObjs) $ reportDeps nonPermDependentObjs
|
||||
pure roles
|
||||
where
|
||||
remoteSchemaDepId = SORemoteSchema rsn
|
||||
@ -191,3 +191,50 @@ runIntrospectRemoteSchema (RemoteSchemaNameQuery rsName) = do
|
||||
RemoteSchemaCtx _ _ _ introspectionByteString _ _ <-
|
||||
Map.lookup rsName (scRemoteSchemas sc) `onNothing` throw400 NotExists ("remote schema: " <> rsName <<> " not found")
|
||||
pure $ encJFromLBS introspectionByteString
|
||||
|
||||
runUpdateRemoteSchema
|
||||
:: (HasVersion
|
||||
, QErrM m
|
||||
, CacheRWM m
|
||||
, MonadIO m
|
||||
, MonadUnique m
|
||||
, HasHttpManagerM m
|
||||
, MetadataM m
|
||||
)
|
||||
=> Env.Environment
|
||||
-> AddRemoteSchemaQuery
|
||||
-> m EncJSON
|
||||
runUpdateRemoteSchema env (AddRemoteSchemaQuery name defn comment) = do
|
||||
remoteSchemaNames <- getAllRemoteSchemas <$> askSchemaCache
|
||||
remoteSchemaMap <- _metaRemoteSchemas <$> getMetadata
|
||||
|
||||
let metadataRMSchema = OMap.lookup name remoteSchemaMap
|
||||
metadataRMSchemaPerms = maybe mempty _rsmPermissions metadataRMSchema
|
||||
-- `metadataRMSchemaURL` and `metadataRMSchemaURLFromEnv` represent
|
||||
-- details that were stored within the metadata
|
||||
metadataRMSchemaURL = (_rsdUrl . _rsmDefinition) =<< metadataRMSchema
|
||||
metadataRMSchemaURLFromEnv = (_rsdUrlFromEnv . _rsmDefinition) =<< metadataRMSchema
|
||||
-- `currentRMSchemaURL` and `currentRMSchemaURLFromEnv` represent
|
||||
-- the details that were provided in the request
|
||||
currentRMSchemaURL = _rsdUrl defn
|
||||
currentRMSchemaURLFromEnv = _rsdUrlFromEnv defn
|
||||
|
||||
unless (name `elem` remoteSchemaNames) $
|
||||
throw400 NotExists $ "remote schema with name " <> name <<> " doesn't exist"
|
||||
|
||||
rsi <- validateRemoteSchemaDef env defn
|
||||
|
||||
-- we only proceed to fetch the remote schema if the url has been updated
|
||||
unless ((isJust metadataRMSchemaURL && isJust currentRMSchemaURL && metadataRMSchemaURL == currentRMSchemaURL) ||
|
||||
(isJust metadataRMSchemaURLFromEnv && isJust currentRMSchemaURLFromEnv && metadataRMSchemaURLFromEnv == currentRMSchemaURLFromEnv)) $ do
|
||||
httpMgr <- askHttpManager
|
||||
void $ fetchRemoteSchema env httpMgr name rsi
|
||||
|
||||
-- This will throw an error if the newly fetched schema is incompatible
|
||||
-- with the existing permissions and relations
|
||||
withNewInconsistentObjsCheck $ buildSchemaCacheFor (MORemoteSchema name) $
|
||||
MetadataModifier $ metaRemoteSchemas %~ OMap.insert name (remoteSchemaMeta metadataRMSchemaPerms)
|
||||
|
||||
pure successMsg
|
||||
where
|
||||
remoteSchemaMeta perms = RemoteSchemaMetadata name defn comment perms
|
||||
|
@ -183,6 +183,7 @@ data RQLMetadataV1
|
||||
|
||||
-- Remote schemas
|
||||
| RMAddRemoteSchema !AddRemoteSchemaQuery
|
||||
| RMUpdateRemoteSchema !AddRemoteSchemaQuery
|
||||
| RMRemoveRemoteSchema !RemoteSchemaNameQuery
|
||||
| RMReloadRemoteSchema !RemoteSchemaNameQuery
|
||||
| RMIntrospectRemoteSchema !RemoteSchemaNameQuery
|
||||
@ -539,6 +540,7 @@ runMetadataQueryV1M env currentResourceVersion = \case
|
||||
RMDropInconsistentMetadata q -> runDropInconsistentMetadata q
|
||||
|
||||
RMAddRemoteSchema q -> runAddRemoteSchema env q
|
||||
RMUpdateRemoteSchema q -> runUpdateRemoteSchema env q
|
||||
RMRemoveRemoteSchema q -> runRemoveRemoteSchema q
|
||||
RMReloadRemoteSchema q -> runReloadRemoteSchema q
|
||||
RMIntrospectRemoteSchema q -> runIntrospectRemoteSchema q
|
||||
|
@ -97,6 +97,7 @@ data RQLQueryV1
|
||||
|
||||
-- schema-stitching, custom resolver related
|
||||
| RQAddRemoteSchema !AddRemoteSchemaQuery
|
||||
| RQUpdateRemoteSchema !AddRemoteSchemaQuery
|
||||
| RQRemoveRemoteSchema !RemoteSchemaNameQuery
|
||||
| RQReloadRemoteSchema !RemoteSchemaNameQuery
|
||||
| RQIntrospectRemoteSchema !RemoteSchemaNameQuery
|
||||
@ -271,6 +272,7 @@ queryModifiesSchemaCache (RQV1 qi) = case qi of
|
||||
RQCount _ -> False
|
||||
|
||||
RQAddRemoteSchema _ -> True
|
||||
RQUpdateRemoteSchema _ -> True
|
||||
RQRemoveRemoteSchema _ -> True
|
||||
RQReloadRemoteSchema _ -> True
|
||||
RQIntrospectRemoteSchema _ -> False
|
||||
@ -411,6 +413,7 @@ runQueryM env rq = withPathK "args" $ case rq of
|
||||
RQCount q -> runCount q
|
||||
|
||||
RQAddRemoteSchema q -> runAddRemoteSchema env q
|
||||
RQUpdateRemoteSchema q -> runUpdateRemoteSchema env q
|
||||
RQRemoveRemoteSchema q -> runRemoveRemoteSchema q
|
||||
RQReloadRemoteSchema q -> runReloadRemoteSchema q
|
||||
RQIntrospectRemoteSchema q -> runIntrospectRemoteSchema q
|
||||
@ -510,6 +513,7 @@ requiresAdmin = \case
|
||||
RQCount _ -> False
|
||||
|
||||
RQAddRemoteSchema _ -> True
|
||||
RQUpdateRemoteSchema _ -> True
|
||||
RQRemoveRemoteSchema _ -> True
|
||||
RQReloadRemoteSchema _ -> True
|
||||
RQIntrospectRemoteSchema _ -> True
|
||||
|
@ -0,0 +1,7 @@
|
||||
type: update_remote_schema
|
||||
args:
|
||||
name: my-remote-schema
|
||||
definition:
|
||||
url: http://localhost:4020
|
||||
forward_client_headers: false
|
||||
timeout_seconds: 60
|
@ -0,0 +1,8 @@
|
||||
type: update_remote_schema
|
||||
args:
|
||||
name: my-remote-schema
|
||||
comment: this is from update query
|
||||
definition:
|
||||
url: http://localhost:4021
|
||||
forward_client_headers: True
|
||||
timeout_seconds: 120
|
@ -0,0 +1,8 @@
|
||||
type: update_remote_schema
|
||||
args:
|
||||
name: my-remote-schema
|
||||
comment: this is from update query with error
|
||||
definition:
|
||||
url: http://localhost:4022
|
||||
forward_client_headers: True
|
||||
timeout_seconds: 120
|
@ -2,7 +2,6 @@ const { ApolloServer, ApolloError } = require('apollo-server');
|
||||
const gql = require('graphql-tag');
|
||||
const { print } = require('graphql');
|
||||
|
||||
|
||||
const allMessages = [
|
||||
{ id: 1, name: "Clarke", msg: "Welcome to the team, Clarke"},
|
||||
{ id: 2, name: "Alice", msg: "Welcome to the team, Alice"},
|
||||
|
@ -2,7 +2,6 @@ const { ApolloServer, ApolloError } = require('apollo-server');
|
||||
const gql = require('graphql-tag');
|
||||
const { print } = require('graphql');
|
||||
|
||||
|
||||
const allMessages = [
|
||||
{ id: 1, name: "alice", msg: "You win!"},
|
||||
{ id: 2, name: "bob", msg: "You lose!"},
|
||||
|
@ -2,7 +2,6 @@ const { ApolloServer, ApolloError } = require('apollo-server');
|
||||
const gql = require('graphql-tag');
|
||||
const { print } = require('graphql');
|
||||
|
||||
|
||||
const allMessages = [
|
||||
{ id: 1, name: "alice", msg: "You win!"},
|
||||
{ id: 2, name: "bob", msg: "You lose!"},
|
||||
@ -10,13 +9,12 @@ const allMessages = [
|
||||
];
|
||||
|
||||
const typeDefs = gql`
|
||||
|
||||
type User {
|
||||
user_id: Int
|
||||
userMessages(whered: MessageWhereInpObj, includes: IncludeInpObj): [Message]
|
||||
gimmeText(text: String): String
|
||||
}
|
||||
|
||||
|
||||
interface Communication {
|
||||
id: Int!
|
||||
msg: String!
|
||||
@ -94,7 +92,6 @@ const typeDefs = gql`
|
||||
`;
|
||||
|
||||
const resolvers = {
|
||||
|
||||
User: {
|
||||
userMessages: (parent, { whered, includes }) => {
|
||||
var result = allMessages.filter(m => m.id == parent.user_id);
|
||||
|
@ -0,0 +1,265 @@
|
||||
const { ApolloServer, ApolloError } = require('apollo-server');
|
||||
const gql = require('graphql-tag');
|
||||
const { print } = require('graphql');
|
||||
|
||||
const allMessages = [
|
||||
{ id: 1, name: "alice", msg: "You win!"},
|
||||
{ id: 2, name: "bob", msg: "You lose!"},
|
||||
{ id: 3, name: "alice", msg: "Another alice"},
|
||||
];
|
||||
|
||||
const typeDefs = gql`
|
||||
type User {
|
||||
user_id: Int
|
||||
userMessages(whered: MessageWhereInpObj, includes: IncludeInpObj): [Message]
|
||||
gimmeText(text: String): String
|
||||
}
|
||||
|
||||
interface Communication {
|
||||
id: Int!
|
||||
msg: String!
|
||||
}
|
||||
|
||||
type Message implements Communication {
|
||||
id: Int!
|
||||
name: String!
|
||||
msg: String!
|
||||
errorMsg: String
|
||||
}
|
||||
|
||||
input MessageWhereInpObj {
|
||||
id: IntCompareObj
|
||||
name: StringCompareObj
|
||||
}
|
||||
|
||||
input IntCompareObj {
|
||||
eq : Int
|
||||
gt : Int
|
||||
lt : Int
|
||||
}
|
||||
|
||||
input StringCompareObj {
|
||||
eq : String
|
||||
}
|
||||
|
||||
input IncludeInpObj {
|
||||
id: [Int]
|
||||
name: [String]
|
||||
}
|
||||
|
||||
enum MessageStatus {
|
||||
READ
|
||||
DELIVERED
|
||||
SENT
|
||||
}
|
||||
|
||||
type Person implements Name {
|
||||
firstName: String
|
||||
lastName: String
|
||||
age: Int
|
||||
}
|
||||
|
||||
input Dimensions {
|
||||
height: Int
|
||||
width: Int
|
||||
}
|
||||
|
||||
type Photo {
|
||||
height: Int
|
||||
width: Int
|
||||
}
|
||||
|
||||
type SearchQuery {
|
||||
firstSearchResult: SearchResult
|
||||
}
|
||||
|
||||
union SearchResult = Photo | Person
|
||||
|
||||
interface Name {
|
||||
firstName: String
|
||||
lastName: String
|
||||
}
|
||||
|
||||
type Query {
|
||||
hello: String
|
||||
messages(where: MessageWhereInpObj, includes: IncludeInpObj): [Message]
|
||||
user(user_id: Int!): User
|
||||
users(user_ids: [Int]!): [User]
|
||||
message(id: Int!) : Message
|
||||
communications(id: Int): [Communication]
|
||||
profilePicture(dimensions: Dimensions): Photo
|
||||
}
|
||||
`;
|
||||
|
||||
const resolvers = {
|
||||
User: {
|
||||
userMessages: (parent, { whered, includes }) => {
|
||||
var result = allMessages.filter(m => m.id == parent.user_id);
|
||||
if (whered && whered.id) {
|
||||
var intExp = whered.id;
|
||||
Object.keys(intExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.id == intExp[op]);
|
||||
break;
|
||||
case "gt":
|
||||
result = result.filter(m => m.id > intExp[op]);
|
||||
break;
|
||||
case "lt":
|
||||
result = result.filter(m => m.id < intExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
if (whered && whered.name) {
|
||||
var stringExp = whered.name;
|
||||
Object.keys(stringExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.name == stringExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (includes && includes.id) {
|
||||
var ids = includes.id;
|
||||
result = result.filter(m => ids.includes(m.id));
|
||||
}
|
||||
|
||||
if (includes && includes.name) {
|
||||
var names = includes.name;
|
||||
result = result.filter(m => names.includes(m.name));
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
gimmeText: (_, { text }) => {
|
||||
if (text) {
|
||||
return text;
|
||||
} else {
|
||||
return "no text";
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
Message: {
|
||||
errorMsg : () => {
|
||||
throw new ApolloError("intentional-error", "you asked for it");
|
||||
}
|
||||
},
|
||||
|
||||
Photo: {
|
||||
},
|
||||
|
||||
Query: {
|
||||
hello: () => "world",
|
||||
message: (_, { id }) => {
|
||||
return allMessages.find(m => m.id == id);
|
||||
},
|
||||
messages: (_, { where, includes }) => {
|
||||
var result = allMessages;
|
||||
if (where && where.id) {
|
||||
var intExp = where.id;
|
||||
Object.keys(intExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.id == intExp[op]);
|
||||
break;
|
||||
case "gt":
|
||||
result = result.filter(m => m.id > intExp[op]);
|
||||
break;
|
||||
case "lt":
|
||||
result = result.filter(m => m.id < intExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
if (where && where.name) {
|
||||
var stringExp = where.name;
|
||||
Object.keys(stringExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.name == stringExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (includes && includes.id) {
|
||||
var ids = includes.id;
|
||||
result = result.filter(m => ids.includes(m.id));
|
||||
}
|
||||
|
||||
if (includes && includes.name) {
|
||||
var names = includes.name;
|
||||
result = result.filter(m => names.includes(m.name));
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
user: (_, { user_id }) => {
|
||||
return { "user_id": user_id };
|
||||
},
|
||||
users: (parent, args, context, info) => {
|
||||
var results = []
|
||||
for (userId of args.user_ids) {
|
||||
results.push({"user_id":userId})
|
||||
}
|
||||
return results;
|
||||
},
|
||||
communications: (_, { id }) => {
|
||||
var result = allMessages;
|
||||
if(id) {
|
||||
result = allMessages.filter(m => m.id == id);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
profilePicture: (_, { dimensions }) => {
|
||||
return dimensions
|
||||
},
|
||||
},
|
||||
Communication: {
|
||||
__resolveType(communication, context, info){
|
||||
if(communication.name) {
|
||||
return "Message";
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
class BasicLogging {
|
||||
requestDidStart({queryString, parsedQuery, variables}) {
|
||||
const query = queryString || print(parsedQuery);
|
||||
console.log(query);
|
||||
console.log(variables);
|
||||
}
|
||||
|
||||
willSendResponse({graphqlResponse}) {
|
||||
console.log(JSON.stringify(graphqlResponse, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
const schema = new ApolloServer(
|
||||
{ typeDefs,
|
||||
resolvers,
|
||||
extensions: [() => new BasicLogging()],
|
||||
formatError: (err) => {
|
||||
// Stack traces make expected test output brittle and noisey:
|
||||
delete err.extensions;
|
||||
return err;
|
||||
} });
|
||||
|
||||
schema.listen({ port: process.env.PORT || 4021 }).then(({ url }) => {
|
||||
console.log(`schema ready at ${url}`);
|
||||
});
|
@ -0,0 +1,265 @@
|
||||
const { ApolloServer, ApolloError } = require('apollo-server');
|
||||
const gql = require('graphql-tag');
|
||||
const { print } = require('graphql');
|
||||
|
||||
const allMessages = [
|
||||
{ id: 1, name: "alice", msg: "You win!"},
|
||||
{ id: 2, name: "bob", msg: "You lose!"},
|
||||
{ id: 3, name: "alice", msg: "Another alice"},
|
||||
];
|
||||
|
||||
const typeDefs = gql`
|
||||
type User {
|
||||
user_id: Float
|
||||
userMessages(whered: MessageWhereInpObj, includes: IncludeInpObj): [Message]
|
||||
gimmeText(text: String): String
|
||||
}
|
||||
|
||||
interface Communication {
|
||||
id: Int!
|
||||
msg: String!
|
||||
}
|
||||
|
||||
type Message implements Communication {
|
||||
id: Int!
|
||||
name: String!
|
||||
msg: String!
|
||||
errorMsg: String
|
||||
}
|
||||
|
||||
input MessageWhereInpObj {
|
||||
id: IntCompareObj
|
||||
name: StringCompareObj
|
||||
}
|
||||
|
||||
input IntCompareObj {
|
||||
eq : Int
|
||||
gt : Int
|
||||
lt : Int
|
||||
}
|
||||
|
||||
input StringCompareObj {
|
||||
eq : String
|
||||
}
|
||||
|
||||
input IncludeInpObj {
|
||||
id: [Int]
|
||||
name: [String]
|
||||
}
|
||||
|
||||
enum MessageStatus {
|
||||
READ
|
||||
DELIVERED
|
||||
SENT
|
||||
}
|
||||
|
||||
type Person implements Name {
|
||||
firstName: String
|
||||
lastName: String
|
||||
age: Int
|
||||
}
|
||||
|
||||
input Dimensions {
|
||||
height: Int
|
||||
width: Int
|
||||
}
|
||||
|
||||
type Photo {
|
||||
height: Int
|
||||
width: Int
|
||||
}
|
||||
|
||||
type SearchQuery {
|
||||
firstSearchResult: SearchResult
|
||||
}
|
||||
|
||||
union SearchResult = Photo | Person
|
||||
|
||||
interface Name {
|
||||
firstName: String
|
||||
lastName: String
|
||||
}
|
||||
|
||||
type Query {
|
||||
hello: String
|
||||
messages(where: MessageWhereInpObj, includes: IncludeInpObj): [Message]
|
||||
user(user_id: Float!): User
|
||||
users(user_ids: [Float]!): [User]
|
||||
message(id: Int!) : Message
|
||||
communications(id: Int): [Communication]
|
||||
profilePicture(dimensions: Dimensions): Photo
|
||||
}
|
||||
`;
|
||||
|
||||
const resolvers = {
|
||||
User: {
|
||||
userMessages: (parent, { whered, includes }) => {
|
||||
var result = allMessages.filter(m => m.id == parent.user_id);
|
||||
if (whered && whered.id) {
|
||||
var intExp = whered.id;
|
||||
Object.keys(intExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.id == intExp[op]);
|
||||
break;
|
||||
case "gt":
|
||||
result = result.filter(m => m.id > intExp[op]);
|
||||
break;
|
||||
case "lt":
|
||||
result = result.filter(m => m.id < intExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
if (whered && whered.name) {
|
||||
var stringExp = whered.name;
|
||||
Object.keys(stringExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.name == stringExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (includes && includes.id) {
|
||||
var ids = includes.id;
|
||||
result = result.filter(m => ids.includes(m.id));
|
||||
}
|
||||
|
||||
if (includes && includes.name) {
|
||||
var names = includes.name;
|
||||
result = result.filter(m => names.includes(m.name));
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
|
||||
gimmeText: (_, { text }) => {
|
||||
if (text) {
|
||||
return text;
|
||||
} else {
|
||||
return "no text";
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
Message: {
|
||||
errorMsg : () => {
|
||||
throw new ApolloError("intentional-error", "you asked for it");
|
||||
}
|
||||
},
|
||||
|
||||
Photo: {
|
||||
},
|
||||
|
||||
Query: {
|
||||
hello: () => "world",
|
||||
message: (_, { id }) => {
|
||||
return allMessages.find(m => m.id == id);
|
||||
},
|
||||
messages: (_, { where, includes }) => {
|
||||
var result = allMessages;
|
||||
if (where && where.id) {
|
||||
var intExp = where.id;
|
||||
Object.keys(intExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.id == intExp[op]);
|
||||
break;
|
||||
case "gt":
|
||||
result = result.filter(m => m.id > intExp[op]);
|
||||
break;
|
||||
case "lt":
|
||||
result = result.filter(m => m.id < intExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
if (where && where.name) {
|
||||
var stringExp = where.name;
|
||||
Object.keys(stringExp).forEach(op => {
|
||||
switch(op) {
|
||||
case "eq":
|
||||
result = result.filter(m => m.name == stringExp[op]);
|
||||
break;
|
||||
default:
|
||||
throw new ApolloError("invalid argument", "invalid");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (includes && includes.id) {
|
||||
var ids = includes.id;
|
||||
result = result.filter(m => ids.includes(m.id));
|
||||
}
|
||||
|
||||
if (includes && includes.name) {
|
||||
var names = includes.name;
|
||||
result = result.filter(m => names.includes(m.name));
|
||||
}
|
||||
|
||||
return result;
|
||||
},
|
||||
user: (_, { user_id }) => {
|
||||
return { "user_id": user_id };
|
||||
},
|
||||
users: (parent, args, context, info) => {
|
||||
var results = []
|
||||
for (userId of args.user_ids) {
|
||||
results.push({"user_id":userId})
|
||||
}
|
||||
return results;
|
||||
},
|
||||
communications: (_, { id }) => {
|
||||
var result = allMessages;
|
||||
if(id) {
|
||||
result = allMessages.filter(m => m.id == id);
|
||||
}
|
||||
return result;
|
||||
},
|
||||
profilePicture: (_, { dimensions }) => {
|
||||
return dimensions
|
||||
},
|
||||
},
|
||||
Communication: {
|
||||
__resolveType(communication, context, info){
|
||||
if(communication.name) {
|
||||
return "Message";
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
class BasicLogging {
|
||||
requestDidStart({queryString, parsedQuery, variables}) {
|
||||
const query = queryString || print(parsedQuery);
|
||||
console.log(query);
|
||||
console.log(variables);
|
||||
}
|
||||
|
||||
willSendResponse({graphqlResponse}) {
|
||||
console.log(JSON.stringify(graphqlResponse, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
const schema = new ApolloServer(
|
||||
{ typeDefs,
|
||||
resolvers,
|
||||
extensions: [() => new BasicLogging()],
|
||||
formatError: (err) => {
|
||||
// Stack traces make expected test output brittle and noisey:
|
||||
delete err.extensions;
|
||||
return err;
|
||||
} });
|
||||
|
||||
schema.listen({ port: process.env.PORT || 4022 }).then(({ url }) => {
|
||||
console.log(`schema ready at ${url}`);
|
||||
});
|
@ -18,8 +18,24 @@ def graphql_service():
|
||||
yield svc
|
||||
svc.stop()
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def graphql_service_2():
|
||||
svc = NodeGraphQL(["node", "remote_schemas/nodejs/secondary_remote_schema_perms.js"])
|
||||
svc.start()
|
||||
yield svc
|
||||
svc.stop()
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def graphql_service_3():
|
||||
svc = NodeGraphQL(["node", "remote_schemas/nodejs/secondary_remote_schema_perms_error.js"])
|
||||
svc.start()
|
||||
yield svc
|
||||
svc.stop()
|
||||
|
||||
use_test_fixtures = pytest.mark.usefixtures (
|
||||
"graphql_service",
|
||||
"graphql_service_2",
|
||||
"graphql_service_3",
|
||||
"per_method_tests_db_state"
|
||||
)
|
||||
|
||||
@ -34,6 +50,31 @@ class TestAddRemoteSchemaPermissions:
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
|
||||
assert st_code == 200, resp
|
||||
|
||||
""" Here the schemas are compatible """
|
||||
def test_update_remote_schema_details_with_permissions_set(self, hge_ctx):
|
||||
""" Permissions check """
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
|
||||
assert st_code == 200, resp
|
||||
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/update_schema.yaml')
|
||||
assert st_code == 200, resp
|
||||
""" check the details of remote schema in metadata """
|
||||
st_code, resp = hge_ctx.v1metadataq({"type": "export_metadata", "args": {}})
|
||||
assert st_code == 200, resp
|
||||
assert resp['remote_schemas'][0]['definition']['url'] == "http://localhost:4021"
|
||||
assert resp['remote_schemas'][0]['comment'] == 'this is from update query', resp
|
||||
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 120, resp
|
||||
""" reset the changes to the original config """
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/revert_to_original_config.yaml')
|
||||
assert st_code == 200, resp
|
||||
|
||||
def test_update_remote_schema_details_with_permissions_set_with_error(self, hge_ctx):
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_fields.yaml')
|
||||
assert st_code == 200, resp
|
||||
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'update_remote_schema/update_schema_error.yaml')
|
||||
assert st_code == 400, resp
|
||||
|
||||
def test_add_permission_with_valid_subset_of_arguments(self, hge_ctx):
|
||||
st_code, resp = hge_ctx.v1metadataq_f(self.dir() + 'add_permission_with_valid_subset_of_arguments.yaml')
|
||||
assert st_code == 200, resp
|
||||
|
@ -29,6 +29,21 @@ def mk_add_remote_q(name, url, headers=None, client_hdrs=False, timeout=None):
|
||||
}
|
||||
}
|
||||
|
||||
def mk_update_remote_q(name, url, headers=None, client_hdrs=False, timeout=None):
|
||||
return {
|
||||
"type": "update_remote_schema",
|
||||
"args": {
|
||||
"name": name,
|
||||
"comment": "testing " + name,
|
||||
"definition": {
|
||||
"url": url,
|
||||
"headers": headers,
|
||||
"forward_client_headers": client_hdrs,
|
||||
"timeout_seconds": timeout
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def mk_delete_remote_q(name):
|
||||
return {
|
||||
"type" : "remove_remote_schema",
|
||||
@ -71,6 +86,42 @@ class TestRemoteSchemaBasic:
|
||||
st_code, resp = hge_ctx.v1q(export_metadata_q)
|
||||
assert st_code == 200, resp
|
||||
assert resp['remote_schemas'][0]['name'] == "simple 1"
|
||||
|
||||
def test_update_schema_with_no_url_change(self, hge_ctx):
|
||||
""" call update_remote_schema API and check the details stored in metadata """
|
||||
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, True, 120)
|
||||
st_code, resp = hge_ctx.v1q(q)
|
||||
assert st_code == 200, resp
|
||||
|
||||
st_code, resp = hge_ctx.v1q(export_metadata_q)
|
||||
assert st_code == 200, resp
|
||||
assert resp['remote_schemas'][0]['name'] == "simple 1"
|
||||
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 120
|
||||
assert resp['remote_schemas'][0]['definition']['forward_client_headers'] == True
|
||||
|
||||
""" revert to original config for remote schema """
|
||||
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60)
|
||||
st_code, resp = hge_ctx.v1q(q)
|
||||
assert st_code == 200, resp
|
||||
|
||||
def test_update_schema_with_url_change(self, hge_ctx):
|
||||
""" call update_remote_schema API and check the details stored in metadata """
|
||||
q = mk_update_remote_q('simple 1', 'http://localhost:5000/user-graphql', None, True, 80)
|
||||
st_code, resp = hge_ctx.v1q(q)
|
||||
# This should succeed since there are no conflicting relations or permissions set up
|
||||
assert st_code == 200, resp
|
||||
|
||||
st_code, resp = hge_ctx.v1q(export_metadata_q)
|
||||
assert st_code == 200, resp
|
||||
assert resp['remote_schemas'][0]['name'] == "simple 1"
|
||||
assert resp['remote_schemas'][0]['definition']['url'] == 'http://localhost:5000/user-graphql'
|
||||
assert resp['remote_schemas'][0]['definition']['timeout_seconds'] == 80
|
||||
assert resp['remote_schemas'][0]['definition']['forward_client_headers'] == True
|
||||
|
||||
""" revert to original config for remote schema """
|
||||
q = mk_update_remote_q('simple 1', 'http://localhost:5000/hello-graphql', None, False, 60)
|
||||
st_code, resp = hge_ctx.v1q(q)
|
||||
assert st_code == 200, resp
|
||||
|
||||
@pytest.mark.allow_server_upgrade_test
|
||||
def test_introspection(self, hge_ctx):
|
||||
|