Remote Joins: Create relationships across database and remote schemas (#2392)

add remote joins: Create relationships across database and remote schemas (#2392)

Co-authored-by: Aleksandra Sikora <ola.zxcvbnm@gmail.com>

Co-authored-by: Chris Done <chrisdone@gmail.com>
Co-authored-by: Chris Done <github@chrisdone.com>
Co-authored-by: wawhal <rishichandra.wawhal@gmail.com>
Co-authored-by: Aravind Shankar <aravind@hasura.io>
Co-authored-by: Brandon Simmons <brandon.m.simmons@gmail.com>
Co-authored-by: Rishichandra Wawhal <rishi@hasura.io>
Co-authored-by: Brandon Simmons <brandon@hasura.io>
Co-authored-by: nizar-m <19857260+nizar-m@users.noreply.github.com>
Co-authored-by: Praveen Durairaju <praveend.web@gmail.com>
Co-authored-by: rakeshkky <12475069+rakeshkky@users.noreply.github.com>
Co-authored-by: Anon Ray <rayanon004@gmail.com>
Co-authored-by: Shahidh K Muhammed <shahidh@hasura.io>
Co-authored-by: soorajshankar <soorajshankar@users.noreply.github.com>
Co-authored-by: Sooraj Sanker <sooraj@Soorajs-MacBook-Pro.local>
Co-authored-by: Karthikeyan Chinnakonda <karthikeyan@hasura.io>
Co-authored-by: Aleksandra Sikora <ola.zxcvbnm@gmail.com>
This commit is contained in:
Tirumarai Selvan 2020-05-27 20:32:58 +05:30 committed by GitHub
parent c1197be208
commit c0d2bc6653
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
185 changed files with 7889 additions and 784 deletions

View File

@ -6,6 +6,7 @@ FROM phadej/ghc:8.10.1-stretch
ARG docker_ver="17.09.0-ce"
ARG postgres_ver="12"
ARG node_ver="8.x"
# Install GNU make, curl, git and docker client. Required to build the server
RUN apt-get -y update \
@ -14,6 +15,8 @@ RUN apt-get -y update \
&& curl -s https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
&& apt-get -y update \
&& apt-get install -y g++ gcc libc6-dev libpq-dev libffi-dev libgmp-dev make xz-utils zlib1g-dev git gnupg upx netcat python3 python3-pip postgresql-client-${postgres_ver} postgresql-client-common \
&& curl -sL https://deb.nodesource.com/setup_${node_ver} | bash - \
&& apt-get install -y nodejs \
&& curl -Lo /tmp/docker-${docker_ver}.tgz https://download.docker.com/linux/static/stable/x86_64/docker-${docker_ver}.tgz \
&& tar -xz -C /tmp -f /tmp/docker-${docker_ver}.tgz \
&& mv /tmp/docker/* /usr/bin \

View File

@ -188,6 +188,13 @@ echo -e "INFO: Logs Folder : $OUTPUT_FOLDER\n"
pip3 install -r requirements.txt
# node js deps
curl -sL https://deb.nodesource.com/setup_8.x | bash -
apt-get install -y nodejs
npm_config_loglevel=error npm install $PYTEST_ROOT/remote_schemas/nodejs/
npm install apollo-server graphql
mkdir -p "$OUTPUT_FOLDER/hpc"
export EVENT_WEBHOOK_HEADER="MyEnvValue"

View File

@ -2,6 +2,19 @@
## Next release
### Remote Joins
Remote Joins extend the concept of joining data across tables, to being able to join data across tables and remote schemas.
It works similarly to table relationships. Head to the `Relationship` tab on your table page and define a remote relationship:
1. give a name for the relationship
2. select the remote schema
3. give the join configuration from table columns to remote schema fields.
[Add docs links]
[Add console screenshot]
### Scheduled Triggers
A scheduled trigger can be used to execute custom business logic based on time. There are two types of timing events: cron-based or timestamp-based.
@ -10,7 +23,8 @@ A cron trigger will be useful when something needs to be done periodically. For
You can also schedule one-off events based on a timestamp. For example, a new scheduled event can be created for 2 weeks from when a user signs up to send them an email about their experience.
<Add docs links>
[Add docs links]
[Add console screenshot]
(close #1914)

View File

@ -160,6 +160,10 @@
"no-unused-expressions": "off",
"no-console": "off",
"prefer-destructuring": "off",
"jsx-a11y/click-events-have-key-events": "off",
"jsx-a11y/anchor-is-valid": "off",
"jsx-a11y/interactive-supports-focus": "off",
"no-restricted-properties": "off",
"react/no-danger": "off",
"react/no-array-index-key": "off"
}

View File

@ -1342,6 +1342,19 @@ code {
cursor: not-allowed;
}
.wd300Px {
width: 300px;
}
.overflowAuto {
overflow: auto;
}
.tooltip {
cursor: pointer;
color: #4D4D4D;
}
.hiddenMoreWidth {
width: 300px;
}

View File

@ -0,0 +1,27 @@
import React from 'react';
import OverlayTrigger from 'react-bootstrap/lib/OverlayTrigger';
import Tooltip from 'react-bootstrap/lib/Tooltip';

type Props = {
  children: React.ReactElement;
  message: string;
  placement?: string;
};

/**
 * Wraps `children` in a tooltip overlay when `message` is non-empty;
 * with an empty message the children are rendered untouched.
 */
const OverlayMessage: React.FC<Props> = ({
  message,
  children,
  placement = 'left',
}) => {
  if (!message) {
    return children;
  }
  return (
    <OverlayTrigger
      placement={placement}
      overlay={<Tooltip id={message}>{message}</Tooltip>}
    >
      {children}
    </OverlayTrigger>
  );
};

export default OverlayMessage;

View File

@ -28,6 +28,17 @@ export const isNumber = value => {
return typeof value === 'number';
};
// Returns true when `value` is a number with a fractional part.
// Fixed: the old parseInt/parseFloat comparison was subtly wrong —
// parseFloat takes no radix argument, and parseInt coerces the number
// through its string form, so e.g. 1e21 ("1e+21" -> 1) was misreported
// as a float. NaN/Infinity still report true, matching prior behavior.
export const isFloat = value => {
  return typeof value === 'number' && !Number.isInteger(value);
};
// True only for primitive booleans (not Boolean objects or truthy values).
export const isBoolean = value => typeof value === 'boolean';
export const isPromise = value => {
if (!value) return false;
return value.constructor.name === 'Promise';

View File

@ -314,3 +314,22 @@ export const getFetchManualTriggersQuery = tableName => ({
},
},
});
// Build the v1 query payload for creating (isNew) or updating a remote
// relationship; `args` is passed through as the query arguments.
export const getSaveRemoteRelQuery = (args, isNew) => {
  const action = isNew ? 'create' : 'update';
  return {
    type: `${action}_remote_relationship`,
    args,
  };
};
// Build the v1 query payload for deleting the remote relationship `name`
// on `table`.
export const getDropRemoteRelQuery = (name, table) => {
  return {
    type: 'delete_remote_relationship',
    args: { name, table },
  };
};
// Build the v1 query payload for introspecting a remote schema by name.
export const getRemoteSchemaIntrospectionQuery = remoteSchemaName => {
  return {
    type: 'introspect_remote_schema',
    args: { name: remoteSchemaName },
  };
};

View File

@ -26,6 +26,7 @@ import {
fetchTrackedTableFkQuery,
fetchTableListQuery,
fetchTrackedTableListQuery,
fetchTrackedTableRemoteRelationshipQuery,
mergeLoadSchemaData,
} from './utils';
@ -195,6 +196,30 @@ const initQueries = {
},
};
/**
 * Redux thunk: replace the cached `remote_relationships` of one table in
 * `state.tables.allSchemas` and re-dispatch the full schema list.
 * No-op when the table is not present in the cache.
 */
export const mergeRemoteRelationshipsWithSchema = (
  remoteRelationships,
  table
) => {
  return (dispatch, getState) => {
    const { allSchemas } = getState().tables;
    const isTargetTable = s =>
      s.table_name === table.name && s.table_schema === table.schema;

    const targetSchema = allSchemas.find(isTargetTable);
    if (!targetSchema) return;

    // Rebuild the list with the target table's remote relationships swapped in.
    const updatedSchemas = allSchemas.filter(s => !isTargetTable(s));
    updatedSchemas.push({
      ...targetSchema,
      remote_relationships: remoteRelationships,
    });

    dispatch({
      type: LOAD_SCHEMA,
      allSchemas: updatedSchemas,
    });
  };
};
const fetchTrackedFunctions = () => {
return (dispatch, getState) => {
const url = Endpoints.getSchema;
@ -290,6 +315,7 @@ const loadSchema = configOptions => {
fetchTrackedTableListQuery(configOptions), // v1/query
fetchTrackedTableFkQuery(configOptions),
fetchTrackedTableReferencedFkQuery(configOptions),
fetchTrackedTableRemoteRelationshipQuery(configOptions),
],
};
@ -302,11 +328,17 @@ const loadSchema = configOptions => {
return dispatch(requestAction(url, options)).then(
data => {
const tableList = JSON.parse(data[0].result[1]);
const fkList = JSON.parse(data[2].result[1]);
const refFkList = JSON.parse(data[3].result[1]);
const remoteRelationships = data[4];
const mergedData = mergeLoadSchemaData(
JSON.parse(data[0].result[1]),
tableList,
data[1],
JSON.parse(data[2].result[1]),
JSON.parse(data[3].result[1])
fkList,
refFkList,
remoteRelationships
);
const { inconsistentObjects } = getState().metadata;

View File

@ -1,3 +1,5 @@
import { defaultRemoteRelationship } from './TableRelationships/Actions';
const defaultCurFilter = {
where: { $and: [{ '': { '': '' } }] },
limit: 10,
@ -117,6 +119,11 @@ const defaultModifyState = {
colMappings: [{ column: '', refColumn: '' }],
isToggled: false,
},
remoteRelationships: {
remoteSchema: {},
relationships: [{ ...defaultRemoteRelationship }],
fetchedRemoteRelationships: false,
},
rootFieldsEdit: {
select: '',
select_by_pk: '',

View File

@ -38,6 +38,11 @@ import {
MANUAL_REL_RESET,
REL_SELECTION_CHANGED,
REL_ADD_NEW_CLICKED,
INTROSPECTING_REMOTE_SCHEMA,
INTROSPECTION_ERROR,
INTROSPECTION_SUCCESSFUL,
SET_REMOTE_RELATIONSHIPS,
FETCHED_REMOTE_RELATIONSHIPS,
} from '../TableRelationships/Actions';
// TABLE PERMISSIONS
@ -584,6 +589,48 @@ const modifyReducer = (tableName, schemas, modifyStateOrig, action) => {
...modifyState,
uniqueKeyModify: action.keys,
};
case SET_REMOTE_RELATIONSHIPS:
return {
...modifyState,
remoteRelationships: {
...modifyState.remoteRelationships,
relationships: action.remoteRelationships,
},
};
case INTROSPECTING_REMOTE_SCHEMA:
return {
...modifyState,
remoteRelationships: {
...modifyState.remoteRelationships,
loading: true,
},
};
case INTROSPECTION_ERROR:
return {
...modifyState,
remoteRelationships: {
...modifyState.remoteRelationships,
loading: false,
error: true,
},
};
case INTROSPECTION_SUCCESSFUL:
return {
...modifyState,
remoteRelationships: {
...modifyState.remoteRelationships,
loading: false,
},
};
case FETCHED_REMOTE_RELATIONSHIPS:
return {
...modifyState,
remoteRelationships: {
...modifyState.remoteRelationships,
fetchedRemoteRelationships: true,
},
};
case TOGGLE_ENUM:
return {
...modifyState,

View File

@ -3,7 +3,16 @@ import inflection from 'inflection';
import { makeMigrationCall, updateSchemaInfo } from '../DataActions';
import gqlPattern, { gqlRelErrorNotif } from '../Common/GraphQLValidation';
import { showErrorNotification } from '../../Common/Notification';
import {
getSaveRemoteRelQuery,
getDropRemoteRelQuery,
} from '../../../Common/utils/v1QueryUtils';
import { getConfirmation } from '../../../Common/utils/jsUtils';
import suggestedRelationshipsRaw from './autoRelations';
import {
getRemoteRelPayload,
parseRemoteRelationship,
} from './RemoteRelationships/utils';
export const SET_MANUAL_REL_ADD = 'ModifyTable/SET_MANUAL_REL_ADD';
export const MANUAL_REL_SET_TYPE = 'ModifyTable/MANUAL_REL_SET_TYPE';
@ -15,6 +24,178 @@ export const REL_SELECTION_CHANGED = 'ModifyTable/REL_SELECTION_CHANGED';
export const MANUAL_REL_NAME_CHANGED = 'ModifyTable/MANUAL_REL_NAME_CHANGED';
export const REL_NAME_CHANGED = 'ModifyTable/REL_NAME_CHANGED';
export const REL_ADD_NEW_CLICKED = 'ModifyTable/REL_ADD_NEW_CLICKED';
// Action types for the remote-relationship UI state.
export const FETCHING_REMOTE_RELATIONSHIPS =
  'ModifyTable/FETCHING_REMOTE_RELATIONSHIPS';
export const FETCHED_REMOTE_RELATIONSHIPS =
  'ModifyTable/FETCHED_REMOTE_RELATIONSHIPS';
export const SET_REMOTE_RELATIONSHIPS = 'ModifyTable/SET_REMOTE_RELATIONSHIPS';
export const INTROSPECTING_REMOTE_SCHEMA =
  'ModifyTable/INTROSPECTING_REMOTE_SCHEMA';
export const INTROSPECTION_ERROR = 'ModifyTable/INTROSPECTION_ERROR';
// Fixed: the value previously read 'ModifyTable/SET_INTROSPECTION_SCHEMA',
// inconsistent with the constant's name. Dispatching and reducer matching go
// through this exported constant, so correcting the literal is safe.
export const INTROSPECTION_SUCCESSFUL =
  'ModifyTable/INTROSPECTION_SUCCESSFUL';

// Blank slate for the "add remote relationship" form.
export const defaultRemoteRelationship = {
  name: '',
  remoteSchema: '',
  remoteField: [],
};
/**
 * Redux thunk: create (when `existingRel` is falsy) or update a remote
 * relationship from the form `state`, applied through a migration call.
 *
 * @param {object} state - relationship form state; reads `name`,
 *   `remoteSchema` and `table`
 * @param {object} existingRel - the server-side relationship being edited,
 *   or undefined/null when creating a new one
 * @param {function} [successCallback] - invoked after the migration succeeds
 * @param {function} [errorCallback] - invoked on payload-build or migration
 *   failure
 */
export const saveRemoteRelationship = (
  state,
  existingRel,
  successCallback,
  errorCallback
) => {
  return (dispatch, getState) => {
    const isNew = !existingRel;

    // The relationship name must be a valid GraphQL identifier.
    if (!gqlPattern.test(state.name)) {
      return dispatch(
        showErrorNotification(
          gqlRelErrorNotif[0],
          gqlRelErrorNotif[1],
          gqlRelErrorNotif[2],
          gqlRelErrorNotif[3]
        )
      );
    }
    if (!state.remoteSchema) {
      return dispatch(showErrorNotification('Remote schema is required'));
    }

    // Table currently open in the console.
    const table = {
      schema: getState().tables.currentSchema,
      name: getState().tables.currentTable,
    };
    const errorMsg = `${
      isNew ? 'Creating' : 'Updating'
    } remote relationship failed`;

    // getRemoteRelPayload throws when the form state cannot be turned into a
    // valid create/update payload; surface that as an error notification.
    let remoteRelQueryArgs;
    try {
      remoteRelQueryArgs = getRemoteRelPayload(state);
    } catch (e) {
      if (errorCallback) {
        errorCallback();
      }
      return dispatch(showErrorNotification(errorMsg, e.message));
    }

    const upQuery = [getSaveRemoteRelQuery(remoteRelQueryArgs, !existingRel)];
    const downQuery = [];
    if (isNew) {
      // Down migration for a create is a drop of the new relationship.
      // NOTE(review): this uses `state.table` while the migration name below
      // uses the locally derived `table` — confirm both refer to the same table.
      downQuery.push(getDropRemoteRelQuery(state.name, state.table));
    } else {
      // Down migration for an update restores the pre-edit definition.
      const downQueryArgs = getSaveRemoteRelQuery(
        getRemoteRelPayload(parseRemoteRelationship(existingRel)),
        isNew
      );
      downQuery.push(downQueryArgs);
    }

    // Apply migrations
    // NOTE(review): the name says "create" even when updating — confirm intended.
    const migrationName = `table_${table.name}_create_remote_relationship_${state.name}`;
    const requestMsg = `${
      isNew ? 'Creating' : 'Updating'
    } remote relationship...`;
    const successMsg = `Successfully ${
      isNew ? 'created' : 'updated'
    } remote relationship`;

    const customOnSuccess = () => {
      if (successCallback) {
        successCallback();
      }
    };
    const customOnError = () => {
      if (errorCallback) {
        errorCallback();
      }
    };

    // Rename relationship should fetch entire schema info.
    makeMigrationCall(
      dispatch,
      getState,
      upQuery,
      downQuery,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  };
};
/**
 * Redux thunk: delete an existing remote relationship after an explicit user
 * confirmation, applied through a migration call.
 *
 * @param {object} state - relationship form state; reads `table` and `name`
 * @param {object} existingRel - server-side relationship to drop; reads
 *   `remote_relationship_name` and is re-parsed for the down migration
 * @param {function} [successCallback] - invoked after the migration succeeds
 * @param {function} [errorCallback] - invoked on user cancel or migration
 *   failure
 */
export const dropRemoteRelationship = (
  state,
  existingRel,
  successCallback,
  errorCallback
) => {
  return (dispatch, getState) => {
    // Destructive action: bail out (and notify the caller) unless confirmed.
    if (
      !getConfirmation('This will permanently delete the remote relationship')
    ) {
      if (errorCallback) {
        errorCallback();
      }
      return;
    }

    // Down migration recreates the relationship from its current definition.
    const downQuery = [
      getSaveRemoteRelQuery(
        getRemoteRelPayload(parseRemoteRelationship(existingRel)),
        true
      ),
    ];
    const table = state.table;
    const upQuery = [
      getDropRemoteRelQuery(existingRel.remote_relationship_name, table),
    ];

    // Apply migrations
    // NOTE(review): uses `state.name` while the drop query uses
    // `existingRel.remote_relationship_name` — confirm `state.name` is set here.
    const migrationName = `table_${table.name}_drop_remote_relationship_${state.name}`;
    const requestMsg = 'Deleting remote relationship...';
    const successMsg = 'Successfully deleted remote relationship';
    const errorMsg = 'Deleting remote relationship failed';

    const customOnSuccess = () => {
      if (successCallback) {
        successCallback();
      }
    };
    const customOnError = () => {
      if (errorCallback) {
        errorCallback();
      }
    };

    // Rename relationship should fetch entire schema info.
    makeMigrationCall(
      dispatch,
      getState,
      upQuery,
      downQuery,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  };
};
// Action creator: overwrite the remote relationships shown in the form.
export const setRemoteRelationships = remoteRelationships => {
  return {
    type: SET_REMOTE_RELATIONSHIPS,
    remoteRelationships,
  };
};
const resetRelationshipForm = () => ({ type: REL_RESET });
const resetManualRelationshipForm = () => ({ type: MANUAL_REL_RESET });

View File

@ -1,7 +1,6 @@
import React, { Component } from 'react';
import React, { useEffect } from 'react';
import PropTypes from 'prop-types';
import TableHeader from '../TableCommon/TableHeader';
import { RESET } from '../TableModify/ModifyActions';
import {
addNewRelClicked,
addRelNewFromStateMigrate,
@ -19,9 +18,13 @@ import { getRelDef, getObjArrRelList } from './utils';
import Button from '../../../Common/Button/Button';
import AddManualRelationship from './AddManualRelationship';
import RemoteRelationships from './RemoteRelationships/RemoteRelationships';
import suggestedRelationshipsRaw from './autoRelations';
import RelationshipEditor from './RelationshipEditor';
import { NotFoundError } from '../../../Error/PageNotFound';
import { fetchRemoteSchemas } from '../../RemoteSchema/Actions';
import styles from '../TableModify/ModifyTable.scss';
import tableStyles from '../../../Common/TableCommon/TableStyles.scss';
const addRelationshipCellView = (
dispatch,
@ -31,8 +34,6 @@ const addRelationshipCellView = (
relMetaData,
tableSchema
) => {
const tableStyles = require('../../../Common/TableCommon/TableStyles.scss');
const onAdd = e => {
e.preventDefault();
dispatch(relSelectionChanged(rel));
@ -125,9 +126,6 @@ const AddRelationship = ({
cachedRelationshipData,
dispatch,
}) => {
const styles = require('../TableModify/ModifyTable.scss');
const tableStyles = require('../../../Common/TableCommon/TableStyles.scss');
const cTable = allSchemas.find(
t => t.table_name === tableName && t.table_schema === currentSchema
);
@ -296,195 +294,202 @@ const AddRelationship = ({
);
};
class Relationships extends Component {
componentDidMount() {
const { dispatch, tableName } = this.props;
dispatch({ type: RESET });
const Relationships = ({
tableName,
allSchemas,
ongoingRequest,
lastError,
lastFormError,
lastSuccess,
dispatch,
relAdd,
remoteSchemas,
manualRelAdd,
currentSchema,
migrationMode,
schemaList,
readOnlyMode,
}) => {
useEffect(() => {
dispatch(resetRelationshipForm());
dispatch(setTable(tableName));
dispatch(fetchRemoteSchemas());
}, []);
const tableSchema = allSchemas.find(
t => t.table_name === tableName && t.table_schema === currentSchema
);
if (!tableSchema) {
// throw a 404 exception
throw new NotFoundError();
}
render() {
const {
tableName,
allSchemas,
ongoingRequest,
lastError,
lastFormError,
lastSuccess,
dispatch,
relAdd,
manualRelAdd,
currentSchema,
migrationMode,
readOnlyMode,
schemaList,
} = this.props;
const styles = require('../TableModify/ModifyTable.scss');
const tableStyles = require('../../../Common/TableCommon/TableStyles.scss');
const tableSchema = allSchemas.find(
t => t.table_name === tableName && t.table_schema === currentSchema
let alert = null;
if (ongoingRequest) {
alert = (
<div className="hidden alert alert-warning" role="alert">
Saving...
</div>
);
if (!tableSchema) {
// throw a 404 exception
throw new NotFoundError();
}
let alert = null;
if (ongoingRequest) {
alert = (
<div className="hidden alert alert-warning" role="alert">
Saving...
</div>
);
} else if (lastError) {
alert = (
<div className="hidden alert alert-danger" role="alert">
Error: {JSON.stringify(lastError)}
</div>
);
} else if (lastSuccess) {
alert = (
<div className="hidden alert alert-success" role="alert">
Saved!
</div>
);
} else if (lastFormError) {
alert = (
<div className="hidden alert alert-warning" role="alert">
{lastFormError}
</div>
);
}
const objArrRelList = getObjArrRelList(tableSchema.relationships);
let addedRelationshipsView = null;
if (objArrRelList.length > 0) {
addedRelationshipsView = (
<div className={tableStyles.tableContainer}>
<table
className={`${tableStyles.table} table table-bordered table-striped table-hover`}
>
<thead>
<tr>
{['Object relationships', 'Array relationships'].map((s, i) => (
<th key={i}>{s}</th>
))}
</tr>
</thead>
<tbody>
{objArrRelList.map(rel => {
const column1 = rel.objRel ? (
<RelationshipEditor
dispatch={dispatch}
key={rel.objRel.rel_name}
readOnlyMode={readOnlyMode}
relConfig={findAllFromRel(
allSchemas,
tableSchema,
rel.objRel
)}
/>
) : (
<td />
);
const column2 = rel.arrRel ? (
<RelationshipEditor
key={rel.arrRel.rel_name}
dispatch={dispatch}
readOnlyMode={readOnlyMode}
relConfig={findAllFromRel(
allSchemas,
tableSchema,
rel.arrRel
)}
/>
) : (
<td />
);
return (
<tr>
{column1}
{column2}
</tr>
);
})}
</tbody>
</table>
</div>
);
}
const getAddRelSection = () => {
if (readOnlyMode) {
return null;
}
let addRelSection = null;
if (relAdd.isActive) {
addRelSection = (
<div className={styles.activeEdit}>
<AddRelationship
tableName={tableName}
currentSchema={currentSchema}
allSchemas={allSchemas}
cachedRelationshipData={relAdd}
dispatch={dispatch}
/>
<hr />
<AddManualRelationship
tableSchema={tableSchema}
allSchemas={allSchemas}
schemaList={schemaList}
relAdd={manualRelAdd}
dispatch={dispatch}
/>
</div>
);
} else {
addRelSection = (
<Button
type="submit"
color="white"
size="sm"
onClick={() => {
dispatch(addNewRelClicked());
}}
>
+ Add relationship
</Button>
);
}
return addRelSection;
};
return (
<div className={`${styles.container} container-fluid`}>
<TableHeader
dispatch={dispatch}
table={tableSchema}
tabName="relationships"
migrationMode={migrationMode}
readOnlyMode={readOnlyMode}
/>
<br />
<div className={`${styles.padd_left_remove} container-fluid`}>
<div className={`${styles.padd_left_remove} col-xs-10 col-md-10`}>
<h4 className={styles.subheading_text}>Relationships</h4>
{addedRelationshipsView}
<br />
{getAddRelSection()}
</div>
</div>
<div className={`${styles.fixed} hidden`}>{alert}</div>
} else if (lastError) {
alert = (
<div className="hidden alert alert-danger" role="alert">
Error: {JSON.stringify(lastError)}
</div>
);
} else if (lastSuccess) {
alert = (
<div className="hidden alert alert-success" role="alert">
Saved!
</div>
);
} else if (lastFormError) {
alert = (
<div className="hidden alert alert-warning" role="alert">
{lastFormError}
</div>
);
}
}
const objArrRelList = getObjArrRelList(tableSchema.relationships);
let addedRelationshipsView = null;
if (objArrRelList.length > 0) {
addedRelationshipsView = (
<div className={tableStyles.tableContainer}>
<table
className={`${tableStyles.table} table table-bordered table-striped table-hover`}
>
<thead>
<tr>
{['Object relationships', 'Array relationships'].map((s, i) => (
<th key={i}>{s}</th>
))}
</tr>
</thead>
<tbody>
{objArrRelList.map(rel => {
const column1 = rel.objRel ? (
<RelationshipEditor
dispatch={dispatch}
key={rel.objRel.rel_name}
readOnlyMode={readOnlyMode}
relConfig={findAllFromRel(
allSchemas,
tableSchema,
rel.objRel
)}
/>
) : (
<td />
);
const column2 = rel.arrRel ? (
<RelationshipEditor
key={rel.arrRel.rel_name}
dispatch={dispatch}
readOnlyMode={readOnlyMode}
relConfig={findAllFromRel(
allSchemas,
tableSchema,
rel.arrRel
)}
/>
) : (
<td />
);
return (
<tr>
{column1}
{column2}
</tr>
);
})}
</tbody>
</table>
</div>
);
}
const getAddRelSection = () => {
if (readOnlyMode) {
return null;
}
let addRelSection = null;
if (relAdd.isActive) {
addRelSection = (
<div className={styles.activeEdit}>
<AddRelationship
tableName={tableName}
currentSchema={currentSchema}
allSchemas={allSchemas}
cachedRelationshipData={relAdd}
dispatch={dispatch}
/>
<hr />
<AddManualRelationship
tableSchema={tableSchema}
allSchemas={allSchemas}
schemaList={schemaList}
relAdd={manualRelAdd}
dispatch={dispatch}
/>
</div>
);
} else {
addRelSection = (
<Button
type="submit"
color="white"
size="sm"
onClick={() => {
dispatch(addNewRelClicked());
}}
>
+ Add relationship
</Button>
);
}
return addRelSection;
};
const existingRemoteRelationships = tableSchema.remote_relationships;
return (
<div className={`${styles.container} container-fluid`}>
<TableHeader
dispatch={dispatch}
table={tableSchema}
tabName="relationships"
migrationMode={migrationMode}
/>
<br />
<div className={`${styles.padd_left_remove} container-fluid`}>
<div
className={`${styles.padd_left_remove} col-xs-10 col-md-10 ${styles.add_mar_bottom}`}
>
<h4 className={styles.subheading_text}>Table Relationships</h4>
{addedRelationshipsView}
<br />
{getAddRelSection()}
</div>
<div className={`${styles.padd_left_remove} col-xs-10 col-md-10`}>
<h4 className={styles.subheading_text}>Remote Relationships</h4>
<RemoteRelationships
relationships={existingRemoteRelationships}
reduxDispatch={dispatch}
table={tableSchema}
remoteSchemas={remoteSchemas}
/>
</div>
</div>
<div className={`${styles.fixed} hidden`}>{alert}</div>
</div>
);
};
Relationships.propTypes = {
tableName: PropTypes.string.isRequired,
@ -501,7 +506,7 @@ Relationships.propTypes = {
lastFormError: PropTypes.object,
lastSuccess: PropTypes.bool,
dispatch: PropTypes.func.isRequired,
serverVersion: PropTypes.string,
remoteSchemas: PropTypes.array.isRequired,
};
const mapStateToProps = (state, ownProps) => ({
@ -512,6 +517,8 @@ const mapStateToProps = (state, ownProps) => ({
readOnlyMode: state.main.readOnlyMode,
serverVersion: state.main.serverVersion,
schemaList: state.tables.schemaList,
remoteSchemas: state.remoteSchemas.listData.remoteSchemas.map(r => r.name),
adminHeaders: state.tables.dataHeaders,
...state.tables.modify,
});

View File

@ -1,24 +1,25 @@
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import TableHeader from '../TableCommon/TableHeader';
import { RESET } from '../TableModify/ModifyActions';
import { findAllFromRel } from '../utils';
import { getObjArrRelList } from './utils';
import { setTable, UPDATE_REMOTE_SCHEMA_MANUAL_REL } from '../DataActions';
import AddManualRelationship from './AddManualRelationship';
import RelationshipEditor from './RelationshipEditor';
import { NotFoundError } from '../../../Error/PageNotFound';
import RemoteRelationships from './RemoteRelationships/RemoteRelationships';
import { fetchRemoteSchemas } from '../../RemoteSchema/Actions';
class RelationshipsView extends Component {
componentDidMount() {
const { dispatch, currentSchema, tableName } = this.props;
dispatch({ type: RESET });
dispatch(setTable(tableName));
// Sourcing the current schema into manual relationship
dispatch({
type: UPDATE_REMOTE_SCHEMA_MANUAL_REL,
data: currentSchema,
});
dispatch(fetchRemoteSchemas());
}
render() {
@ -35,7 +36,9 @@ class RelationshipsView extends Component {
migrationMode,
readOnlyMode,
schemaList,
remoteSchemas,
} = this.props;
const styles = require('../TableModify/ModifyTable.scss');
const tableStyles = require('../../../Common/TableCommon/TableStyles.scss');
@ -132,6 +135,20 @@ class RelationshipsView extends Component {
);
}
const remoteRelationshipsSection = () => {
return (
<div className={`${styles.padd_left_remove} col-xs-10 col-md-10`}>
<h4 className={styles.subheading_text}>Remote Relationships</h4>
<RemoteRelationships
relationships={tableSchema.remote_relationships}
reduxDispatch={dispatch}
table={tableSchema}
remoteSchemas={remoteSchemas}
/>
</div>
);
};
return (
<div className={`${styles.container} container-fluid`}>
<TableHeader
@ -156,6 +173,7 @@ class RelationshipsView extends Component {
/>
<hr />
</div>
{remoteRelationshipsSection()}
</div>
<div className={`${styles.fixed} hidden`}>{alert}</div>
</div>
@ -177,6 +195,8 @@ RelationshipsView.propTypes = {
lastSuccess: PropTypes.bool,
dispatch: PropTypes.func.isRequired,
serverVersion: PropTypes.string,
remoteSchemas: PropTypes.array.isRequired,
featuresCompatibility: PropTypes.object,
};
const mapStateToProps = (state, ownProps) => ({
@ -187,6 +207,7 @@ const mapStateToProps = (state, ownProps) => ({
readOnlyMode: state.main.readOnlyMode,
serverVersion: state.main.serverVersion,
schemaList: state.tables.schemaList,
remoteSchemas: state.remoteSchemas.listData.remoteSchemas.map(r => r.name),
...state.tables.modify,
});

View File

@ -0,0 +1,41 @@
import React from 'react';
import styles from '../../TableModify/ModifyTable.scss';
import { RemoteRelationshipServer } from './utils';
import RemoteRelationshipList from './components/RemoteRelationshipList';
import { fetchRemoteSchemas } from '../../../RemoteSchema/Actions';

type Props = {
  relationships: RemoteRelationshipServer[];
  reduxDispatch: any;
  table: any;
  remoteSchemas: string[];
};

/**
 * Section listing a table's relationships to remote schemas.
 * Kicks off a remote-schema fetch once on mount.
 */
const RemoteRelationships: React.FC<Props> = props => {
  const { relationships, reduxDispatch, table, remoteSchemas } = props;

  React.useEffect(() => {
    reduxDispatch(fetchRemoteSchemas());
  }, []);

  return (
    <div>
      <div className={styles.add_mar_bottom}>
        Relationships to remote schemas
      </div>
      <div>
        <RemoteRelationshipList
          relationships={relationships}
          table={table}
          remoteSchemas={remoteSchemas}
          reduxDispatch={reduxDispatch}
        />
      </div>
    </div>
  );
};

export default RemoteRelationships;

View File

@ -0,0 +1,59 @@
@import "../../../../Common/Common.scss";

/* Container for the remote-schema field/argument tree. */
.schemaExplorerContainer {
  background-color: #f7f7f7;
  padding: 10px;
  border: 1px solid #ccc;
  border-radius: 4px;
}

.height200Px {
  height: 200px;
}

/* Skeleton placeholders shown while the remote schema is introspected. */
.skeletonItem {
  margin-bottom: 15px;
}

.skeletonCheckbox {
  width: 15px;
  height: 15px;
  background-color: white;
  margin-right: 15px;
}

.skeletonLabel {
  width: 200px;
  height: 15px;
  background-color: white;
}

.scalarColumnSelect {
  margin-left: 5px;
  border-radius: 4px;
  background-color: white;
  border: 1px solid #ccc;
}

/* Inline editor (select/input) for an argument's value inside the tree. */
.argValue {
  background-color: white;
  height: 30px;
  padding: 5px;
  color: #555555;
  font-style: normal;
}

/* Selectable field row in the tree. */
.fieldElement {
  color: rgb(31, 97, 160);
  font-style: normal;
}

.fieldElementDisabled {
  color: #8091a1;
  font-style: normal;
}

/* Argument row in the tree (italic, purple). */
.argElement {
  color: #8B2BB9;
  font-style: italic;
}

View File

@ -0,0 +1,38 @@
import React from 'react';
import Tooltip from '../../../../Common/Tooltip/Tooltip';
export const ColumnScalar = ({ argName }) => {
return (
<Tooltip
id="tooltip-remote-rel-scalar-column"
message={`The value for "${argName}" will be injected from this column value at runtime`}
/>
);
};
export const RelName = ({ tableName }) => {
return (
<Tooltip
id="tooltip-remote-rel-name"
message={`The name of the relationship. This will be added as a field under the{' '} "${tableName}" node in the GraphQL schema.`}
/>
);
};
export const RemoteSchema = ({ tableName }) => {
return (
<Tooltip
id="tooltip-remote-rel-remote-schema"
message={`The remote schema that you wish to join the "${tableName}" table with`}
/>
);
};
export const Configuration = () => {
return (
<Tooltip
id="tooltip-remote-rel-config"
message={`Form a GraphQL query and inject the table column values in place of scalar arguments`}
/>
);
};

View File

@ -0,0 +1,57 @@
import React from 'react';
import { TreeArgElement, ArgValueKind } from '../utils';
import styles from '../SchemaExplorer.scss';
import ArgValueElement from './ArgValue';

type Props = {
  arg: TreeArgElement;
  handleToggle: (a: TreeArgElement) => void;
  handleArgValueKindChange: (a: TreeArgElement, type: ArgValueKind) => void;
  handleArgValueChange: (a: TreeArgElement, value: string) => void;
  columns: string[];
};

/**
 * One (possibly nested) argument row in the remote-schema explorer: a
 * checkbox to include the argument and, for checked leaf arguments, an
 * editor for its value.
 */
const ArgElement: React.FC<Props> = ({
  arg,
  handleToggle,
  handleArgValueChange,
  handleArgValueKindChange,
  columns,
}) => {
  // Indent proportionally to nesting depth within the field tree.
  const style = {
    marginLeft: `${(arg.depth + arg.parentFieldDepth) * 20 + 20}px`,
  };
  const toggle = () => handleToggle(arg);
  return (
    <div
      style={style}
      className={`${styles.display_flex} ${styles.add_mar_bottom_mid} ${styles.argElement}`}
    >
      <div
        className={`${styles.add_mar_right_small} ${styles.cursorPointer}`}
        onClick={toggle}
        role="checkbox"
        aria-checked={arg.isChecked}
      >
        {/* Fixed: toggling is handled by the wrapping div's onClick; the
            checkbox had `checked` with no onChange, which triggers React's
            controlled-input warning — `readOnly` marks this intentional. */}
        <input
          checked={arg.isChecked}
          type="checkbox"
          readOnly
          className={`${styles.add_mar_right_small} ${styles.cursorPointer}`}
        />
        {arg.name}
      </div>
      {arg.isChecked && arg.isLeafArg && (
        <ArgValueElement
          value={arg.value}
          handleArgValueKindChange={e =>
            handleArgValueKindChange(arg, e.target.value as ArgValueKind)
          }
          handleArgValueChange={e => handleArgValueChange(arg, e.target.value)}
          columns={columns}
        />
      )}
    </div>
  );
};

export default ArgElement;

View File

@ -0,0 +1,65 @@
import React from 'react';
import { ArgValue } from '../utils';
import styles from '../SchemaExplorer.scss';

type Props = {
  value: ArgValue;
  handleArgValueKindChange: (e: React.ChangeEvent<HTMLSelectElement>) => void;
  handleArgValueChange: (e: React.BaseSyntheticEvent) => void;
  columns: string[];
};

/**
 * Editor for a leaf argument's value: choose whether the value comes from a
 * table column or a static literal, then pick the column / type the literal.
 */
const ArgValueElement: React.FC<Props> = ({
  columns,
  value,
  handleArgValueChange,
  handleArgValueKindChange,
}) => {
  const controlClass = `form-control ${styles.argValue}`;

  // Column picker, shown when the value kind is "column".
  const columnPicker = (
    <select
      value={value.value}
      className={controlClass}
      onChange={handleArgValueChange}
    >
      {!value.value && (
        <option key="arg-value-col-placeholder" value="">
          -- column-name --
        </option>
      )}
      {columns.map(o => (
        <option key={o} value={o}>
          {o}
        </option>
      ))}
    </select>
  );

  // Free-text input, shown when the value kind is "static".
  const staticInput = (
    <input
      type="text"
      value={value.value}
      placeholder="Value"
      className={controlClass}
      onChange={handleArgValueChange}
    />
  );

  return (
    <div className={styles.display_flex}>
      :&nbsp;
      <select
        onChange={handleArgValueKindChange}
        value={value.kind}
        className={controlClass}
      >
        <option key="arg-value-column" value="column">
          From Column
        </option>
        <option key="arg-value-static" value="static">
          From Static Value
        </option>
      </select>
      {value.kind === 'column' ? columnPicker : staticInput}
    </div>
  );
};

export default ArgValueElement;

View File

@ -0,0 +1,29 @@
import React from 'react';
import { RemoteRelationshipServer } from '../utils';
import styles from '../SchemaExplorer.scss';

type Props = {
  relationship?: RemoteRelationshipServer;
};

/**
 * Read-only one-line summary of a remote relationship: its name, source
 * table and the remote schema fields it joins to. Renders nothing when no
 * relationship is supplied.
 */
const Collapsed: React.FC<Props> = ({ relationship }) => {
  if (!relationship) {
    return null;
  }
  const { remote_relationship_name, table_name, definition } = relationship;
  return (
    <div className={styles.display_flex}>
      <div>
        <b>{`${remote_relationship_name}`}</b>&nbsp;
      </div>
      <div>
        <i>
          {`- ${table_name}${definition.remote_schema} . ${Object.keys(
            definition.remote_field
          )}`}
        </i>
      </div>
    </div>
  );
};

export default Collapsed;

View File

@ -0,0 +1,93 @@
import React from 'react';
import globals from '../../../../../../Globals';
import { useIntrospectionSchemaRemote } from '../../../../RemoteSchema/graphqlUtils';
import {
RemoteRelationship,
TreeArgElement,
ArgValueKind,
TreeFieldElement,
buildSchemaTree,
} from '../utils';
import { LoadingSkeleton, NoRemoteSchemaPlaceholder } from './PlaceHolder';
import ArgElement from './ArgElement';
import FieldElement from './FieldElement';
import styles from '../SchemaExplorer.scss';
type Props = {
relationship: RemoteRelationship;
toggleArg: (a: TreeArgElement) => void;
toggleField: (f: TreeFieldElement) => void;
handleArgValueKindChange: (a: TreeArgElement, type: ArgValueKind) => void;
handleArgValueChange: (a: TreeArgElement, value: string) => void;
remoteSchemaName: string;
columns: string[];
};
/**
 * Renders the remote schema explorer tree for one relationship:
 * introspects the selected remote schema and lists its fields/arguments
 * as toggleable elements.
 */
const Explorer: React.FC<Props> = ({
  relationship,
  toggleArg,
  toggleField,
  handleArgValueChange,
  handleArgValueKindChange,
  remoteSchemaName,
  columns,
}) => {
  // The hook must run unconditionally (before any early return) to satisfy
  // the rules of hooks.
  const { loading, error, schema, introspect } = useIntrospectionSchemaRemote(
    remoteSchemaName,
    {
      'x-hasura-admin-secret': globals.adminSecret,
    }
  );
  if (!remoteSchemaName) {
    return <NoRemoteSchemaPlaceholder />;
  }
  if (loading) {
    return <LoadingSkeleton />;
  }
  if (error || !schema) {
    return (
      <div>
        Error introspecting remote schema.{' '}
        <a onClick={introspect} className={styles.cursorPointer} role="button">
          {' '}
          Try again{' '}
        </a>
      </div>
    );
  }
  // Flatten the schema into the ordered list of renderable tree elements
  // (fields and their arguments) for the current selection.
  const tree = buildSchemaTree(relationship, schema || undefined);
  return (
    <div className={styles.schemaExplorerContainer}>
      {tree.map(element => {
        switch (element.kind) {
          case 'argument': {
            const el: TreeArgElement = element;
            return (
              <ArgElement
                arg={el}
                handleToggle={toggleArg}
                handleArgValueChange={handleArgValueChange}
                handleArgValueKindChange={handleArgValueKindChange}
                columns={columns}
                key={`arg-element-${el.name}-${el.depth}`}
              />
            );
          }
          case 'field': {
            const el: TreeFieldElement = element;
            // Fix: elements rendered from a map need a stable "key" prop;
            // this one was missing.
            return (
              <FieldElement
                field={el}
                handleToggle={toggleField}
                key={`field-element-${el.name}-${el.depth}`}
              />
            );
          }
          default:
            return null;
        }
      })}
    </div>
  );
};
export default Explorer;

View File

@ -0,0 +1,50 @@
import React from 'react';
import { TreeFieldElement } from '../utils';
import OverlayMessage from '../../../../../Common/OverlayMessage';
import styles from '../SchemaExplorer.scss';
type Props = {
field: TreeFieldElement;
handleToggle: (a: TreeFieldElement) => void;
};
const FieldElement: React.FC<Props> = ({ field, handleToggle }) => {
const style = {
marginLeft: `${field.depth * 20}px`,
};
const toggle = () => {
if (!field.enabled) {
return;
}
handleToggle(field);
};
const overlayMessage = field.enabled
? ''
: 'Only fields with arguments or subfields can be toggled';
return (
<OverlayMessage message={overlayMessage}>
<div
style={style}
className={`${styles.display_flex} ${styles.add_mar_bottom_mid} ${
field.enabled ? styles.fieldElement : styles.fieldElementDisabled
}`}
>
<div
className={`${styles.add_mar_right_small} ${styles.cursorPointer}`}
onClick={toggle}
role="checkbox"
aria-checked={field.isChecked}
>
<input
checked={field.isChecked}
type="checkbox"
className={`${styles.add_mar_right_small} ${styles.cursorPointer}`}
/>
{field.name}
</div>
</div>
</OverlayMessage>
);
};
export default FieldElement;

View File

@ -0,0 +1,29 @@
import React from 'react';
import styles from '../SchemaExplorer.scss';
// Shown in place of the explorer until the user picks a remote schema.
export const NoRemoteSchemaPlaceholder: React.FC = () => (
  <div
    className={`${styles.schemaExplorerContainer}`}
    style={{ overflow: 'auto' }}
  >
    <i>Select a remote schema first</i>
  </div>
);
// Greyed-out placeholder rows shown while the remote schema is introspected.
export const LoadingSkeleton = () => {
  // One checkbox + label skeleton row.
  const row = (
    <div className={`${styles.display_flex} ${styles.skeletonItem}`}>
      <div className={styles.skeletonCheckbox} />
      <div className={styles.skeletonLabel} />
    </div>
  );
  const rows = [];
  for (let i = 0; i < 5; i++) {
    rows.push(<div key={i}>{row}</div>);
  }
  return (
    <div className={`${styles.schemaExplorerContainer} ${styles.overflowAuto}`}>
      {rows}
    </div>
  );
};

View File

@ -0,0 +1,158 @@
import React from 'react';
import {
RemoteRelationship,
TreeArgElement,
TreeFieldElement,
ArgValueKind,
} from '../utils';
import {
Action as RemoteRelAction,
setName,
setRemoteSchema,
setArgValue,
setArgValueKind,
toggleArg,
toggleField,
} from '../state';
import styles from '../SchemaExplorer.scss';
import {
RelName as RelNameTooltip,
RemoteSchema as RemoteSchemaTooltip,
Configuration as ConfigTooltip,
} from '../Tooltips';
import Explorer from './Explorer';
type Props = {
table: any; // use "Table" type after ST is merged
remoteSchemas: string[];
isLast: boolean;
state: RemoteRelationship;
dispatch: React.Dispatch<RemoteRelAction>;
};
/**
 * Form for creating or editing one remote relationship: name input,
 * remote-schema dropdown and the schema Explorer for picking remote
 * fields/arguments. All form state lives in the parent reducer; this
 * component only dispatches actions via the passed-in `dispatch`.
 */
const RemoteRelEditor: React.FC<Props> = ({
  table,
  isLast,
  remoteSchemas,
  state,
  dispatch,
}) => {
  const handleNameChange = (e: React.ChangeEvent<HTMLInputElement>) => {
    e.persist();
    dispatch(setName(e.target.value));
  };
  const handleRemoteSchemaChange = (
    e: React.ChangeEvent<HTMLSelectElement>
  ) => {
    e.persist();
    dispatch(setRemoteSchema(e.target.value));
  };
  // Explorer callbacks, mapped 1:1 onto reducer actions.
  const handleFieldToggle = (field: TreeFieldElement) =>
    dispatch(toggleField(field));
  const handleArgToggle = (arg: TreeArgElement) => dispatch(toggleArg(arg));
  const handleArgValueKindChange = (
    arg: TreeArgElement,
    type: ArgValueKind
  ) => {
    dispatch(setArgValueKind(arg, type));
  };
  const handleArgValueChange = (arg: TreeArgElement, value: string) => {
    dispatch(setArgValue(arg, value));
  };
  // Column names offered as sources for column-valued arguments.
  const tableColumns: string[] = table.columns.map(
    (c: { column_name: string }) => {
      return c.column_name;
    }
  );
  return (
    <React.Fragment>
      <div>
        <div className={`${styles.add_mar_bottom}`}>
          <div
            className={`${styles.add_mar_bottom_mid} ${styles.display_flex}`}
          >
            <div className={styles.add_mar_right_small}>
              <b>Name</b>
            </div>
            <div>
              <RelNameTooltip tableName={table.table_name} />
            </div>
          </div>
          <div>
            <input
              type="text"
              className={`form-control ${styles.wd300Px}`}
              placeholder="name"
              value={state.name}
              onChange={handleNameChange}
              disabled={!isLast}
              title={!isLast ? 'Name cannot be changed' : undefined}
            />
          </div>
        </div>
        <div className={`${styles.add_mar_bottom}`}>
          <div
            className={`${styles.add_mar_bottom_mid} ${styles.display_flex} ${styles.add_mar_right_small}`}
          >
            <div className={styles.add_mar_right_small}>
              <b>Remote Schema:</b>
            </div>
            <div>
              <RemoteSchemaTooltip tableName={table.table_name} />
            </div>
          </div>
          <div>
            <select
              className={`form-control ${styles.wd300Px}`}
              value={state.remoteSchema}
              onChange={handleRemoteSchemaChange}
            >
              <option key="placeholder" value="">
                {' '}
                -- remote schema --
              </option>
              {remoteSchemas.map(s => {
                return (
                  <option key={s} value={s}>
                    {s}
                  </option>
                );
              })}
            </select>
          </div>
        </div>
        <div>
          <div
            className={`${styles.add_mar_bottom_mid} ${styles.display_flex} ${styles.add_mar_right_small}`}
          >
            <div className={styles.add_mar_right_small}>
              <b>Configuration:</b>
            </div>
            <div>
              <ConfigTooltip />
            </div>
          </div>
          <Explorer
            relationship={state}
            toggleArg={handleArgToggle}
            toggleField={handleFieldToggle}
            handleArgValueKindChange={handleArgValueKindChange}
            handleArgValueChange={handleArgValueChange}
            remoteSchemaName={state.remoteSchema}
            columns={tableColumns}
          />
        </div>
      </div>
    </React.Fragment>
  );
};
export default RemoteRelEditor;

View File

@ -0,0 +1,67 @@
import React from 'react';
import ExpandableEditor from '../../../../../Common/Layout/ExpandableEditor/Editor';
import { RemoteRelationshipServer } from '../utils';
import RemoteRelEditor from './RemoteRelEditor';
import RemoteRelCollapsedLabel from './EditorCollapsed';
import { useRemoteRelationship } from '../state';
import { saveRemoteRelationship, dropRemoteRelationship } from '../../Actions';
type Props = {
relationship?: RemoteRelationshipServer;
table: any;
isLast: boolean;
remoteSchemas: string[];
reduxDispatch: any; // TODO use Dispatch after ST is merged
};
/**
 * Wires one remote relationship into the generic ExpandableEditor:
 * reducer-backed form state, save/drop redux actions and the collapsed
 * label. `isLast` marks the trailing "add new" editor (no relationship yet).
 */
const EditorWrapper: React.FC<Props> = ({
  relationship,
  table,
  isLast,
  remoteSchemas,
  reduxDispatch,
}) => {
  // Local form state, initialised from the server relationship when editing.
  const { state, dispatch, reset } = useRemoteRelationship(table, relationship);
  const expandedContent = () => (
    <RemoteRelEditor
      table={table}
      remoteSchemas={remoteSchemas}
      isLast={isLast}
      state={state}
      dispatch={dispatch}
    />
  );
  const collapsedLabel = () => (
    <RemoteRelCollapsedLabel relationship={relationship} />
  );
  const saveFunc = (toggle: VoidFunction) => {
    reduxDispatch(saveRemoteRelationship(state, relationship, toggle));
  };
  // Only existing relationships can be removed.
  const removeFunc = !isLast
    ? () => {
        reduxDispatch(dropRemoteRelationship(state, relationship));
      }
    : null;
  const expandButtonText = isLast ? 'Add a remote relationship' : 'Edit';
  const collapseButtonText = isLast ? 'Cancel' : 'Close';
  return (
    <ExpandableEditor
      editorExpanded={expandedContent}
      property={`remote-relationship-${isLast ? 'add' : 'edit'}`}
      service="table-relationship"
      saveFunc={saveFunc}
      expandButtonText={expandButtonText}
      collapseButtonText={collapseButtonText}
      collapsedLabel={collapsedLabel}
      removeFunc={removeFunc}
      collapseCallback={reset}
    />
  );
};
export default EditorWrapper;

View File

@ -0,0 +1,41 @@
import React from 'react';
import { RemoteRelationshipServer } from '../utils';
import RemoteRelationshipEditor from './RemoteRelEditorWrapper';
type Props = {
relationships: RemoteRelationshipServer[];
table: any;
remoteSchemas: string[];
reduxDispatch: any; // TODO use Dispatch after ST is merged
};
/**
 * Renders one editor per existing remote relationship, plus a trailing
 * "add" editor (isLast) for creating a new one.
 */
const RemoteRelationshipList: React.FC<Props> = ({
  relationships,
  table,
  remoteSchemas,
  reduxDispatch,
}) => {
  return (
    <React.Fragment>
      {relationships.map(r => (
        <RemoteRelationshipEditor
          key={r.remote_relationship_name}
          relationship={r}
          table={table}
          remoteSchemas={remoteSchemas}
          reduxDispatch={reduxDispatch}
          isLast={false}
        />
      ))}
      <RemoteRelationshipEditor
        key="add-remote-rel"
        table={table}
        remoteSchemas={remoteSchemas}
        reduxDispatch={reduxDispatch}
        isLast
      />
    </React.Fragment>
  );
};
export default RemoteRelationshipList;

View File

@ -0,0 +1,254 @@
import React from 'react';
import {
compareRFArguments,
compareRemoteFields,
findRemoteField,
findRemoteFieldArgument,
findArgParentField,
defaultArgValue,
RemoteRelationship,
RemoteRelationshipServer,
parseRemoteRelationship,
TreeFieldElement,
TreeArgElement,
ArgValueKind,
} from './utils';
// Empty relationship scaffold for the given table — used before any user
// input and when resetting the form.
const getDefaultState = (table: any): RemoteRelationship => {
  return {
    name: '',
    remoteSchema: '',
    remoteFields: [],
    table: {
      name: table.table_name,
      schema: table.table_schema,
    },
  };
};
// Action creators for the remote relationship form reducer. Each returns a
// discriminated-union member ("as const" fixes the type tag the reducer
// switches on).

// Sets the relationship name.
export const setName = (data: string) => ({
  type: 'SetName' as const,
  data,
});
// Selects the remote schema the relationship joins to.
export const setRemoteSchema = (data: string) => ({
  type: 'SetRemoteSchema' as const,
  data,
});
// Checks/unchecks a field in the schema explorer tree.
export const toggleField = (data: TreeFieldElement) => ({
  type: 'ToggleField' as const,
  data,
});
// Checks/unchecks an argument of a selected field.
export const toggleArg = (data: TreeArgElement) => ({
  type: 'ToggleArg' as const,
  data,
});
// Switches an argument between "from column" and "from static value".
export const setArgValueKind = (
  arg: TreeArgElement,
  valueKind: ArgValueKind
) => ({
  type: 'ChangeArgValueKind' as const,
  data: {
    arg,
    valueKind,
  },
});
// Sets the column name or static value of an argument.
export const setArgValue = (arg: TreeArgElement, value: string) => ({
  type: 'ChangeArgValue' as const,
  data: {
    arg,
    value,
  },
});
// Replaces the whole form state (used by the reset logic in the hook).
export const resetState = (data: RemoteRelationship) => ({
  type: 'ResetState' as const,
  data,
});
// Union of all actions accepted by the reducer.
export type Action =
  | ReturnType<typeof toggleField>
  | ReturnType<typeof toggleArg>
  | ReturnType<typeof setName>
  | ReturnType<typeof setRemoteSchema>
  | ReturnType<typeof setArgValueKind>
  | ReturnType<typeof setArgValue>
  | ReturnType<typeof resetState>;
/**
 * Pure reducer for the remote relationship form state.
 *
 * Selection invariant: the remote field selection forms a single chain, so
 * toggling a field first clears every selection at the same depth or deeper.
 * (Unreachable `break` statements that followed `return`s were removed.)
 */
const reducer = (
  state: RemoteRelationship,
  action: Action
): RemoteRelationship => {
  switch (action.type) {
    case 'SetName': {
      return {
        ...state,
        name: action.data,
      };
    }
    case 'SetRemoteSchema': {
      return {
        ...state,
        remoteSchema: action.data,
      };
    }
    case 'ToggleField': {
      const changedField = action.data;
      const selectedField = findRemoteField(state.remoteFields, changedField);
      if (selectedField) {
        // Unchecking: drop this field and everything at or below its depth.
        return {
          ...state,
          remoteFields: state.remoteFields.filter(
            f => !(f.depth >= changedField.depth)
          ),
        };
      }
      // Checking: replace any selection at this depth (or deeper) with the
      // newly selected field, starting with no arguments checked.
      return {
        ...state,
        remoteFields: [
          ...state.remoteFields.filter(f => !(f.depth >= changedField.depth)),
          { ...changedField, arguments: [] },
        ],
      };
    }
    case 'ToggleArg': {
      const changedArg = action.data;
      return {
        ...state,
        remoteFields: state.remoteFields.map(rf => {
          // Only the field the argument belongs to is touched.
          if (
            rf.name === changedArg.parentField &&
            rf.depth === changedArg.parentFieldDepth
          ) {
            const selectedArg = findRemoteFieldArgument(
              rf.arguments,
              changedArg
            );
            if (selectedArg) {
              // Unchecking: remove the argument.
              return {
                ...rf,
                arguments: rf.arguments.filter(
                  a => !compareRFArguments(a, changedArg)
                ),
              };
            }
            // Checking: add the argument with its default value.
            return {
              ...rf,
              arguments: [
                ...rf.arguments,
                { ...changedArg, value: defaultArgValue },
              ],
            };
          }
          return rf;
        }),
      };
    }
    case 'ChangeArgValueKind': {
      const changedArg = action.data.arg;
      const parentField = findArgParentField(state.remoteFields, changedArg);
      if (parentField) {
        const newParentField = {
          ...parentField,
          arguments: parentField.arguments.map(a => {
            if (compareRFArguments(a, changedArg)) {
              // Switching to a column reference resets the type to String;
              // the current value is cleared either way.
              return {
                ...a,
                type: action.data.valueKind === 'column' ? 'String' : a.type,
                value: {
                  ...a.value,
                  kind: action.data.valueKind,
                  value: '',
                },
              };
            }
            return a;
          }),
        };
        return {
          ...state,
          remoteFields: state.remoteFields.map(f => {
            if (compareRemoteFields(f, parentField)) {
              return newParentField;
            }
            return f;
          }),
        };
      }
      return state;
    }
    case 'ChangeArgValue': {
      const changedArg = action.data.arg;
      const parentField = findArgParentField(state.remoteFields, changedArg);
      if (parentField) {
        const newParentField = {
          ...parentField,
          arguments: parentField.arguments.map(a => {
            if (compareRFArguments(a, changedArg)) {
              return {
                ...a,
                value: {
                  ...a.value,
                  value: action.data.value,
                },
              };
            }
            return a;
          }),
        };
        return {
          ...state,
          remoteFields: state.remoteFields.map(f => {
            if (compareRemoteFields(f, parentField)) {
              return newParentField;
            }
            return f;
          }),
        };
      }
      return state;
    }
    case 'ResetState': {
      return action.data;
    }
    default:
      return state;
  }
};
// type "table" once ST PR is merged
/**
 * Reducer-backed form state for one remote relationship.
 * Initialises from the server-side relationship when editing, or from an
 * empty scaffold when creating; `reset` restores that initial state and is
 * re-run whenever the `relationship` prop changes.
 */
export const useRemoteRelationship = (
  table: any,
  relationship?: RemoteRelationshipServer
) => {
  const [state, dispatch] = React.useReducer(
    reducer,
    relationship
      ? parseRemoteRelationship(relationship)
      : getDefaultState(table)
  );
  // Restore the initial state (also used as the editor's collapse callback).
  const reset = () => {
    dispatch(
      resetState(
        relationship
          ? parseRemoteRelationship(relationship)
          : getDefaultState(table)
      )
    );
  };
  React.useEffect(reset, [relationship]);
  return {
    state,
    dispatch,
    reset,
  };
};

View File

@ -0,0 +1,532 @@
import {
isInputObjectType,
isInterfaceType,
isEnumType,
isObjectType,
isScalarType,
GraphQLSchema,
GraphQLField,
GraphQLType,
GraphQLArgument,
GraphQLInputField,
} from 'graphql';
import {
isString,
isObject,
isFloat,
isNumber,
} from '../../../../Common/utils/jsUtils';
import { getUnderlyingType } from '../../../../../shared/utils/graphqlSchemaUtils';
// Source of an argument's value: a table column or a static literal.
export type ArgValueKind = 'column' | 'static';
// The value bound to an argument, with its GraphQL scalar type name.
export type ArgValue = {
  kind: ArgValueKind;
  value: string;
  type: string;
};
// Initial value for a newly-checked argument.
export const defaultArgValue: ArgValue = {
  kind: 'column',
  value: '',
  type: 'String',
};
// Client Type
// An argument selected on a remote field. `depth`/`parent` locate it inside
// (possibly nested) input objects; `parentField`/`parentFieldDepth` locate
// the field it belongs to.
export type RemoteFieldArgument = {
  name: string;
  depth: number;
  parentField: string;
  parentFieldDepth: number;
  isChecked: boolean;
  parent?: string;
  value: ArgValue;
  type: string;
};
// An argument as rendered in the explorer tree.
export interface TreeArgElement extends RemoteFieldArgument {
  isLeafArg: boolean;
  kind: 'argument';
}
// A field selected in the remote schema, with its checked arguments.
export type RemoteField = {
  name: string;
  depth: number;
  parent?: string;
  arguments: RemoteFieldArgument[];
};
// A field as rendered in the explorer tree; `enabled` is false for fields
// that cannot participate in a remote join.
export interface TreeFieldElement extends Omit<RemoteField, 'arguments'> {
  kind: 'field';
  isChecked: boolean;
  enabled: boolean;
}
// Any element of the flattened explorer tree.
export type RJSchemaTreeElement = TreeArgElement | TreeFieldElement;
// Client-side form state for one remote relationship.
export type RemoteRelationship = {
  name: string;
  remoteSchema: string;
  remoteFields: RemoteField[];
  table: {
    name: string;
    schema: string;
  };
};
// Server Type
// Remote field definition as stored on the server: nested single-child
// field objects, each with an arguments map.
type RemoteRelationshipFieldServer = {
  field?: Record<string, RemoteRelationshipFieldServer>;
  arguments: Record<string, any>;
};
// A remote relationship row as returned by the metadata API.
export type RemoteRelationshipServer = {
  remote_relationship_name: string;
  definition: {
    remote_field: Record<string, RemoteRelationshipFieldServer>;
    remote_schema: string;
    hasura_fields: string[];
  };
  table_name: string;
  table_schema: string;
};
/**
 * Derives the client-side ArgValue metadata for a server-side argument value.
 * - objects → null (nested arguments are flattened elsewhere)
 * - strings → column reference when prefixed with "$", static String otherwise
 * - booleans/numbers → static values with the inferred GraphQL type
 */
export const parseArgValue = (argValue: any): ArgValue | null => {
  if (isObject(argValue)) {
    return null;
  }
  if (isString(argValue)) {
    const isColumnReference = argValue.startsWith('$');
    return {
      value: isColumnReference ? argValue.substr(1) : argValue.toString(),
      kind: isColumnReference ? 'column' : 'static',
      type: 'String',
    };
  }
  if (typeof argValue === 'boolean') {
    return {
      kind: 'static',
      value: argValue.toString(),
      type: 'Boolean',
    };
  }
  // NOTE(review): argValue is assumed non-null here — a null/undefined value
  // would throw on toString(); confirm against server payloads.
  let inferredType = 'String';
  if (isNumber(argValue)) {
    inferredType = isFloat(argValue) ? 'Float' : 'Int';
  }
  return {
    kind: 'static',
    value: argValue.toString(),
    type: inferredType,
  };
};
// Converters and parsers
/**
 * Flattens a (possibly nested) server-side arguments object into the flat
 * client-side list, tracking nesting depth and the owning field. Leaf values
 * are parsed with parseArgValue; object values become a container argument
 * followed by their recursively flattened children.
 */
const serialiseArguments = (
  args: Record<string, any>,
  depth: number,
  parentField: string,
  parentFieldDepth: number,
  parent?: string
): RemoteFieldArgument[] => {
  let allArgs: RemoteFieldArgument[] = [];
  Object.keys(args).forEach(argName => {
    const argValue = args[argName];
    const argValueMetadata = parseArgValue(argValue);
    if (argValueMetadata) {
      // Leaf argument (string/number/boolean value).
      allArgs.push({
        name: argName,
        depth,
        parent,
        isChecked: false,
        parentField,
        parentFieldDepth,
        value: argValueMetadata,
        type: argValueMetadata.type,
      });
    } else {
      // Object value: emit the container argument, then its children one
      // nesting level deeper.
      allArgs = [
        ...allArgs,
        {
          name: argName,
          depth,
          parent,
          isChecked: false,
          parentField,
          parentFieldDepth,
          value: defaultArgValue,
          type: 'String',
        },
        ...serialiseArguments(
          argValue,
          depth + 1,
          parentField,
          parentFieldDepth,
          argName
        ),
      ];
    }
  });
  return allArgs;
};
/**
 * Walks a server-side remote field (a single-child chain) depth-first,
 * invoking `callback` with the flattened client representation of each level.
 */
const serialiseRemoteField = (
  name: string,
  depth: number,
  field: RemoteRelationshipFieldServer,
  callback: (f: RemoteField) => void,
  parent?: string
): void => {
  callback({
    name,
    depth,
    parent,
    arguments: serialiseArguments(field.arguments, 0, name, depth, undefined),
  });
  const children = field.field;
  if (!children) {
    return;
  }
  // The server stores at most one child field per level.
  const [childName] = Object.keys(children);
  serialiseRemoteField(childName, depth + 1, children[childName], callback, name);
};
/**
 * Converts a relationship as stored on the server into the client-side
 * representation used by the form reducer.
 */
export const parseRemoteRelationship = (
  relationship: RemoteRelationshipServer
): RemoteRelationship => {
  const { definition, table_name, table_schema } = relationship;
  const allRemoteFields: RemoteField[] = [];
  const collect = (field: RemoteField) => allRemoteFields.push(field);
  Object.keys(definition.remote_field).forEach(fieldName => {
    serialiseRemoteField(
      fieldName,
      0,
      definition.remote_field[fieldName],
      collect,
      undefined
    );
  });
  return {
    name: relationship.remote_relationship_name,
    remoteSchema: definition.remote_schema,
    table: {
      name: table_name,
      schema: table_schema,
    },
    remoteFields: allRemoteFields,
  };
};
/**
 * Converts the free-text argument value entered in the UI into a typed
 * GraphQL input value. Column references are rendered as "$column".
 * Returns { value, error } — error is non-null when the text cannot be
 * parsed as the requested scalar type.
 */
const getTypedArgValueInput = (argValue: ArgValue, type: string) => {
  let error: string | null = null;
  let value: any;
  if (argValue.kind === 'column') {
    return {
      value: `$${argValue.value}`,
      error,
    };
  }
  switch (type) {
    case 'Float': {
      const parsed = parseFloat(argValue.value);
      if (window.isNaN(parsed)) {
        error = 'invalid float value';
      }
      value = parsed;
      break;
    }
    // Int and ID are both parsed as base-10 integers.
    case 'Int': {
      const parsed = parseInt(argValue.value, 10);
      if (window.isNaN(parsed)) {
        error = 'invalid int value';
      }
      value = parsed;
      break;
    }
    case 'ID': {
      const parsed = parseInt(argValue.value, 10);
      if (window.isNaN(parsed)) {
        error = 'invalid int value';
      }
      value = parsed;
      break;
    }
    case 'Boolean': {
      const lowered = argValue.value.toLowerCase();
      if (lowered !== 'true' && lowered !== 'false') {
        error = 'invalid boolean value';
      } else {
        value = lowered === 'true';
      }
      break;
    }
    default:
      // Everything else passes through as a string.
      value = argValue.value;
      break;
  }
  return {
    error,
    value,
  };
};
/**
 * Builds the create_remote_relationship API payload from the client-side
 * form state. Every column reference used in an argument is collected into
 * `hasura_fields` (deduplicated, "$" prefix stripped). Throws when a static
 * argument value cannot be parsed as its declared type.
 * (An unreachable duplicate return inside the reduce callback was removed.)
 */
export const getRemoteRelPayload = (relationship: RemoteRelationship) => {
  const hasuraFields: string[] = [];
  const getRemoteFieldArguments = (field: RemoteField) => {
    // Reconstructs the nested argument object for one field, depth by depth.
    const getArgumentObject = (depth: number, parent?: string) => {
      const depthArguments = field.arguments.filter(
        a => a.depth === depth && a.parent === parent
      );
      const finalArgObj: any = depthArguments.reduce(
        (argObj: any, currentArg) => {
          const nestedArgObj = getArgumentObject(depth + 1, currentArg.name);
          if (!nestedArgObj) {
            // Leaf argument: serialise its (typed) value.
            const argValueTyped = getTypedArgValueInput(
              currentArg.value,
              currentArg.type
            );
            if (argValueTyped.error) {
              throw Error(argValueTyped.error);
            }
            if (currentArg.value.kind === 'column') {
              // Value has the form "$column"; the prefix is stripped below.
              hasuraFields.push(argValueTyped.value);
            }
            return {
              ...argObj,
              [currentArg.name]: argValueTyped.value,
            };
          }
          return {
            ...argObj,
            [currentArg.name]: nestedArgObj,
          };
        },
        {}
      );
      return Object.keys(finalArgObj).length ? finalArgObj : undefined;
    };
    return getArgumentObject(0);
  };
  // Rebuilds the nested remote_field chain from the flat per-depth list.
  const getRemoteFieldObject = (
    depth: number
  ): Record<string, RemoteRelationshipFieldServer> | undefined => {
    const obj: Record<string, RemoteRelationshipFieldServer> = {};
    const depthRemoteFields = relationship.remoteFields.filter(
      f => f.depth === depth
    );
    depthRemoteFields.forEach(f => {
      obj[f.name] = {
        field: getRemoteFieldObject(depth + 1),
        arguments: getRemoteFieldArguments(f) || {},
      };
    });
    return Object.keys(obj).length ? obj : undefined;
  };
  return {
    name: relationship.name,
    remote_schema: relationship.remoteSchema,
    remote_field: getRemoteFieldObject(0),
    hasura_fields: hasuraFields
      .map(f => f.substr(1))
      .filter((v, i, s) => s.indexOf(v) === i),
    table: relationship.table,
  };
};
// True when the field identified by (name, depth, parent) is part of the
// current remote field selection.
const isFieldChecked = (
  relationship: RemoteRelationship,
  fieldName: string,
  depth: number,
  parent?: string
) =>
  relationship.remoteFields.some(
    f => f.name === fieldName && f.depth === depth && f.parent === parent
  );
/**
 * Returns the ArgValue for `argName` when it is part of the relationship
 * (its type refreshed from the introspected schema), or null when the
 * argument isn't involved in the relationship.
 */
export const getCheckedArgValue = (
  relationship: RemoteRelationship,
  argName: string,
  argType: GraphQLType,
  depth: number,
  parentField: string,
  parentFieldDepth: number,
  parent?: string
): ArgValue | null => {
  const owningField = relationship.remoteFields.find(
    f => f.name === parentField && f.depth === parentFieldDepth
  );
  if (!owningField) {
    return null;
  }
  const checkedArg = owningField.arguments.find(
    arg => arg.name === argName && arg.depth === depth && arg.parent === parent
  );
  if (!checkedArg) {
    return null;
  }
  return {
    ...checkedArg.value,
    type: getUnderlyingType(argType).type.name,
  };
};
/**
 * Emits the tree element for one argument (via `callback`) and, when the
 * argument is checked and is an input-object/interface type, recurses into
 * its sub-fields one depth level down.
 */
const buildArgElement = (
  relationship: RemoteRelationship,
  arg: GraphQLArgument | GraphQLInputField,
  depth: number,
  parentField: string,
  parentFieldDepth: number,
  callback: (fe: TreeArgElement) => void,
  parent?: string
) => {
  const { type: argType }: { type: GraphQLType } = getUnderlyingType(arg.type);
  // Non-null when this argument is part of the current relationship.
  const argValue = getCheckedArgValue(
    relationship,
    arg.name,
    argType,
    depth,
    parentField,
    parentFieldDepth,
    parent
  );
  // Only scalars and enums can be given a value directly.
  const isLeafArg = isScalarType(argType) || isEnumType(argType);
  callback({
    name: arg.name,
    kind: 'argument',
    depth,
    // Enum values are entered as strings; non-leaf args carry no type.
    type:
      (isLeafArg &&
        (isEnumType(argType)
          ? 'String'
          : getUnderlyingType(argType).type.name)) ||
      undefined,
    parent,
    parentField,
    parentFieldDepth,
    value: argValue || defaultArgValue,
    isChecked: !!argValue,
    isLeafArg,
  });
  if (!!argValue && (isInputObjectType(argType) || isInterfaceType(argType))) {
    const argFields = argType.getFields();
    Object.values(argFields).forEach(argField => {
      buildArgElement(
        relationship,
        argField,
        depth + 1,
        parentField,
        parentFieldDepth,
        callback,
        arg.name
      );
    });
  }
};
/**
 * Emits the tree element for one field and, when the field is checked,
 * its arguments (at arg-depth 0) followed by its sub-fields one level down.
 */
const buildFieldElement = (
  relationship: RemoteRelationship,
  field: GraphQLField<any, any, Record<string, any>>,
  depth: number,
  callback: (element: RJSchemaTreeElement) => void,
  parent?: string
) => {
  const { type: fieldType }: { type: GraphQLType } = getUnderlyingType(
    field.type
  );
  const isChecked = isFieldChecked(relationship, field.name, depth, parent);
  callback({
    name: field.name,
    kind: 'field',
    depth,
    parent,
    isChecked,
    // Fields with neither arguments nor an object type have nothing to
    // expand and stay disabled in the UI.
    enabled: (field.args && !!field.args.length) || isObjectType(fieldType),
  });
  if (isChecked) {
    if (field.args) {
      field.args.forEach(arg => {
        buildArgElement(relationship, arg, 0, field.name, depth, callback);
      });
    }
    if (isObjectType(fieldType) || isInterfaceType(fieldType)) {
      const subFields = fieldType.getFields();
      Object.values(subFields).forEach(subField => {
        buildFieldElement(
          relationship,
          subField,
          depth + 1,
          callback,
          field.name
        );
      });
    }
  }
};
/**
 * Produces the flat, ordered list of tree elements (fields + arguments)
 * rendered by the schema explorer, starting from the remote schema's
 * query root. Returns [] when no schema (or no query type) is available.
 */
export const buildSchemaTree = (
  relationship: RemoteRelationship,
  remoteSchema?: GraphQLSchema
): RJSchemaTreeElement[] => {
  if (!remoteSchema) return [];
  const queryType = remoteSchema.getQueryType();
  if (!queryType) return [];
  const schemaTree: RJSchemaTreeElement[] = [];
  const append = (element: RJSchemaTreeElement) => schemaTree.push(element);
  Object.values(queryType.getFields()).forEach(field => {
    buildFieldElement(relationship, field, 0, append);
  });
  return schemaTree;
};
// Two fields are the same tree node when name, depth and parent all match.
export const compareRemoteFields = (
  rf1: RemoteField | TreeFieldElement,
  rf2: RemoteField | TreeFieldElement
) =>
  rf1.name === rf2.name && rf1.depth === rf2.depth && rf1.parent === rf2.parent;
// Two arguments are the same tree node when their position (name, depth,
// parent arg) and owning field both match.
export const compareRFArguments = (
  a1: RemoteFieldArgument,
  a2: RemoteFieldArgument
) =>
  a1.name === a2.name &&
  a1.depth === a2.depth &&
  a1.parent === a2.parent &&
  a1.parentField === a2.parentField &&
  a1.parentFieldDepth === a2.parentFieldDepth;
// Finds the stored argument matching `arg`, if any.
export const findRemoteFieldArgument = (
  args: RemoteFieldArgument[],
  arg: RemoteFieldArgument
) => {
  return args.find(a => compareRFArguments(a, arg));
};
// Finds the stored field matching `field`, if any.
export const findRemoteField = (
  fields: RemoteField[],
  field: RemoteField | TreeFieldElement
) => {
  return fields.find(
    f =>
      f.name === field.name &&
      f.depth === field.depth &&
      f.parent === field.parent
  );
};
// Finds the field that owns the given argument.
export const findArgParentField = (
  fields: RemoteField[],
  arg: RemoteFieldArgument
) => {
  return fields.find(
    f => f.name === arg.parentField && f.depth === arg.parentFieldDepth
  );
};

View File

@ -1,4 +1,12 @@
import React from 'react';
import {
isInputObjectType,
isInterfaceType,
isEnumType,
isObjectType,
isScalarType,
isWrappingType,
} from 'graphql';
/* This function sets the styling to the way the relationship looks, for eg: article.id -> user.user_id */
export const getRelDef = relMeta => {
@ -29,11 +37,9 @@ export const getRelDef = relMeta => {
export const getObjArrRelList = relationships => {
const objRels = relationships.filter(r => r.rel_type === 'object');
const arrRels = relationships.filter(r => r.rel_type !== 'object');
const requiredList = [];
const length =
objRels.length > arrRels.length ? objRels.length : arrRels.length;
for (let i = 0; i < length; i++) {
const objRel = objRels[i] ? objRels[i] : null;
const arrRel = arrRels[i] ? arrRels[i] : null;
@ -43,6 +49,329 @@ export const getObjArrRelList = relationships => {
arrRel,
});
}
return requiredList;
};
// Strips all GraphQL wrapping types (NonNull / List) to reach the named type.
const getUnderlyingType = t => {
  let unwrapped = t;
  while (isWrappingType(unwrapped)) {
    unwrapped = unwrapped.ofType;
  }
  return unwrapped;
};
/**
 * Flattens the remote schema's fields into the list rendered by the (legacy)
 * remote relationship explorer, honouring the current selection stored in
 * relationship.remoteField.
 */
export const getSchemaTree = (relationship, fields) => {
  const { remoteField } = relationship;
  const schemaTree = [];
  // Returns the stored {column, static} value when the argument is checked
  // on its parent field, false otherwise.
  const isArgChecked = (
    arg,
    fieldNesting,
    argNesting,
    parentField,
    parentArg
  ) => {
    if (parentField.arguments) {
      const search = parentField.arguments.find(
        a =>
          a.name === arg.name &&
          a.argNesting === argNesting &&
          a.parentArg === parentArg
      );
      if (search) {
        return {
          column: search.column,
          static: search.static,
        };
      }
    }
    return false;
  };
  // Pushes the tree entry for one argument and, when checked, recurses into
  // its nested input-object fields.
  const handleArg = (arg, nesting, argNesting, parentField, parentArg) => {
    const isChecked = isArgChecked(
      arg,
      nesting,
      argNesting,
      parentField,
      parentArg
    );
    schemaTree.push({
      name: arg.name,
      type: getUnderlyingType(arg.type).name,
      nesting,
      argNesting,
      isChecked: !!isChecked,
      column: isChecked ? isChecked.column : false,
      static: isChecked ? isChecked.static : false,
      isScalar:
        isScalarType(getUnderlyingType(arg.type)) ||
        isEnumType(getUnderlyingType(arg.type)),
      isArg: true,
      parentFieldName: parentField.name,
      parentFieldNesting: parentField.nesting,
      parentArg,
    });
    if (isChecked) {
      const handleWrappingTypeArg = __fieldtype => {
        const currentFieldType = getUnderlyingType(__fieldtype);
        if (currentFieldType._fields) {
          Object.values(currentFieldType._fields).forEach(fa =>
            handleArg(
              fa,
              nesting,
              argNesting + 1,
              parentField,
              `${parentArg}.${arg.name}`
            )
          );
        }
      };
      const handleInputObjectTypeArg = __fieldtype => {
        if (__fieldtype._fields) {
          Object.values(__fieldtype._fields).forEach(fa =>
            handleArg(
              fa,
              nesting,
              argNesting + 1,
              parentField,
              `${parentArg}.${arg.name}`
            )
          );
        }
      };
      if (isWrappingType(arg.type)) {
        handleWrappingTypeArg(arg.type);
      } else if (isInputObjectType(arg.type) || isInterfaceType(arg.type)) {
        handleInputObjectTypeArg(arg.type);
      }
    }
  };
  // A field is checked when it appears at its nesting level in the selection.
  const isFieldChecked = (field, nesting) => {
    if (
      remoteField.find(rf => field.name === rf.name && nesting === rf.nesting)
    ) {
      return true;
    }
    return false;
  };
  // Pushes the tree entry for one field and, when checked, its (sorted)
  // arguments followed by its sub-fields one nesting level deeper.
  const handleField = (field, nesting) => {
    // Scalar fields without arguments are not selectable; skip them.
    if (isScalarType(getUnderlyingType(field.type))) {
      if (!field.args || (field.args && field.args.length === 0)) {
        return;
      }
    }
    const isChecked = isFieldChecked(field, nesting);
    schemaTree.push({
      name: field.name,
      nesting,
      type: field.type,
      isChecked,
    });
    if (isChecked) {
      const currentSelectedField = remoteField.find(
        rf => field.name === rf.name && nesting === rf.nesting
      );
      field.args
        .sort((fa1, fa2) => {
          return fa1.name > fa2.name ? 1 : fa1.name < fa2.name ? -1 : 0;
        })
        .forEach(fa => {
          handleArg(fa, nesting + 1, 0, currentSelectedField, '');
        });
      const handleScalarTypeField = () => {};
      const handleObjectTypeField = __fieldtype => {
        Object.values(__fieldtype._fields).forEach(f =>
          handleField(f, nesting + 1)
        );
      };
      const handleListTypeField = __fieldtype => {
        const unwrappedType = getUnderlyingType(__fieldtype);
        if (isObjectType(unwrappedType) || isInterfaceType(unwrappedType)) {
          handleObjectTypeField(unwrappedType);
        } else {
          handleScalarTypeField(unwrappedType);
        }
      };
      // NOTE(review): both sides of this "||" are the same check — one side
      // was probably meant to be a different predicate; confirm intent.
      if (isWrappingType(field.type) || isWrappingType(field.type)) {
        handleListTypeField(field.type);
      } else if (isObjectType(field.type) || isInterfaceType(field.type)) {
        handleObjectTypeField(field.type);
      } else {
        handleScalarTypeField(field.type);
      }
    }
  };
  fields.forEach(f => handleField(f, 0));
  return schemaTree;
};
/**
 * Parses the trimmed static text input for an argument into a JS value of
 * the given GraphQL scalar type. Throws for unparseable Int/Float/Boolean
 * input; any other type passes through as the trimmed string.
 */
const getTypedInput = (input_, argType, argName) => {
  const throwError = () => {
    throw Error(
      `Invalid static input for argument "${argName}" of type "${argType}".`
    );
  };
  const input = input_.trim();
  switch (argType) {
    case 'Int': {
      const intVal = parseInt(input, 10);
      if (isNaN(intVal)) {
        throwError();
      }
      return intVal;
    }
    case 'Boolean': {
      const lowered = input.toLowerCase();
      if (lowered === 'true') return true;
      if (lowered === 'false') return false;
      return throwError();
    }
    case 'Float': {
      const floatVal = parseFloat(input);
      if (isNaN(floatVal)) {
        throwError();
      }
      return floatVal;
    }
    default:
      return input;
  }
};
/**
 * Builds the create_remote_relationship payload from the client-side
 * relationship state. Column-sourced arguments are rendered as "$column"
 * and the referenced columns collected (deduplicated) into hasura_fields.
 * Throws (via getTypedInput) when a static value can't be parsed.
 */
export const getRemoteRelPayload = (remoteRel, table) => {
  const payload = {};
  const { remoteField, name, remoteSchema } = remoteRel;
  payload.name = name;
  payload.remote_schema = remoteSchema;
  payload.table = table;
  const hasuraFields = [];
  // Rebuilds the nested argument object for a field, one argNesting level
  // (and dotted parentArg path) at a time.
  const getArgs = (field, argNesting, parentArg, _argObj) => {
    const argObj = { ..._argObj };
    field.arguments.forEach(a => {
      if (a.argNesting === argNesting && parentArg === a.parentArg) {
        if (a.column) {
          argObj[a.name] = `$${a.column}`;
          hasuraFields.push(a.column);
        } else if (a.static) {
          argObj[a.name] = getTypedInput(a.static, a.type, a.name);
        } else {
          // Neither column nor static: an input-object argument — recurse.
          argObj[a.name] = getArgs(
            field,
            argNesting + 1,
            `${parentArg}.${a.name}`,
            {}
          );
        }
      }
    });
    return argObj;
  };
  // Rebuilds the nested remote_field chain from the flat per-nesting list.
  const getRemoteFieldObj = nesting => {
    const _rf = remoteField.find(rf => rf.nesting === nesting);
    if (!_rf) {
      return undefined;
    }
    const _field = {
      [_rf.name]: {
        arguments: getArgs(_rf, 0, '', {}),
        field: getRemoteFieldObj(nesting + 1),
      },
    };
    if (_field[_rf.name].field === undefined) {
      delete _field[_rf.name].field;
    }
    return _field;
  };
  payload.remote_field = getRemoteFieldObj(0);
  payload.hasura_fields = [];
  hasuraFields.forEach(hf => {
    // Array.isArray is more reliable than checking constructor.name, which
    // breaks under minification and for cross-realm values.
    if (Array.isArray(hf)) {
      hf.forEach(f => payload.hasura_fields.push(f));
    } else {
      payload.hasura_fields.push(hf);
    }
  });
  payload.hasura_fields = [...new Set(payload.hasura_fields)];
  return payload;
};
/**
 * Converts a server-side remote relationship definition into the flat
 * client-side shape: one entry per nesting level of the remote field chain,
 * each with its serialised arguments.
 */
export const parseRemoteRelationship = remoteRel => {
  const remoteFields = [];

  // Flattens a (possibly nested) arguments object into a list annotated
  // with nesting depth and dotted parent-arg path.
  const getArgs = field => {
    const argsList = [];
    const serialiseArgs = (args, argNesting, parentArg) => {
      Object.entries(args).forEach(([argName, argValue]) => {
        const argObj = {
          name: argName,
          parentArg,
          argNesting,
        };
        if (typeof argValue === 'string') {
          if (argValue[0] === '$') {
            argObj.column = argValue.substr(1);
          } else {
            argObj.static = argValue;
            argObj.type = 'String';
          }
        } else if (typeof argValue === 'number') {
          argObj.static = argValue.toString();
          argObj.type = 'Int';
        } else if (typeof argValue === 'boolean') {
          argObj.static = argValue.toString();
          argObj.type = 'Boolean';
        }
        argsList.push(argObj);
        if (typeof argValue === 'object') {
          serialiseArgs(argValue, argNesting + 1, `${parentArg}.${argName}`);
        }
      });
    };
    serialiseArgs(field.arguments, 0, '');
    return argsList;
  };

  let currentField = { ...remoteRel.remote_field };
  let nesting = 0;
  while (currentField && Object.keys(currentField).length > 0) {
    remoteFields.push({
      name: Object.keys(currentField)[0],
      nesting,
      arguments: getArgs(Object.values(currentField)[0]),
    });
    currentField = Object.values(currentField)[0].field;
    nesting++;
  }

  return {
    name: remoteRel.name,
    remoteSchema: remoteRel.remote_schema,
    remoteField: remoteFields,
  };
};
// Render the short one-line summary shown for a remote relationship in the
// relationships list: the relationship name in bold, followed by the table,
// remote schema and root remote field in italics.
// Returns '' (renders nothing) when the relationship is not remote
// (no remoteSchema) or when the root (nesting === 0) remote field is missing.
export const getRemoteRelConfig = (rel, tableName, styles) => {
  if (!rel.remoteSchema) {
    return '';
  }
  // remoteField is stored flat (see parseRemoteRelationship); nesting 0 is
  // the root field of the remote schema tree
  const remoteField = rel.remoteField.find(f => f.nesting === 0);
  if (!remoteField) return '';
  return (
    <div className={styles.display_flex}>
      <div>
        <b>{`${rel.name}`}</b>&nbsp;
      </div>
      <div>
        {/* NOTE(review): tableName and rel.remoteSchema are concatenated with
            no separator — presumably a delimiter (e.g. " → ") is missing
            here; confirm the intended rendering before changing it. */}
        <i>{`- ${tableName}${rel.remoteSchema} . ${remoteField.name}`}</i>
      </div>
    </div>
  );
};

View File

@ -521,11 +521,51 @@ FROM (
);
};
// Build the `where` clause for a select query over
// hdb_catalog.hdb_remote_relationship from the given filter options.
// `options.schemas` contributes one { table_schema } condition per schema;
// `options.tables` contributes one { table_schema, table_name } condition
// per table. All conditions are combined under a single $and. An empty
// options object yields an empty where ({}).
const generateWhereObject = options => {
  const conditions = [];
  (options.schemas || []).forEach(schemaName => {
    conditions.push({ table_schema: schemaName });
  });
  (options.tables || []).forEach(({ table_schema, table_name }) => {
    // only the schema/name pair is used; other table properties are ignored
    conditions.push({ table_schema, table_name });
  });
  return conditions.length ? { $and: conditions } : {};
};
// Build the v1/query select payload that fetches remote relationships for
// the tables/schemas described by `options` (see generateWhereObject),
// ordered by relationship name.
export const fetchTrackedTableRemoteRelationshipQuery = options => ({
  type: 'select',
  args: {
    table: {
      schema: 'hdb_catalog',
      name: 'hdb_remote_relationship',
    },
    columns: ['*.*', 'remote_relationship_name'],
    where: generateWhereObject(options),
    order_by: [{ column: 'remote_relationship_name', type: 'asc' }],
  },
});
export const mergeLoadSchemaData = (
infoSchemaTableData,
hdbTableData,
fkData,
refFkData
refFkData,
remoteRelData
) => {
const _mergedTableData = [];
@ -551,6 +591,7 @@ export const mergeLoadSchemaData = (
let _uniqueConstraints = [];
let _fkConstraints = [];
let _refFkConstraints = [];
let _remoteRelationships = [];
let _isEnum = false;
let _checkConstraints = [];
let _configuration = {};
@ -575,6 +616,11 @@ export const mergeLoadSchemaData = (
fk.ref_table_table_schema === _tableSchema &&
fk.ref_table === _tableName
);
_remoteRelationships = remoteRelData.filter(
rel =>
rel.table_schema === _tableSchema && rel.table_name === _tableName
);
}
const _mergedInfo = {
@ -593,6 +639,7 @@ export const mergeLoadSchemaData = (
foreign_key_constraints: _fkConstraints,
opp_foreign_key_constraints: _refFkConstraints,
view_info: _viewInfo,
remote_relationships: _remoteRelationships,
is_enum: _isEnum,
configuration: _configuration,
computed_fields: _computed_fields,

View File

@ -14,6 +14,7 @@ import { makeRequest } from '../Actions';
import { appPrefix } from '../constants';
import globals from '../../../../Globals';
import { clearIntrospectionSchemaCache } from '../graphqlUtils';
const prefixUrl = globals.urlPrefix + appPrefix;
@ -291,6 +292,7 @@ const deleteRemoteSchema = () => {
dispatch(push(prefixUrl)),
dispatch(fetchRemoteSchemas()),
]);
clearIntrospectionSchemaCache();
};
const customOnError = error => {
Promise.all([dispatch({ type: DELETE_REMOTE_SCHEMA_FAIL, data: error })]);
@ -422,6 +424,7 @@ const modifyRemoteSchema = () => {
dispatch(push(`${prefixUrl}/manage/${remoteSchemaName}/details`));
});
dispatch(fetchRemoteSchema(remoteSchemaName));
clearIntrospectionSchemaCache();
};
const customOnError = error => {
Promise.all([dispatch({ type: MODIFY_REMOTE_SCHEMA_FAIL, data: error })]);

View File

@ -0,0 +1,91 @@
import { useEffect, useState } from 'react';
import endpoints from '../../../Endpoints';
import { getRemoteSchemaIntrospectionQuery } from '../../Common/utils/v1QueryUtils';
import { buildClientSchema, isWrappingType, isObjectType } from 'graphql';
// local cache where introspection schema is cached
let introspectionSchemaCache = {};
// Invalidate the module-level introspection cache: drop a single remote
// schema's cached introspection when a name is given, or reset the whole
// cache when called with no argument.
export const clearIntrospectionSchemaCache = remoteSchemaName => {
  if (!remoteSchemaName) {
    introspectionSchemaCache = {};
    return;
  }
  delete introspectionSchemaCache[remoteSchemaName];
};
// custom hook for introspecting remote schema
// Custom React hook that introspects a remote schema through the metadata
// API and returns the result as a client-side GraphQLSchema.
//
// Params:
//   remoteSchemaName - name of the remote schema; when falsy the hook does
//                      nothing (schema stays null, loading stays true)
//   headers          - HTTP headers forwarded with the introspection request
// Returns { schema, loading, error, introspect }:
//   schema     - GraphQLSchema built via buildClientSchema, or null
//   loading    - true while the request is in flight
//   error      - fetch/parse error, if any
//   introspect - function to re-run the introspection manually
//
// Successful results are memoised in the module-level
// introspectionSchemaCache, so each remote schema is introspected at most
// once until clearIntrospectionSchemaCache is called.
export const useIntrospectionSchemaRemote = (remoteSchemaName, headers) => {
  const [schema, setSchema] = useState(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState(null);
  const introspectSchema = () => {
    if (!remoteSchemaName) return;
    // if the introspection result is present in cache, skip introspection
    if (introspectionSchemaCache[remoteSchemaName]) {
      setSchema(introspectionSchemaCache[remoteSchemaName]);
      setLoading(false);
      return;
    }
    // perform introspection
    setLoading(true);
    setError(null);
    fetch(endpoints.query, {
      method: 'POST',
      headers: {
        ...headers,
      },
      body: JSON.stringify(getRemoteSchemaIntrospectionQuery(remoteSchemaName)),
    })
      .then(r => r.json())
      .then(response => {
        // NOTE(review): assumes a successful payload with a `data` key; an
        // error payload would make buildClientSchema throw, which lands in
        // the .catch below.
        const clientSchema = buildClientSchema(response.data);
        setSchema(clientSchema);
        introspectionSchemaCache[remoteSchemaName] = clientSchema;
        setLoading(false);
      })
      .catch(err => {
        console.error(err);
        setError(err);
        setLoading(false);
      });
  };
  // re-run introspection whenever the remote schema name changes
  useEffect(introspectSchema, [remoteSchemaName]);
  return {
    schema,
    loading,
    error,
    introspect: introspectSchema,
  };
};
// get underlying GraphQL type if it is wrapped type
// Strip all GraphQL wrapping types (NonNull / List) from `t` and return the
// underlying named type.
export const getUnderlyingType = t => {
  let unwrapped = t;
  while (isWrappingType(unwrapped)) {
    unwrapped = unwrapped.ofType;
  }
  return unwrapped;
};
// get fields of a type from the graphql schema
// Look up `typeName` in the given map of object types and return a map of
// its fields: fieldName -> { typeName, isScalar, isChecked }. Returns {}
// when the type is not present.
// NOTE(review): reads the `_fields` property of graphql-js types, which is
// not part of the public API — confirm against the graphql version in use.
export const getTypeFields = (typeName, objectTypes) => {
  const fields = {};
  const namedType = objectTypes[typeName];
  if (!namedType) {
    return fields;
  }
  const type = getUnderlyingType(namedType);
  Object.entries(type._fields).forEach(([fieldName, fieldDef]) => {
    const fieldType = getUnderlyingType(fieldDef.type);
    fields[fieldName] = {
      typeName: fieldType.name,
      isScalar: !isObjectType(fieldType),
      isChecked: false, // UI selection state, always initialised unchecked
    };
  });
  return fields;
};

View File

@ -1,4 +1,5 @@
import requestAction from '../../../utils/requestAction';
import { clearIntrospectionSchemaCache } from '../RemoteSchema/graphqlUtils';
import { push } from 'react-router-redux';
import globals from '../../../Globals';
import endpoints, { globalCookiePolicy } from '../../../Endpoints';
@ -256,6 +257,9 @@ export const loadInconsistentObjects = (reloadConfig, successCb, failureCb) => {
if (successCb) {
successCb();
}
if (shouldReloadRemoteSchemas) {
clearIntrospectionSchemaCache();
}
},
error => {
console.error(error);
@ -291,6 +295,8 @@ export const reloadRemoteSchema = (remoteSchemaName, successCb, failureCb) => {
dispatch(handleInconsistentObjects(inconsistentObjects));
clearIntrospectionSchemaCache();
if (successCb) {
successCb();
}
@ -340,6 +346,7 @@ export const dropInconsistentObjects = (successCb, failureCb) => {
dispatch({ type: DROPPED_INCONSISTENT_METADATA });
dispatch(showSuccessNotification('Dropped inconsistent metadata'));
dispatch(loadInconsistentObjects({ shouldReloadRemoteSchemas: false }));
clearIntrospectionSchemaCache();
if (successCb) {
successCb();
}

View File

@ -41,6 +41,13 @@ const MetadataStatus = ({ dispatch, metadata }) => {
definition = `relationship of table "${getTableNameFromDef(
ico.definition.table
)}"`;
} else if (ico.type === 'remote_relationship') {
name = ico.definition.configuration.name;
definition = `relationship between table "${getTableNameFromDef(
ico.definition.table
)}" and remote schema "${
ico.definition.configuration.remote_schema
}"`;
} else if (permissionTypes.includes(ico.type)) {
name = `${ico.definition.role}-permission`;
definition = `${ico.type} on table "${getTableNameFromDef(

View File

@ -256,6 +256,22 @@ The various types of queries are listed in the following table:
- 1
- Reload schema of an existing remote schema
* - :ref:`create_remote_relationship`
- :ref:`create_remote_relationship_args <create_remote_relationship_syntax>`
- 1
- Create a remote relationship with an existing remote schema
* - :ref:`update_remote_relationship`
- :ref:`update_remote_relationship_args <update_remote_relationship_syntax>`
- 1
- Update an existing remote relationship
* - :ref:`delete_remote_relationship`
- :ref:`delete_remote_relationship_args <delete_remote_relationship_syntax>`
- 1
- Delete an existing remote relationship
* - :ref:`export_metadata`
- :ref:`Empty Object`
- 1
@ -443,6 +459,7 @@ See :ref:`server_flag_reference` for info on setting the above flag/env var.
Event Triggers <event-triggers>
Scheduled Triggers <scheduled-triggers>
Remote Schemas <remote-schemas>
Remote Relationships <remote-relationships>
Query Collections <query-collections>
Custom Types <custom-types>
Actions <actions>

View File

@ -0,0 +1,262 @@
.. meta::
:description: Manage remote relationships with the Hasura schema/metadata API
:keywords: hasura, docs, schema/metadata API, API reference, remote joins, remote relationships
Schema/Metadata API Reference: Remote Relationships
===================================================
.. contents:: Table of contents
:backlinks: none
:depth: 1
:local:
Remote Relationships allow you to join tables with remote schemas.
.. _create_remote_relationship:
create_remote_relationship
--------------------------
``create_remote_relationship`` is used to create a new remote relationship with an existing remote schema.
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type":"create_remote_relationship",
"args":{
"name": "sample_remote_relationship",
"table": "users",
"hasura_fields": ["id"],
"remote_schema": "my-remote-schema",
"remote_field": {
"messages": {
"arguments": {
"id":"$id"
}
}
}
}
}
.. _create_remote_relationship_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - name
- true
- RemoteRelationshipName_
- Name of the remote relationship
* - table
- true
- :ref:`QualifiedTable <QualifiedTable>`
- Object with table name and schema
* - hasura_fields
- true
- [:ref:`PGColumn <PGColumn>`]
- Column(s) in the table that are used for joining with the remote schema field. All join keys in ``remote_field`` must appear here.
* - remote_schema
- true
- :ref:`RemoteSchemaName <RemoteSchemaName>`
- Name of the remote schema to join with
* - remote_field
- true
- RemoteField_
- The schema tree ending at the field in the remote schema which needs to be joined with.
.. _update_remote_relationship:
update_remote_relationship
--------------------------
``update_remote_relationship`` is used to update an existing remote relationship.
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type": "update_remote_relationship",
"args": {
"name": "sample_remote_relationship",
"table": "users",
"hasura_fields": ["id"],
"remote_schema": "my-remote-schema",
"remote_field": {
"posts": {
"arguments": {
"id": "$id",
"likes": {
"lte":"1000"
}
}
}
}
}
}
.. _update_remote_relationship_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - name
- true
- RemoteRelationshipName_
- Name of the remote relationship
* - table
- true
- :ref:`QualifiedTable <QualifiedTable>`
- Object with table name and schema
* - hasura_fields
- true
- [:ref:`PGColumn <PGColumn>`]
- Column(s) in the table that is used for joining with remote schema field. All join keys in ``remote_field`` must appear here.
* - remote_schema
- true
- :ref:`RemoteSchemaName <RemoteSchemaName>`
- Name of the remote schema to join with
* - remote_field
- true
- RemoteField_
- The schema tree ending at the field in the remote schema which needs to be joined with.
.. _delete_remote_relationship:
delete_remote_relationship
--------------------------
``delete_remote_relationship`` is used to delete an existing remote relationship.
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type" : "delete_remote_relationship",
"args" : {
"table":{
"name":"users",
"schema":"public"
},
"name":"sample_remote_relationship"
}
}
.. _delete_remote_relationship_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - table
- true
- :ref:`QualifiedTable <QualifiedTable>`
- Object with table name and schema
* - name
- true
- RemoteRelationshipName_
- Name of the remote relationship
.. _RemoteRelationshipName:
RemoteRelationshipName
&&&&&&&&&&&&&&&&&&&&&&
.. parsed-literal::
String
RemoteField
&&&&&&&&&&&
.. parsed-literal::
:class: haskell-pre
{
FieldName: {
"arguments": InputArguments
"field": RemoteField # optional
}
}
``RemoteField`` is a recursive tree structure that points to the field in the remote schema that needs to be joined with. It is recursive because the remote field may be nested deeply in the remote schema.
Examples:
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"message": {
"arguments":{
"message_id":"$id"
}
}
}
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"messages": {
"arguments": {
"limit": 100
},
"field": {
"private": {
"arguments": {
"id" : "$id"
}
}
}
}
}
InputArguments
&&&&&&&&&&&&&&
.. parsed-literal::
:class: haskell-pre
{
InputField : $PGColumn | Scalar
}
Table columns can be referred to by prefixing ``$``, e.g. ``$id``.

View File

@ -172,7 +172,7 @@ if [ "$MODE" = "graphql-engine" ]; then
if command -v hpc >/dev/null && command -v jq >/dev/null ; then
# Get the appropriate mix dir (the newest one). This way this hopefully
# works when cabal.project.dev-sh.local is edited to turn on optimizations.
# See also: https://hackage.haskell.org/package/cabal-plan
# See also: https://hackage.haskell.org/package/cabal-plan
distdir=$(cat dist-newstyle/cache/plan.json | jq -r '."install-plan"[] | select(."id" == "graphql-engine-1.0.0-inplace")? | ."dist-dir"')
hpcdir="$distdir/hpc/vanilla/mix/graphql-engine-1.0.0"
echo_pretty "Generating code coverage report..."
@ -402,8 +402,18 @@ elif [ "$MODE" = "test" ]; then
done
echo " Ok"
### Check for and install dependencies in venv
cd "$PROJECT_ROOT/server/tests-py"
## Install misc test dependencies:
if [ ! -d "node_modules" ]; then
npm_config_loglevel=error npm install remote_schemas/nodejs/
else
echo_pretty "It looks like node dependencies have been installed already. Skipping."
echo_pretty "If things fail please run this and try again"
echo_pretty " $ rm -r \"$PROJECT_ROOT/server/tests-py/node_modules\""
fi
### Check for and install dependencies in venv
PY_VENV=.hasura-dev-python-venv
DEVSH_VERSION_FILE=.devsh_version
# Do we need to force reinstall?

46
scripts/dump-remote-schema.js Executable file
View File

@ -0,0 +1,46 @@
#!/usr/bin/env node
// Run an exhaustive introspection query against a GraphQL server given on
// the command line and print the resulting schema in SDL form.
//
// Install dependencies:
//
//     $ npm install -g axios graphql
//
// Usage, e.g.:
//
//     $ NODE_PATH=$(npm root --quiet -g) scripts/dump-remote-schema.js http://localhost:8088/v1/graphql
//
// TODO whatever if there's a more appropriate way to install dependencies such
// that this script can be called from anywhere, and without littering
// everything with node_modules directories.
const { introspectionQuery, buildClientSchema, printSchema } = require('graphql');
const axios = require('axios');

if (process.argv.length != 3){
  console.log("Supply the graphql server URL as the only argument on the command line");
  process.exit(1);
}

(async () => {
  try {
    const { data } = await axios({
      url: process.argv[2],
      method: 'post',
      headers: { 'Content-Type': 'application/json' },
      data: {operationName: "IntrospectionQuery", query: introspectionQuery},
    });
    // dump the raw response first, for debugging
    console.log(data);
    if (data.errors) {
      console.log(data.errors);
      console.log("\n ^^^^^^^^^^^^^^^ OOPS GOT SOME ERRORS FROM THE SERVER ^^^^^^^^^^^^^^^\n\n");
      // proceed anyway I guess
    }
    console.log(printSchema(buildClientSchema(data.data)));
  } catch (error) {
    console.log(error);
    console.log("\n ^^^^^^^^^^^^^^^ OOPS GOT SOME ERRORS ^^^^^^^^^^^^^^^\n\n");
  }
})();

View File

@ -43,8 +43,8 @@ ci-build:
# Copy the .mix files needed for `hpc` to generate code coverage reports into the build output
# directory, only if coverage is enabled (the mix files aren't generated otherwise).
if [[ -n '$(enable_coverage)' ]]; then \
mkdir -p '$(build_output)/mix/' && \
shopt -s failglob globstar && cp -R $(mix_dirs_glob) '$(build_output)/mix/'; \
mkdir -p '$(build_output)/mix/' && \
shopt -s failglob globstar && cp -R $(mix_dirs_glob) '$(build_output)/mix/'; \
fi
# assumes this is built in circleci

View File

@ -127,7 +127,6 @@ constraints: any.Cabal ==3.2.0.0,
any.erf ==2.0.0.0,
any.errors ==2.3.0,
any.exceptions ==0.10.4,
exceptions +transformers-0-4,
any.fail ==4.9.0.0,
any.fast-logger ==3.0.1,
any.file-embed ==0.0.11.2,
@ -333,6 +332,7 @@ constraints: any.Cabal ==3.2.0.0,
any.utf8-string ==1.0.1.1,
any.uuid ==1.3.13,
any.uuid-types ==1.0.3,
any.validation ==1.1,
any.vault ==0.3.1.4,
vault +useghc,
any.vector ==0.12.1.2,
@ -356,6 +356,7 @@ constraints: any.Cabal ==3.2.0.0,
warp +allow-sendfilefd -network-bytestring -warp-debug,
any.websockets ==0.12.7.0,
websockets -example,
any.witherable ==0.3.1,
any.wl-pprint-annotated ==0.1.0.1,
any.word8 ==0.1.3,
any.wreq ==0.5.3.2,

View File

@ -36,6 +36,7 @@ common common-all
InstanceSigs LambdaCase MultiParamTypeClasses MultiWayIf NamedFieldPuns NoImplicitPrelude
OverloadedStrings QuantifiedConstraints QuasiQuotes RankNTypes RecordWildCards ScopedTypeVariables
StandaloneDeriving TemplateHaskell TupleSections TypeApplications TypeFamilies TypeOperators
RecordWildCards
common common-exe
ghc-options:
@ -53,10 +54,14 @@ library
import: common-all
hs-source-dirs: src-lib
build-depends: base
, bifunctors
, witherable
, validation
, lifted-base
, pg-client
, text
, text-builder >= 0.6
, vector-builder
, bytestring
, postgresql-libpq
, mtl
@ -95,6 +100,7 @@ library
-- Encoder related
, uuid
, vector
, vector-builder
-- Logging related
, network
@ -288,6 +294,7 @@ library
, Hasura.RQL.DDL.ComputedField
, Hasura.RQL.DDL.Relationship
, Hasura.RQL.Types.CustomTypes
, Hasura.RQL.Types.RemoteRelationship
, Hasura.RQL.DDL.Deps
, Hasura.RQL.DDL.Permission.Internal
, Hasura.RQL.DDL.Permission
@ -312,6 +319,8 @@ library
, Hasura.RQL.DDL.QueryCollection
, Hasura.RQL.DDL.Action
, Hasura.RQL.DDL.CustomTypes
, Hasura.RQL.DDL.RemoteRelationship
, Hasura.RQL.DDL.RemoteRelationship.Validate
, Hasura.RQL.DML.Delete
, Hasura.RQL.DML.Internal
, Hasura.RQL.DML.Insert
@ -322,6 +331,7 @@ library
, Hasura.RQL.DML.Select
, Hasura.RQL.DML.Update
, Hasura.RQL.DML.Count
, Hasura.RQL.DML.RemoteJoin
, Hasura.RQL.GBoolExp
, Hasura.GraphQL.Transport.HTTP.Protocol

View File

@ -17,6 +17,7 @@ module Data.Aeson.Ordered
, Data.Aeson.Ordered.toList
, fromList
, object
, asObject
, array
, insert
, delete
@ -179,6 +180,11 @@ fromOrdered v = case v of
Bool boolean -> J.Bool boolean
Null -> J.Null
-- | Project the 'Object' constructor out of an ordered 'Value', failing
-- with a short description for any other constructor.
asObject :: Value -> Either Text Object
asObject = \case
  Object o -> Right o
  _ -> Left "expecting ordered object"
--------------------------------------------------------------------------------
-- Top-level entry points

View File

@ -1,5 +1,6 @@
module Data.List.Extended
( duplicates
, uniques
, module L
) where
@ -9,7 +10,11 @@ import Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.List as L
import qualified Data.List.NonEmpty as NE
-- | The set of elements that occur more than once in the input list.
duplicates :: (Eq a, Hashable a) => [a] -> Set.HashSet a
duplicates =
  Set.fromList . Map.keys . Map.filter (> 1) . Map.fromListWith (+) . map (,1::Int)
-- | Collapse runs of /adjacent/ equal elements to a single occurrence.
-- NOTE: like Unix @uniq@, this only removes adjacent duplicates
-- ('NE.group' groups consecutive equal elements); sort the input first
-- for a full de-duplication.
uniques :: Eq a => [a] -> [a]
uniques = map NE.head . NE.group

View File

@ -57,7 +57,6 @@ import Hasura.Server.Telemetry
import Hasura.Server.Version
import Hasura.Session
printErrExit :: (MonadIO m) => forall a . String -> m a
printErrExit = liftIO . (>> exitFailure) . putStrLn

View File

@ -1,3 +1,4 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE UndecidableInstances #-}
-- A module for postgres execution related types and operations
@ -66,6 +67,11 @@ data LazyTx e a
= LTErr !e
| LTNoTx !a
| LTTx !(Q.TxE e a)
deriving Show
-- orphan:
instance Show (Q.TxE e a) where
show = const "(error \"TxE\")"
lazyTxToQTx :: LazyTx e a -> Q.TxE e a
lazyTxToQTx = \case

View File

@ -5,7 +5,7 @@ module Hasura.GraphQL.Execute
, getExecPlanPartial
, ExecOp(..)
, ExecPlanResolved
, GQExecPlanResolved
, getResolvedExecPlan
, execRemoteGQ
, getSubsOp
@ -19,22 +19,20 @@ module Hasura.GraphQL.Execute
, ExecutionCtx(..)
) where
import Control.Exception (try)
import Control.Lens
import Data.Has
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import Hasura.EncJSON
import Hasura.GraphQL.Context
import Hasura.GraphQL.Logging
import Hasura.GraphQL.RemoteServer (execRemoteGQ')
import Hasura.GraphQL.Resolve.Action
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Schema
@ -42,10 +40,8 @@ import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Validate.Types
import Hasura.HTTP
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types
import Hasura.Server.Utils (RequestId, mkClientHeadersForward,
mkSetCookieHeaders)
import Hasura.Server.Utils (RequestId)
import Hasura.Server.Version (HasVersion)
import Hasura.Session
@ -143,9 +139,9 @@ getExecPlanPartial userInfo sc enableAL req = do
case typeLoc of
VT.TLHasuraType -> do
rootSelSet <- runReaderT (VQ.validateGQ queryParts) gCtx
return $ GExPHasura (gCtx, rootSelSet)
pure $ GExPHasura (gCtx, rootSelSet)
VT.TLRemoteType _ rsi ->
return $ GExPRemote rsi opDef
pure $ GExPRemote rsi opDef
VT.TLCustom ->
throw500 "unexpected custom type for top level field"
where
@ -172,10 +168,10 @@ data ExecOp
| ExOpSubs !EL.LiveQueryPlan
-- The graphql query is resolved into an execution operation
type ExecPlanResolved = GQExecPlan ExecOp
type GQExecPlanResolved = GQExecPlan ExecOp
getResolvedExecPlan
:: (HasVersion, MonadError QErr m, MonadIO m)
:: forall m. (HasVersion, MonadError QErr m, MonadIO m)
=> PGExecCtx
-> EP.PlanCache
-> UserInfo
@ -186,7 +182,7 @@ getResolvedExecPlan
-> HTTP.Manager
-> [N.Header]
-> GQLReqUnparsed
-> m (Telem.CacheHit, ExecPlanResolved)
-> m (Telem.CacheHit, GQExecPlanResolved)
getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
enableAL sc scVer httpManager reqHeaders reqUnparsed = do
planM <- liftIO $ EP.getPlan scVer (_uiRole userInfo)
@ -196,8 +192,8 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
-- plans are only for queries and subscriptions
Just plan -> (Telem.Hit,) . GExPHasura <$> case plan of
EP.RPQuery queryPlan -> do
(tx, genSql) <- EQ.queryOpFromPlan usrVars queryVars queryPlan
return $ ExOpQuery tx (Just genSql)
(tx, genSql) <- EQ.queryOpFromPlan httpManager reqHeaders userInfo queryVars queryPlan
pure $ ExOpQuery tx (Just genSql)
EP.RPSubs subsPlan ->
ExOpSubs <$> EL.reuseLiveQueryPlan pgExecCtx usrVars queryVars subsPlan
Nothing -> (Telem.Miss,) <$> noExistingPlan
@ -206,6 +202,8 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
addPlanToCache plan =
liftIO $ EP.addPlan scVer (_uiRole userInfo)
opNameM queryStr plan planCache
noExistingPlan :: m GQExecPlanResolved
noExistingPlan = do
req <- toParsed reqUnparsed
(partialExecPlan, queryReusability) <- runReusabilityT $
@ -216,7 +214,7 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
(tx, respHeaders) <- getMutOp gCtx sqlGenCtx userInfo httpManager reqHeaders selSet
pure $ ExOpMutation respHeaders tx
VQ.RQuery selSet -> do
(queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx userInfo queryReusability (allowQueryActionExecuter httpManager reqHeaders) selSet
(queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx httpManager reqHeaders userInfo queryReusability (allowQueryActionExecuter httpManager reqHeaders) selSet
traverse_ (addPlanToCache . EP.RPQuery) plan
return $ ExOpQuery queryTx (Just genSql)
VQ.RSubscription fld -> do
@ -261,13 +259,15 @@ getQueryOp
, MonadIO m)
=> GCtx
-> SQLGenCtx
-> HTTP.Manager
-> [N.Header]
-> UserInfo
-> QueryReusability
-> QueryActionExecuter
-> VQ.SelSet
-> m (LazyRespTx, Maybe EQ.ReusableQueryPlan, EQ.GeneratedSqlMap)
getQueryOp gCtx sqlGenCtx userInfo queryReusability actionExecuter selSet =
runE gCtx sqlGenCtx userInfo $ EQ.convertQuerySelSet queryReusability selSet actionExecuter
getQueryOp gCtx sqlGenCtx manager reqHdrs userInfo queryReusability actionExecuter selSet =
runE gCtx sqlGenCtx userInfo $ EQ.convertQuerySelSet manager reqHdrs queryReusability selSet actionExecuter
resolveMutSelSet
:: ( HasVersion
@ -358,46 +358,14 @@ execRemoteGQ
-> [N.Header]
-> GQLReqUnparsed
-> RemoteSchemaInfo
-> G.TypedOperationDefinition
-> G.OperationType
-> m (DiffTime, HttpResponse EncJSON)
-- ^ Also returns time spent in http request, for telemetry.
execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
execRemoteGQ reqId userInfo reqHdrs q rsi opType = do
execCtx <- ask
let logger = _ecxLogger execCtx
manager = _ecxHttpManager execCtx
opTy = G._todType opDef
when (opTy == G.OperationTypeSubscription) $
throw400 NotSupported "subscription to remote server is not supported"
confHdrs <- makeHeadersFromConf hdrConf
let clientHdrs = bool [] (mkClientHeadersForward reqHdrs) fwdClientHdrs
-- filter out duplicate headers
-- priority: conf headers > resolved userinfo vars > client headers
hdrMaps = [ Map.fromList confHdrs
, Map.fromList userInfoToHdrs
, Map.fromList clientHdrs
]
headers = Map.toList $ foldr Map.union Map.empty hdrMaps
finalHeaders = addDefaultHeaders headers
initReqE <- liftIO $ try $ HTTP.parseRequest (show url)
initReq <- either httpThrow pure initReqE
let req = initReq
{ HTTP.method = "POST"
, HTTP.requestHeaders = finalHeaders
, HTTP.requestBody = HTTP.RequestBodyLBS (J.encode q)
, HTTP.responseTimeout = HTTP.responseTimeoutMicro (timeout * 1000000)
}
L.unLogger logger $ QueryLog q Nothing reqId
(time, res) <- withElapsedTime $ liftIO $ try $ HTTP.httpLbs req manager
resp <- either httpThrow return res
let !httpResp = HttpResponse (encJFromLBS $ resp ^. Wreq.responseBody) $ mkSetCookieHeaders resp
(time, respHdrs, resp) <- execRemoteGQ' manager userInfo reqHdrs q rsi opType
let !httpResp = HttpResponse (encJFromLBS resp) respHdrs
return (time, httpResp)
where
RemoteSchemaInfo url hdrConf fwdClientHdrs timeout = rsi
httpThrow :: (MonadError QErr m) => HTTP.HttpException -> m a
httpThrow = \case
HTTP.HttpExceptionRequest _req content -> throw500 $ T.pack . show $ content
HTTP.InvalidUrlException _url reason -> throw500 $ T.pack . show $ reason
userInfoToHdrs = sessionVariablesToHeaders $ _uiSession userInfo

View File

@ -245,7 +245,7 @@ data LiveQueryPlan
= LiveQueryPlan
{ _lqpParameterizedPlan :: !ParameterizedLiveQueryPlan
, _lqpVariables :: !CohortVariables
}
} deriving Show
data ParameterizedLiveQueryPlan
= ParameterizedLiveQueryPlan
@ -288,6 +288,12 @@ buildLiveQueryPlan pgExecCtx initialReusability actionExecutioner fields = do
_ -> do
unresolvedAST <- GR.queryFldToPGAST field actionExecutioner
resolvedAST <- GR.traverseQueryRootFldAST resolveMultiplexedValue unresolvedAST
let (_, remoteJoins) = GR.toPGQuery resolvedAST
-- Reject remote relationships in subscription live query
when (remoteJoins /= mempty) $
throw400 NotSupported
"Remote relationships are not allowed in subscriptions"
pure (GV._fAlias field, resolvedAST)
userInfo <- asks getter

View File

@ -196,8 +196,9 @@ pushResultToCohort result !respHashM (LiveQueryMetadata dTime) cohortSnapshot =
where
CohortSnapshot _ respRef curSinks newSinks = cohortSnapshot
response = result <&> \payload -> LiveQueryResponse (encJToLBS payload) dTime
pushResultToSubscribers = A.mapConcurrently_ $ \(Subscriber action) -> action response
pushResultToSubscribers = A.mapConcurrently_ $ \(Subscriber action) ->
action response
-- -------------------------------------------------------------------------------------------------
-- Pollers

View File

@ -14,6 +14,8 @@ import qualified Data.IntMap as IntMap
import qualified Data.TByteString as TBS
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
import Control.Lens ((^?))
import Data.Has
@ -30,7 +32,8 @@ import Hasura.GraphQL.Resolve.Action
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Validate.Types
import Hasura.Prelude
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.DML.RemoteJoin
import Hasura.RQL.DML.Select
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
@ -45,13 +48,14 @@ type PrepArgMap = IntMap.IntMap (Q.PrepArg, PGScalarValue)
data PGPlan
= PGPlan
{ _ppQuery :: !Q.Query
, _ppVariables :: !PlanVariables
, _ppPrepared :: !PrepArgMap
{ _ppQuery :: !Q.Query
, _ppVariables :: !PlanVariables
, _ppPrepared :: !PrepArgMap
, _ppRemoteJoins :: !(Maybe RemoteJoins)
}
instance J.ToJSON PGPlan where
toJSON (PGPlan q vars prepared) =
toJSON (PGPlan q vars prepared _) =
J.object [ "query" J..= Q.getQueryText q
, "variables" J..= vars
, "prepared" J..= fmap show prepared
@ -86,10 +90,10 @@ instance J.ToJSON ReusableQueryPlan where
withPlan
:: (MonadError QErr m)
=> SessionVariables -> PGPlan -> ReusableVariableValues -> m PreparedSql
withPlan usrVars (PGPlan q reqVars prepMap) annVars = do
withPlan usrVars (PGPlan q reqVars prepMap rq) annVars = do
prepMap' <- foldM getVar prepMap (Map.toList reqVars)
let args = withSessionVariables usrVars $ IntMap.elems prepMap'
return $ PreparedSql q args
return $ PreparedSql q args rq
where
getVar accum (var, prepNo) = do
let varName = G.unName $ G.unVariable var
@ -100,21 +104,23 @@ withPlan usrVars (PGPlan q reqVars prepMap) annVars = do
-- turn the current plan into a transaction
mkCurPlanTx
:: (MonadError QErr m)
=> SessionVariables
:: (HasVersion, MonadError QErr m)
=> HTTP.Manager
-> [N.Header]
-> UserInfo
-> FieldPlans
-> m (LazyRespTx, GeneratedSqlMap)
mkCurPlanTx usrVars fldPlans = do
mkCurPlanTx manager reqHdrs userInfo fldPlans = do
-- generate the SQL and prepared vars or the bytestring
resolved <- forM fldPlans $ \(alias, fldPlan) -> do
fldResp <- case fldPlan of
RFPRaw resp -> return $ RRRaw resp
RFPPostgres (PGPlan q _ prepMap) -> do
let args = withSessionVariables usrVars $ IntMap.elems prepMap
return $ RRSql $ PreparedSql q args
RFPPostgres (PGPlan q _ prepMap rq) -> do
let args = withSessionVariables (_uiSession userInfo) $ IntMap.elems prepMap
return $ RRSql $ PreparedSql q args rq
return (alias, fldResp)
return (mkLazyRespTx resolved, mkGeneratedSqlMap resolved)
return (mkLazyRespTx manager reqHdrs userInfo resolved, mkGeneratedSqlMap resolved)
withSessionVariables :: SessionVariables -> [(Q.PrepArg, PGScalarValue)] -> [(Q.PrepArg, PGScalarValue)]
withSessionVariables usrVars list =
@ -190,12 +196,14 @@ convertQuerySelSet
, HasVersion
, MonadIO m
)
=> QueryReusability
=> HTTP.Manager
-> [N.Header]
-> QueryReusability
-> V.SelSet
-> QueryActionExecuter
-> m (LazyRespTx, Maybe ReusableQueryPlan, GeneratedSqlMap)
convertQuerySelSet initialReusability fields actionRunner = do
usrVars <- asks (_uiSession . getter)
convertQuerySelSet manager reqHdrs initialReusability fields actionRunner = do
userInfo <- asks getter
(fldPlans, finalReusability) <- runReusabilityTWith initialReusability $
forM (toList fields) $ \fld -> do
fldPlan <- case V._fName fld of
@ -206,42 +214,46 @@ convertQuerySelSet initialReusability fields actionRunner = do
unresolvedAst <- R.queryFldToPGAST fld actionRunner
(q, PlanningSt _ vars prepped) <- flip runStateT initPlanningSt $
R.traverseQueryRootFldAST prepareWithPlan unresolvedAst
pure . RFPPostgres $ PGPlan (R.toPGQuery q) vars prepped
let (query, remoteJoins) = R.toPGQuery q
pure . RFPPostgres $ PGPlan query vars prepped remoteJoins
pure (V._fAlias fld, fldPlan)
let varTypes = finalReusability ^? _Reusable
reusablePlan = ReusableQueryPlan <$> varTypes <*> pure fldPlans
(tx, sql) <- mkCurPlanTx usrVars fldPlans
(tx, sql) <- mkCurPlanTx manager reqHdrs userInfo fldPlans
pure (tx, reusablePlan, sql)
-- use the existing plan and new variables to create a pg query
queryOpFromPlan
:: (MonadError QErr m)
=> SessionVariables
:: (HasVersion, MonadError QErr m)
=> HTTP.Manager
-> [N.Header]
-> UserInfo
-> Maybe GH.VariableValues
-> ReusableQueryPlan
-> m (LazyRespTx, GeneratedSqlMap)
queryOpFromPlan usrVars varValsM (ReusableQueryPlan varTypes fldPlans) = do
queryOpFromPlan manager reqHdrs userInfo varValsM (ReusableQueryPlan varTypes fldPlans) = do
validatedVars <- GV.validateVariablesForReuse varTypes varValsM
-- generate the SQL and prepared vars or the bytestring
resolved <- forM fldPlans $ \(alias, fldPlan) ->
(alias,) <$> case fldPlan of
RFPRaw resp -> return $ RRRaw resp
RFPPostgres pgPlan -> RRSql <$> withPlan usrVars pgPlan validatedVars
return (mkLazyRespTx resolved, mkGeneratedSqlMap resolved)
RFPPostgres pgPlan -> RRSql <$> withPlan (_uiSession userInfo) pgPlan validatedVars
return (mkLazyRespTx manager reqHdrs userInfo resolved, mkGeneratedSqlMap resolved)
data PreparedSql
= PreparedSql
{ _psQuery :: !Q.Query
, _psPrepArgs :: ![(Q.PrepArg, PGScalarValue)]
{ _psQuery :: !Q.Query
, _psPrepArgs :: ![(Q.PrepArg, PGScalarValue)]
-- ^ The value is (Q.PrepArg, PGScalarValue) because we want to log the human-readable value of the
-- prepared argument (PGScalarValue) and not the binary encoding in PG format (Q.PrepArg)
, _psRemoteJoins :: !(Maybe RemoteJoins)
}
deriving Show
-- | Required to log in `query-log`
instance J.ToJSON PreparedSql where
toJSON (PreparedSql q prepArgs) =
toJSON (PreparedSql q prepArgs _) =
J.object [ "query" J..= Q.getQueryText q
, "prepared_arguments" J..= map (txtEncodedPGVal . snd) prepArgs
]
@ -259,12 +271,19 @@ data ResolvedQuery
-- prepared statement
type GeneratedSqlMap = [(G.Alias, Maybe PreparedSql)]
mkLazyRespTx :: [(G.Alias, ResolvedQuery)] -> LazyRespTx
mkLazyRespTx resolved =
mkLazyRespTx
:: HasVersion
=> HTTP.Manager -> [N.Header] -> UserInfo -> [(G.Alias, ResolvedQuery)] -> LazyRespTx
mkLazyRespTx manager reqHdrs userInfo resolved =
fmap encJFromAssocList $ forM resolved $ \(alias, node) -> do
resp <- case node of
RRRaw bs -> return $ encJFromBS bs
RRSql (PreparedSql q args) -> liftTx $ asSingleRowJsonResp q (map fst args)
RRRaw bs -> return $ encJFromBS bs
RRSql (PreparedSql q args maybeRemoteJoins) -> do
let prepArgs = map fst args
case maybeRemoteJoins of
Nothing -> liftTx $ asSingleRowJsonResp q prepArgs
Just remoteJoins ->
executeQueryWithRemoteJoins manager reqHdrs userInfo q prepArgs remoteJoins
return (G.unName $ G.unAlias alias, resp)
mkGeneratedSqlMap :: [(G.Alias, ResolvedQuery)] -> GeneratedSqlMap

View File

@ -99,10 +99,13 @@ explainField userInfo gCtx sqlGenCtx actionExecuter fld =
_ -> do
unresolvedAST <-
runExplain (queryCtxMap, userInfo, fldMap, orderByCtx, sqlGenCtx) $
evalReusabilityT $ RS.queryFldToPGAST fld actionExecuter
evalReusabilityT $ RS.queryFldToPGAST fld actionExecuter
resolvedAST <- RS.traverseQueryRootFldAST (resolveVal userInfo) unresolvedAST
let txtSQL = Q.getQueryText $ RS.toPGQuery resolvedAST
let (query, remoteJoins) = RS.toPGQuery resolvedAST
txtSQL = Q.getQueryText query
withExplain = "EXPLAIN (FORMAT TEXT) " <> txtSQL
-- Reject if query contains any remote joins
when (remoteJoins /= mempty) $ throw400 NotSupported "Remote relationships are not allowed in explain query"
planLines <- liftTx $ map runIdentity <$>
Q.listQE dmlTxErrorHandler (Q.fromText withExplain) () True
return $ FieldPlan fName (Just txtSQL) $ Just planLines

View File

@ -1,42 +1,50 @@
module Hasura.GraphQL.RemoteServer where
import Control.Exception (try)
import Control.Lens ((^.))
import Data.Aeson ((.:), (.:?))
import Data.FileEmbed (embedStringFile)
import Data.Foldable (foldlM)
import Control.Exception (try)
import Control.Lens ((^.))
import Data.Aeson ((.:), (.:?))
import Data.Foldable (foldlM)
import Hasura.HTTP
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Parser as G
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.Wreq as Wreq
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Parser as G
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import Hasura.GraphQL.Schema.Merge
import Hasura.RQL.DDL.Headers (makeHeadersFromConf)
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.RQL.DDL.Headers (makeHeadersFromConf)
import Hasura.RQL.Types
import Hasura.Server.Utils (httpExceptToJSON)
import Hasura.Server.Version (HasVersion)
import Hasura.Server.Utils
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Schema as GS
import qualified Hasura.GraphQL.Validate.Types as VT
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Schema as GS
import qualified Hasura.GraphQL.Validate.Types as VT
introspectionQuery :: BL.ByteString
introspectionQuery = $(embedStringFile "src-rsr/introspection.json")
introspectionQuery :: GQLReqParsed
introspectionQuery =
$(do
let fp = "src-rsr/introspection.json"
TH.qAddDependentFile fp
eitherResult <- TH.runIO $ J.eitherDecodeFileStrict fp
case eitherResult of
Left e -> fail e
Right (r::GQLReqParsed) -> TH.lift r
)
fetchRemoteSchema
:: (HasVersion, MonadIO m, MonadError QErr m)
=> HTTP.Manager
-> RemoteSchemaName
-> RemoteSchemaInfo
-> m GC.RemoteGCtx
fetchRemoteSchema manager name def@(RemoteSchemaInfo url headerConf _ timeout) = do
=> HTTP.Manager -> RemoteSchemaInfo -> m GC.RemoteGCtx
fetchRemoteSchema manager def@(RemoteSchemaInfo name url headerConf _ timeout) = do
headers <- makeHeadersFromConf headerConf
let hdrsWithDefaults = addDefaultHeaders headers
@ -45,7 +53,7 @@ fetchRemoteSchema manager name def@(RemoteSchemaInfo url headerConf _ timeout) =
let req = initReq
{ HTTP.method = "POST"
, HTTP.requestHeaders = hdrsWithDefaults
, HTTP.requestBody = HTTP.RequestBodyLBS introspectionQuery
, HTTP.requestBody = HTTP.RequestBodyLBS $ J.encode introspectionQuery
, HTTP.responseTimeout = HTTP.responseTimeoutMicro (timeout * 1000000)
}
res <- liftIO $ try $ HTTP.httpLbs req manager
@ -62,12 +70,12 @@ fetchRemoteSchema manager name def@(RemoteSchemaInfo url headerConf _ timeout) =
typMap <- either remoteSchemaErr return $ VT.fromSchemaDoc sDoc $
VT.TLRemoteType name def
let mQrTyp = Map.lookup qRootN typMap
mMrTyp = maybe Nothing (`Map.lookup` typMap) mRootN
mSrTyp = maybe Nothing (`Map.lookup` typMap) sRootN
mMrTyp = (`Map.lookup` typMap) =<< mRootN
mSrTyp = (`Map.lookup` typMap) =<< sRootN
qrTyp <- liftMaybe noQueryRoot mQrTyp
let mRmQR = VT.getObjTyM qrTyp
mRmMR = join $ VT.getObjTyM <$> mMrTyp
mRmSR = join $ VT.getObjTyM <$> mSrTyp
mRmMR = VT.getObjTyM =<< mMrTyp
mRmSR = VT.getObjTyM =<< mSrTyp
rmQR <- liftMaybe (err400 Unexpected "query root has to be an object type") mRmQR
return $ GC.RemoteGCtx typMap rmQR mRmMR mRmSR
@ -110,9 +118,9 @@ mergeSchemas rmSchemaMap gCtxMap = do
mkDefaultRemoteGCtx
:: (MonadError QErr m)
=> [GC.RemoteGCtx] -> m GS.GCtx
=> [GC.GCtx] -> m GS.GCtx
mkDefaultRemoteGCtx =
foldlM (\combG -> mergeGCtx combG . convRemoteGCtx) GC.emptyGCtx
foldlM mergeGCtx GC.emptyGCtx
-- merge a remote schema `gCtx` into current `gCtxMap`
mergeRemoteSchema
@ -124,14 +132,6 @@ mergeRemoteSchema ctxMap mergedRemoteGCtx =
flip Map.traverseWithKey ctxMap $ \_ schemaCtx ->
for schemaCtx $ \gCtx -> mergeGCtx gCtx mergedRemoteGCtx
convRemoteGCtx :: GC.RemoteGCtx -> GS.GCtx
convRemoteGCtx rmGCtx =
GC.emptyGCtx { GS._gTypes = GC._rgTypes rmGCtx
, GS._gQueryRoot = GC._rgQueryRoot rmGCtx
, GS._gMutRoot = GC._rgMutationRoot rmGCtx
, GS._gSubRoot = GC._rgSubscriptionRoot rmGCtx
}
-- | Parsing the introspection query result
newtype FromIntrospection a
= FromIntrospection { fromIntrospection :: a }
@ -339,3 +339,49 @@ getNamedTyp ty = case ty of
G.TypeDefinitionUnion t -> G._utdName t
G.TypeDefinitionEnum t -> G._etdName t
G.TypeDefinitionInputObject t -> G._iotdName t
execRemoteGQ'
:: ( HasVersion
, MonadIO m
, MonadError QErr m
)
=> HTTP.Manager
-> UserInfo
-> [N.Header]
-> GQLReqUnparsed
-> RemoteSchemaInfo
-> G.OperationType
-> m (DiffTime, [N.Header], BL.ByteString)
execRemoteGQ' manager userInfo reqHdrs q rsi opType = do
when (opType == G.OperationTypeSubscription) $
throw400 NotSupported "subscription to remote server is not supported"
confHdrs <- makeHeadersFromConf hdrConf
let clientHdrs = bool [] (mkClientHeadersForward reqHdrs) fwdClientHdrs
-- filter out duplicate headers
-- priority: conf headers > resolved userinfo vars > client headers
hdrMaps = [ Map.fromList confHdrs
, Map.fromList userInfoToHdrs
, Map.fromList clientHdrs
]
headers = Map.toList $ foldr Map.union Map.empty hdrMaps
finalHeaders = addDefaultHeaders headers
initReqE <- liftIO $ try $ HTTP.parseRequest (show url)
initReq <- either httpThrow pure initReqE
let req = initReq
{ HTTP.method = "POST"
, HTTP.requestHeaders = finalHeaders
, HTTP.requestBody = HTTP.RequestBodyLBS (J.encode q)
, HTTP.responseTimeout = HTTP.responseTimeoutMicro (timeout * 1000000)
}
(time, res) <- withElapsedTime $ liftIO $ try $ HTTP.httpLbs req manager
resp <- either httpThrow return res
pure (time, mkSetCookieHeaders resp, resp ^. Wreq.responseBody)
where
RemoteSchemaInfo _ url hdrConf fwdClientHdrs timeout = rsi
httpThrow :: (MonadError QErr m) => HTTP.HttpException -> m a
httpThrow = \case
HTTP.HttpExceptionRequest _req content -> throw500 $ T.pack . show $ content
HTTP.InvalidUrlException _url reason -> throw500 $ T.pack . show $ reason
userInfoToHdrs = sessionVariablesToHeaders $ _uiSession userInfo

View File

@ -39,6 +39,7 @@ import qualified Hasura.GraphQL.Resolve.Introspect as RIntro
import qualified Hasura.GraphQL.Resolve.Mutation as RM
import qualified Hasura.GraphQL.Resolve.Select as RS
import qualified Hasura.GraphQL.Validate as V
import qualified Hasura.RQL.DML.RemoteJoin as RR
import qualified Hasura.RQL.DML.Select as DS
import qualified Hasura.SQL.DML as S
@ -67,14 +68,14 @@ traverseQueryRootFldAST f = \case
QRFActionExecuteObject s -> QRFActionExecuteObject <$> DS.traverseAnnSimpleSel f s
QRFActionExecuteList s -> QRFActionExecuteList <$> DS.traverseAnnSimpleSel f s
toPGQuery :: QueryRootFldResolved -> Q.Query
toPGQuery :: QueryRootFldResolved -> (Q.Query, Maybe RR.RemoteJoins)
toPGQuery = \case
QRFPk s -> Q.fromBuilder $ toSQL $ DS.mkSQLSelect DS.JASSingleObject s
QRFSimple s -> Q.fromBuilder $ toSQL $ DS.mkSQLSelect DS.JASMultipleRows s
QRFAgg s -> Q.fromBuilder $ toSQL $ DS.mkAggSelect s
QRFActionSelect s -> Q.fromBuilder $ toSQL $ DS.mkSQLSelect DS.JASSingleObject s
QRFActionExecuteObject s -> Q.fromBuilder $ toSQL $ DS.mkSQLSelect DS.JASSingleObject s
QRFActionExecuteList s -> Q.fromBuilder $ toSQL $ DS.mkSQLSelect DS.JASMultipleRows s
QRFPk s -> first (DS.selectQuerySQL DS.JASSingleObject) $ RR.getRemoteJoins s
QRFSimple s -> first (DS.selectQuerySQL DS.JASMultipleRows) $ RR.getRemoteJoins s
QRFAgg s -> first DS.selectAggQuerySQL $ RR.getRemoteJoinsAggSel s
QRFActionSelect s -> first (DS.selectQuerySQL DS.JASSingleObject) $ RR.getRemoteJoins s
QRFActionExecuteObject s -> first (DS.selectQuerySQL DS.JASSingleObject) $ RR.getRemoteJoins s
QRFActionExecuteList s -> first (DS.selectQuerySQL DS.JASMultipleRows) $ RR.getRemoteJoins s
validateHdrs
:: (Foldable t, QErrM m) => UserInfo -> t Text -> m ()
@ -154,28 +155,31 @@ mutFldToTx
-> m (RespTx, HTTP.ResponseHeaders)
mutFldToTx fld = do
userInfo <- asks getter
reqHeaders <- asks getter
httpManager <- asks getter
let rjCtx = (httpManager, reqHeaders, userInfo)
opCtx <- getOpCtx $ V._fName fld
let noRespHeaders = fmap (,[])
roleName = _uiRole userInfo
case opCtx of
MCInsert ctx -> do
validateHdrs userInfo (_iocHeaders ctx)
noRespHeaders $ RI.convertInsert roleName (_iocTable ctx) fld
noRespHeaders $ RI.convertInsert rjCtx roleName (_iocTable ctx) fld
MCInsertOne ctx -> do
validateHdrs userInfo (_iocHeaders ctx)
noRespHeaders $ RI.convertInsertOne roleName (_iocTable ctx) fld
noRespHeaders $ RI.convertInsertOne rjCtx roleName (_iocTable ctx) fld
MCUpdate ctx -> do
validateHdrs userInfo (_uocHeaders ctx)
noRespHeaders $ RM.convertUpdate ctx fld
noRespHeaders $ RM.convertUpdate ctx rjCtx fld
MCUpdateByPk ctx -> do
validateHdrs userInfo (_uocHeaders ctx)
noRespHeaders $ RM.convertUpdateByPk ctx fld
noRespHeaders $ RM.convertUpdateByPk ctx rjCtx fld
MCDelete ctx -> do
validateHdrs userInfo (_docHeaders ctx)
noRespHeaders $ RM.convertDelete ctx fld
noRespHeaders $ RM.convertDelete ctx rjCtx fld
MCDeleteByPk ctx -> do
validateHdrs userInfo (_docHeaders ctx)
noRespHeaders $ RM.convertDeleteByPk ctx fld
noRespHeaders $ RM.convertDeleteByPk ctx rjCtx fld
MCAction ctx ->
RA.resolveActionMutation fld ctx (_uiSession userInfo)

View File

@ -43,7 +43,6 @@ import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Resolve.Select (processTableSelectionSet)
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.Types
import Hasura.HTTP
import Hasura.RQL.DDL.Headers (makeHeadersFromConf, toHeadersConf)
import Hasura.RQL.DDL.Schema.Cache
@ -54,8 +53,7 @@ import Hasura.Server.Utils (mkClientHeadersForward, mkSe
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types
import Hasura.SQL.Value (PGScalarValue (..), pgScalarValueToJson,
toTxtValue)
import Hasura.SQL.Value (PGScalarValue (..),toTxtValue)
newtype ActionContext
= ActionContext {_acName :: ActionName}
@ -536,16 +534,6 @@ callWebhook manager outputType outputFields reqHeaders confHeaders
Just v -> when (v == J.Null) $ throwUnexpected $
"expecting not null value for field " <>> fieldName
annInpValueToJson :: AnnInpVal -> J.Value
annInpValueToJson annInpValue =
case _aivValue annInpValue of
AGScalar _ pgColumnValueM -> maybe J.Null pgScalarValueToJson pgColumnValueM
AGEnum _ enumValue -> case enumValue of
AGESynthetic enumValueM -> J.toJSON enumValueM
AGEReference _ enumValueM -> J.toJSON enumValueM
AGObject _ objectM -> J.toJSON $ fmap (fmap annInpValueToJson) objectM
AGArray _ valuesM -> J.toJSON $ fmap (fmap annInpValueToJson) valuesM
mkJsonAggSelect :: GraphQLType -> RS.JsonAggSelect
mkJsonAggSelect =
bool RS.JASSingleObject RS.JASMultipleRows . isListType

View File

@ -169,6 +169,8 @@ parseColExp nt n val = do
fmapAnnBoolExp partialSQLExpToUnresolvedVal permExp
RFComputedField _ -> throw500
"computed fields are not allowed in bool_exp"
RFRemoteRelationship _ -> throw500
"remote relationships are not allowed in bool_exp"
parseBoolExp
:: ( MonadReusability m

View File

@ -65,9 +65,10 @@ getPGColInfo
getPGColInfo nt n = do
fldInfo <- getFldInfo nt n
case fldInfo of
RFPGColumn pgColInfo -> return pgColInfo
RFRelationship _ -> throw500 $ mkErrMsg "relation"
RFComputedField _ -> throw500 $ mkErrMsg "computed field"
RFPGColumn pgColInfo -> return pgColInfo
RFRelationship _ -> throw500 $ mkErrMsg "relation"
RFComputedField _ -> throw500 $ mkErrMsg "computed field"
RFRemoteRelationship _ -> throw500 $ mkErrMsg "remote relationship"
where
mkErrMsg ty =
"found " <> ty <> " when expecting pgcolinfo for "

View File

@ -24,6 +24,7 @@ module Hasura.GraphQL.Resolve.InputValue
, parseMany
, asPGColText
, asPGColTextM
, annInpValueToJson
) where
import Hasura.Prelude
@ -31,7 +32,7 @@ import Hasura.Prelude
import qualified Text.Builder as TB
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson as J
import qualified Hasura.RQL.Types as RQL
import Hasura.GraphQL.Resolve.Context
@ -214,3 +215,13 @@ asPGColTextM :: (MonadReusability m, MonadError QErr m) => AnnInpVal -> m (Maybe
asPGColTextM val = do
pgColValM <- traverse openOpaqueValue =<< asPGColumnValueM val
traverse onlyText (pstValue . _apvValue <$> pgColValM)
annInpValueToJson :: AnnInpVal -> J.Value
annInpValueToJson annInpValue =
case _aivValue annInpValue of
AGScalar _ pgColumnValueM -> maybe J.Null pgScalarValueToJson pgColumnValueM
AGEnum _ enumValue -> case enumValue of
AGESynthetic enumValueM -> J.toJSON enumValueM
AGEReference _ enumValueM -> J.toJSON enumValueM
AGObject _ objectM -> J.toJSON $ fmap (fmap annInpValueToJson) objectM
AGArray _ valuesM -> J.toJSON $ fmap (fmap annInpValueToJson) valuesM

View File

@ -33,10 +33,12 @@ import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Insert (insertOrUpdateCheckExpr)
import Hasura.RQL.DML.Internal (convAnnBoolExpPartialSQL, convPartialSQLExp,
dmlTxErrorHandler, sessVarFromCurrentSetting)
sessVarFromCurrentSetting)
import Hasura.RQL.DML.Mutation
import Hasura.RQL.DML.RemoteJoin
import Hasura.RQL.GBoolExp (toSQLBoolExp)
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value
@ -294,13 +296,15 @@ validateInsert insCols objRels addCols = do
-- | insert an object relationship and return affected rows
-- | and parent dependent columns
insertObjRel
:: Bool
:: (HasVersion, MonadTx m, MonadIO m)
=> Bool
-> MutationRemoteJoinCtx
-> RoleName
-> ObjRelIns
-> Q.TxE QErr (Int, [PGColWithValue])
insertObjRel strfyNum role objRelIns =
-> m (Int, [PGColWithValue])
insertObjRel strfyNum rjCtx role objRelIns =
withPathK relNameTxt $ do
(affRows, colValM) <- withPathK "data" $ insertObj strfyNum role tn singleObjIns []
(affRows, colValM) <- withPathK "data" $ insertObj strfyNum rjCtx role tn singleObjIns []
colVal <- onNothing colValM $ throw400 NotSupported errMsg
retColsWithVals <- fetchFromColVals colVal rColInfos
let c = mergeListsWith (Map.toList mapCols) retColsWithVals
@ -328,18 +332,20 @@ decodeEncJSON =
-- | insert an array relationship and return affected rows
insertArrRel
:: Bool
:: (HasVersion, MonadTx m, MonadIO m)
=> Bool
-> MutationRemoteJoinCtx
-> RoleName
-> [PGColWithValue]
-> ArrRelIns
-> Q.TxE QErr Int
insertArrRel strfyNum role resCols arrRelIns =
-> m Int
insertArrRel strfyNum rjCtx role resCols arrRelIns =
withPathK relNameTxt $ do
let addCols = mergeListsWith resCols (Map.toList colMapping)
(\(col, _) (lCol, _) -> col == lCol)
(\(_, colVal) (_, rCol) -> (rCol, colVal))
resBS <- insertMultipleObjects strfyNum role tn multiObjIns addCols mutOutput "data"
resBS <- insertMultipleObjects strfyNum rjCtx role tn multiObjIns addCols mutOutput "data"
resObj <- decodeEncJSON resBS
onNothing (Map.lookup ("affected_rows" :: T.Text) resObj) $
throw500 "affected_rows not returned in array rel insert"
@ -352,18 +358,20 @@ insertArrRel strfyNum role resCols arrRelIns =
-- | insert an object with object and array relationships
insertObj
:: Bool
:: (HasVersion, MonadTx m, MonadIO m)
=> Bool
-> MutationRemoteJoinCtx
-> RoleName
-> QualifiedTable
-> SingleObjIns
-> [PGColWithValue] -- ^ additional fields
-> Q.TxE QErr (Int, Maybe ColumnValuesText)
insertObj strfyNum role tn singleObjIns addCols = do
-> m (Int, Maybe ColumnValuesText)
insertObj strfyNum rjCtx role tn singleObjIns addCols = do
-- validate insert
validateInsert (map fst cols) (map _riRelInfo objRels) $ map fst addCols
-- insert all object relations and fetch this insert dependent column values
objInsRes <- forM objRels $ insertObjRel strfyNum role
objInsRes <- forM objRels $ insertObjRel strfyNum rjCtx role
-- prepare final insert columns
let objRelAffRows = sum $ map fst objInsRes
@ -377,7 +385,7 @@ insertObj strfyNum role tn singleObjIns addCols = do
CTEExp cte insPArgs <-
mkInsertQ tn onConflictM finalInsCols defVals role (insCheck, updCheck)
MutateResp affRows colVals <- mutateAndFetchCols tn allCols (cte, insPArgs) strfyNum
MutateResp affRows colVals <- liftTx $ mutateAndFetchCols tn allCols (cte, insPArgs) strfyNum
colValM <- asSingleObject colVals
arrRelAffRows <- bool (withArrRels colValM) (return 0) $ null arrRels
@ -394,7 +402,7 @@ insertObj strfyNum role tn singleObjIns addCols = do
withArrRels colValM = do
colVal <- onNothing colValM $ throw400 NotSupported cannotInsArrRelErr
arrDepColsWithVal <- fetchFromColVals colVal arrRelDepCols
arrInsARows <- forM arrRels $ insertArrRel strfyNum role arrDepColsWithVal
arrInsARows <- forM arrRels $ insertArrRel strfyNum rjCtx role arrDepColsWithVal
return $ sum arrInsARows
asSingleObject = \case
@ -409,15 +417,17 @@ insertObj strfyNum role tn singleObjIns addCols = do
-- | insert multiple Objects in postgres
insertMultipleObjects
:: Bool
:: (HasVersion, MonadTx m, MonadIO m)
=> Bool
-> MutationRemoteJoinCtx
-> RoleName
-> QualifiedTable
-> MultiObjIns
-> [PGColWithValue] -- ^ additional fields
-> RR.MutationOutput
-> T.Text -- ^ error path
-> Q.TxE QErr EncJSON
insertMultipleObjects strfyNum role tn multiObjIns addCols mutOutput errP =
-> m EncJSON
insertMultipleObjects strfyNum rjCtx role tn multiObjIns addCols mutOutput errP =
bool withoutRelsInsert withRelsInsert anyRelsToInsert
where
AnnIns insObjs onConflictM (insCond, updCond) tableColInfos defVals = multiObjIns
@ -447,33 +457,35 @@ insertMultipleObjects strfyNum role tn multiObjIns addCols mutOutput errP =
let insQP1 = RI.InsertQueryP1 tn tableCols sqlRows onConflictM
(insCheck, updCheck) mutOutput tableColInfos
p1 = (insQP1, prepArgs)
RI.insertP2 strfyNum p1
RI.execInsertQuery strfyNum (Just rjCtx) p1
-- insert each object with relations
withRelsInsert = withErrPath $ do
insResps <- indexedForM singleObjInserts $ \objIns ->
insertObj strfyNum role tn objIns addCols
insertObj strfyNum rjCtx role tn objIns addCols
let affRows = sum $ map fst insResps
columnValues = mapMaybe snd insResps
cteExp <- mkSelCTEFromColVals tn tableColInfos columnValues
let sql = toSQL $ RR.mkMutationOutputExp tn tableColInfos (Just affRows) cteExp mutOutput strfyNum
runIdentity . Q.getRow
<$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sql) [] False
let (mutOutputRJ, remoteJoins) = getRemoteJoinsMutationOutput mutOutput
sqlQuery = Q.fromBuilder $ toSQL $
RR.mkMutationOutputExp tn tableColInfos (Just affRows) cteExp mutOutputRJ strfyNum
executeMutationOutputQuery sqlQuery [] $ (,rjCtx) <$> remoteJoins
prefixErrPath :: (MonadError QErr m) => Field -> m a -> m a
prefixErrPath fld =
withPathK "selectionSet" . fieldAsPath fld . withPathK "args"
convertInsert
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r, Has InsCtxMap r
:: ( HasVersion, MonadReusability m, MonadError QErr m, MonadReader r m
, Has FieldMap r , Has OrdByCtx r, Has SQLGenCtx r, Has InsCtxMap r
)
=> RoleName
=> MutationRemoteJoinCtx
-> RoleName
-> QualifiedTable -- table
-> Field -- the mutation field
-> m RespTx
convertInsert role tn fld = prefixErrPath fld $ do
convertInsert rjCtx role tn fld = prefixErrPath fld $ do
mutOutputUnres <- RR.MOutMultirowFields <$> resolveMutationFields (_fType fld) (_fSelSet fld)
mutOutputRes <- RR.traverseMutationOutput resolveValTxt mutOutputUnres
annVals <- withArg arguments "objects" asArray
@ -493,7 +505,7 @@ convertInsert role tn fld = prefixErrPath fld $ do
tableCols defValMapRes
tableCols = Map.elems tableColMap
strfyNum <- stringifyNum <$> asks getter
return $ prefixErrPath fld $ insertMultipleObjects strfyNum role tn
return $ prefixErrPath fld $ insertMultipleObjects strfyNum rjCtx role tn
multiObjIns [] mutOutput "objects"
withEmptyObjs mutOutput =
return $ return $ buildEmptyMutResp mutOutput
@ -501,14 +513,15 @@ convertInsert role tn fld = prefixErrPath fld $ do
onConflictM = Map.lookup "on_conflict" arguments
convertInsertOne
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r, Has InsCtxMap r
:: ( HasVersion, MonadReusability m, MonadError QErr m, MonadReader r m
, Has FieldMap r , Has OrdByCtx r, Has SQLGenCtx r, Has InsCtxMap r
)
=> RoleName
=> MutationRemoteJoinCtx
-> RoleName
-> QualifiedTable -- table
-> Field -- the mutation field
-> m RespTx
convertInsertOne role qt field = prefixErrPath field $ do
convertInsertOne rjCtx role qt field = prefixErrPath field $ do
tableSelFields <- processTableSelectionSet (_fType field) $ _fSelSet field
let mutationOutputUnresolved = RR.MOutSinglerowObject tableSelFields
mutationOutputResolved <- RR.traverseMutationOutput resolveValTxt mutationOutputUnresolved
@ -521,7 +534,7 @@ convertInsertOne role qt field = prefixErrPath field $ do
tableCols defValMapRes
tableCols = Map.elems tableColMap
strfyNum <- stringifyNum <$> asks getter
pure $ prefixErrPath field $ insertMultipleObjects strfyNum role qt
pure $ prefixErrPath field $ insertMultipleObjects strfyNum rjCtx role qt
multiObjIns [] mutationOutputResolved "object"
where
arguments = _fArguments field

View File

@ -36,7 +36,9 @@ import Hasura.GraphQL.Resolve.Select (processTableSelectionSet)
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
import Hasura.RQL.DML.Mutation (MutationRemoteJoinCtx)
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value
@ -210,22 +212,22 @@ convertUpdateP1 opCtx boolExpParser selectionResolver fld = do
Right items -> pure $ resolvedPreSetItems <> OMap.toList items
convertUpdateGeneric
:: ( MonadReusability m, MonadError QErr m
, MonadReader r m
, Has SQLGenCtx r
:: ( HasVersion, MonadReusability m, MonadError QErr m
, MonadReader r m , Has SQLGenCtx r
)
=> UpdOpCtx -- the update context
-> MutationRemoteJoinCtx
-> (ArgsMap -> m AnnBoolExpUnresolved) -- the bool exp parser
-> (Field -> m (RR.MutationOutputG UnresolvedVal)) -- the selection set resolver
-> Field
-> m RespTx
convertUpdateGeneric opCtx boolExpParser selectionResolver fld = do
convertUpdateGeneric opCtx rjCtx boolExpParser selectionResolver fld = do
annUpdUnresolved <- convertUpdateP1 opCtx boolExpParser selectionResolver fld
(annUpdResolved, prepArgs) <- withPrepArgs $ RU.traverseAnnUpd
resolveValPrep annUpdUnresolved
strfyNum <- stringifyNum <$> asks getter
let whenNonEmptyItems = return $ RU.updateQueryToTx strfyNum
(annUpdResolved, prepArgs)
let whenNonEmptyItems = return $ RU.execUpdateQuery strfyNum
(Just rjCtx) (annUpdResolved, prepArgs)
whenEmptyItems = return $ return $
buildEmptyMutResp $ RU.uqp1Output annUpdResolved
-- if there are not set items then do not perform
@ -233,26 +235,28 @@ convertUpdateGeneric opCtx boolExpParser selectionResolver fld = do
bool whenNonEmptyItems whenEmptyItems $ null $ RU.uqp1SetExps annUpdResolved
convertUpdate
:: ( MonadReusability m, MonadError QErr m
:: ( HasVersion, MonadReusability m, MonadError QErr m
, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> UpdOpCtx -- the update context
-> MutationRemoteJoinCtx
-> Field -- the mutation field
-> m RespTx
convertUpdate opCtx =
convertUpdateGeneric opCtx whereExpressionParser mutationFieldsResolver
convertUpdate opCtx rjCtx =
convertUpdateGeneric opCtx rjCtx whereExpressionParser mutationFieldsResolver
convertUpdateByPk
:: ( MonadReusability m, MonadError QErr m
:: ( HasVersion, MonadReusability m, MonadError QErr m
, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> UpdOpCtx -- the update context
-> MutationRemoteJoinCtx
-> Field -- the mutation field
-> m RespTx
convertUpdateByPk opCtx field =
convertUpdateGeneric opCtx boolExpParser tableSelectionAsMutationOutput field
convertUpdateByPk opCtx rjCtx field =
convertUpdateGeneric opCtx rjCtx boolExpParser tableSelectionAsMutationOutput field
where
boolExpParser args = withArg args "pk_columns" $ \inpVal -> do
obj <- asObject inpVal
@ -260,16 +264,16 @@ convertUpdateByPk opCtx field =
convertDeleteGeneric
:: ( MonadReusability m
, MonadReader r m
, Has SQLGenCtx r
:: ( HasVersion, MonadReusability m
, MonadReader r m, Has SQLGenCtx r
)
=> DelOpCtx -- the delete context
-> MutationRemoteJoinCtx
-> (ArgsMap -> m AnnBoolExpUnresolved) -- the bool exp parser
-> (Field -> m (RR.MutationOutputG UnresolvedVal)) -- the selection set resolver
-> Field -- the mutation field
-> m RespTx
convertDeleteGeneric opCtx boolExpParser selectionResolver fld = do
convertDeleteGeneric opCtx rjCtx boolExpParser selectionResolver fld = do
whereExp <- boolExpParser $ _fArguments fld
mutOutput <- selectionResolver fld
let unresolvedPermFltr =
@ -279,32 +283,34 @@ convertDeleteGeneric opCtx boolExpParser selectionResolver fld = do
(annDelResolved, prepArgs) <- withPrepArgs $ RD.traverseAnnDel
resolveValPrep annDelUnresolved
strfyNum <- stringifyNum <$> asks getter
return $ RD.deleteQueryToTx strfyNum (annDelResolved, prepArgs)
return $ RD.execDeleteQuery strfyNum (Just rjCtx) (annDelResolved, prepArgs)
where
DelOpCtx tn _ colGNameMap filterExp = opCtx
allCols = Map.elems colGNameMap
convertDelete
:: ( MonadReusability m, MonadError QErr m
:: ( HasVersion, MonadReusability m, MonadError QErr m
, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> DelOpCtx -- the delete context
-> MutationRemoteJoinCtx
-> Field -- the mutation field
-> m RespTx
convertDelete opCtx =
convertDeleteGeneric opCtx whereExpressionParser mutationFieldsResolver
convertDelete opCtx rjCtx =
convertDeleteGeneric opCtx rjCtx whereExpressionParser mutationFieldsResolver
convertDeleteByPk
:: ( MonadReusability m, MonadError QErr m
:: ( HasVersion, MonadReusability m, MonadError QErr m
, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> DelOpCtx -- the delete context
-> MutationRemoteJoinCtx
-> Field -- the mutation field
-> m RespTx
convertDeleteByPk opCtx field =
convertDeleteGeneric opCtx boolExpParser tableSelectionAsMutationOutput field
convertDeleteByPk opCtx rjCtx field =
convertDeleteGeneric opCtx rjCtx boolExpParser tableSelectionAsMutationOutput field
where
boolExpParser = pgColValToBoolExp (_docAllCols opCtx)

View File

@ -28,6 +28,7 @@ import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Schema (isAggFld)
import Hasura.GraphQL.Validate
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Internal (onlyPositiveInt)
@ -109,8 +110,10 @@ processTableSelectionSet fldTy flds =
case fldInfo of
RFPGColumn colInfo ->
RS.mkAnnColField colInfo <$> argsToColOp (_fArguments fld)
RFComputedField computedField ->
RS.FComputedField <$> resolveComputedField computedField fld
RFRelationship (RelationshipField relInfo isAgg colGNameMap tableFilter tableLimit) -> do
let relTN = riRTable relInfo
colMapping = riMapping relInfo
@ -125,6 +128,14 @@ processTableSelectionSet fldTy flds =
ObjRel -> RS.FObj annRel
ArrRel -> RS.FArr $ RS.ASSimple annRel
RFRemoteRelationship info ->
pure $ RS.FRemote $ RS.RemoteSelect
(unValidateArgsMap $ _fArguments fld) -- Unvalidate the input arguments
(map unValidateField $ toList $ _fSelSet fld) -- Unvalidate the selection fields
(_rfiHasuraFields info)
(_rfiRemoteFields info)
(_rfiRemoteSchema info)
type TableAggFlds = RS.TableAggFldsG UnresolvedVal
fromAggSelSet
@ -133,13 +144,11 @@ fromAggSelSet
)
=> PGColGNameMap -> G.NamedType -> SelSet -> m TableAggFlds
fromAggSelSet colGNameMap fldTy selSet = fmap toFields $
withSelSet selSet $ \f -> do
let fTy = _fType f
fSelSet = _fSelSet f
case _fName f of
withSelSet selSet $ \Field{..} ->
case _fName of
"__typename" -> return $ RS.TAFExp $ G.unName $ G.unNamedType fldTy
"aggregate" -> RS.TAFAgg <$> convertAggFld colGNameMap fTy fSelSet
"nodes" -> RS.TAFNodes <$> processTableSelectionSet fTy fSelSet
"aggregate" -> RS.TAFAgg <$> convertAggFld colGNameMap _fType _fSelSet
"nodes" -> RS.TAFNodes <$> processTableSelectionSet _fType _fSelSet
G.Name t -> throw500 $ "unexpected field in _agg node: " <> t
type TableArgs = RS.TableArgsG UnresolvedVal
@ -395,14 +404,12 @@ convertAggFld
:: (MonadReusability m, MonadError QErr m)
=> PGColGNameMap -> G.NamedType -> SelSet -> m RS.AggFlds
convertAggFld colGNameMap ty selSet = fmap toFields $
withSelSet selSet $ \fld -> do
let fType = _fType fld
fSelSet = _fSelSet fld
case _fName fld of
withSelSet selSet $ \Field{..} ->
case _fName of
"__typename" -> return $ RS.AFExp $ G.unName $ G.unNamedType ty
"count" -> RS.AFCount <$> convertCount colGNameMap (_fArguments fld)
"count" -> RS.AFCount <$> convertCount colGNameMap _fArguments
n -> do
colFlds <- convertColFlds colGNameMap fType fSelSet
colFlds <- convertColFlds colGNameMap _fType _fSelSet
unless (isAggFld n) $ throwInvalidFld n
return $ RS.AFOp $ RS.AggOp (G.unName n) colFlds
where

View File

@ -7,13 +7,13 @@ module Hasura.GraphQL.Resolve.Types
import Control.Lens.TH
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DDL.Headers (HeaderConf)
import Hasura.RQL.DDL.Headers (HeaderConf)
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Column
@ -21,11 +21,12 @@ import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.RemoteRelationship
import Hasura.Session
import Hasura.SQL.Types
import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
import qualified Hasura.SQL.DML as S
data QueryCtx
= QCSelect !SelOpCtx
@ -66,6 +67,8 @@ data SelOpCtx
, _socLimit :: !(Maybe Int)
} deriving (Show, Eq)
type PGColArgMap = Map.HashMap G.Name PGColumnInfo
data SelPkOpCtx
= SelPkOpCtx
{ _spocTable :: !QualifiedTable
@ -166,6 +169,7 @@ data ResolveField
= RFPGColumn !PGColumnInfo
| RFRelationship !RelationshipField
| RFComputedField !ComputedField
| RFRemoteRelationship !RemoteFieldInfo
deriving (Show, Eq)
type FieldMap = Map.HashMap (G.NamedType, G.Name) ResolveField
@ -210,8 +214,6 @@ data InsCtx
type InsCtxMap = Map.HashMap QualifiedTable InsCtx
type PGColArgMap = Map.HashMap G.Name PGColumnInfo
data AnnPGVal
= AnnPGVal
{ _apvVariable :: !(Maybe G.Variable)

View File

@ -11,27 +11,25 @@ module Hasura.GraphQL.Schema
, isAggFld
, qualObjectToName
, ppGCtx
, checkConflictingNode
, checkSchemaConflicts
) where
import Control.Lens.Extended hiding (op)
import Data.List.Extended (duplicates)
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Context
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Validate.Types
import Hasura.Prelude
import Hasura.RQL.DML.Internal (mkAdminRolePermInfo)
import Hasura.RQL.Types
import Hasura.Server.Utils (duplicates)
import Hasura.Session
import Hasura.SQL.Types
@ -73,6 +71,17 @@ isValidCol = G.isValidName . pgiName
isValidRel :: ToTxt a => RelName -> QualifiedObject a -> Bool
isValidRel rn rt = G.isValidName (mkRelName rn) && isValidObjectName rt
isValidRemoteRel :: RemoteFieldInfo -> Bool
isValidRemoteRel =
G.isValidName . mkRemoteRelationshipName . _rfiName
isValidField :: FieldInfo -> Bool
isValidField = \case
FIColumn colInfo -> isValidCol colInfo
FIRelationship (RelInfo rn _ _ remTab _) -> isValidRel rn remTab
FIComputedField info -> G.isValidName $ mkComputedFieldName $ _cfiName info
FIRemoteRelationship remoteField -> isValidRemoteRel remoteField
upsertable :: [ConstraintName] -> Bool -> Bool -> Bool
upsertable uniqueOrPrimaryCons isUpsertAllowed isAView =
not (null uniqueOrPrimaryCons) && isUpsertAllowed && not isAView
@ -209,6 +218,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
selFldsM = snd <$> selPermM
selColNamesM = (map pgiName . getPGColumnFields) <$> selFldsM
selColInpTyM = mkSelColumnTy tn <$> selColNamesM
-- boolexp input type
boolExpInpObjM = case selFldsM of
Just selFlds -> Just $ mkBoolExpInp tn selFlds
@ -244,6 +254,10 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
( (ty, mkComputedFieldName $ _cfName cf)
, RFComputedField cf
)
SFRemoteRelationship remoteField -> pure
( (ty, G.Name (remoteRelationshipNameToText (_rfiName remoteField)))
, RFRemoteRelationship remoteField
)
-- the fields used in bool exp
boolExpInpObjFldsM = mkFldMap (mkBoolExpTy tn) <$> selFldsM
@ -472,8 +486,11 @@ getSelPerm
-> RoleName -> SelPermInfo
-> m (Bool, [SelField])
getSelPerm tableCache fields roleName selPermInfo = do
relFlds <- fmap catMaybes $ forM validRels $ \relInfo -> do
selFlds <- fmap catMaybes $ forM (filter isValidField $ Map.elems fields) $ \case
FIColumn pgColInfo ->
return $ fmap SFPGColumn $ bool Nothing (Just pgColInfo) $
Set.member (pgiColumn pgColInfo) $ spiCols selPermInfo
FIRelationship relInfo -> do
remTableInfo <- getTabInfo tableCache $ riRTable relInfo
let remTableSelPermM = getSelPermission remTableInfo roleName
remTableFlds = _tciFieldInfoMap $ _tiCoreInfo remTableInfo
@ -488,40 +505,29 @@ getSelPerm tableCache fields roleName selPermInfo = do
, _rfiPermLimit = spiLimit rmSelPermM
, _rfiIsNullable = isRelNullable fields relInfo
}
FIComputedField info -> do
let ComputedFieldInfo name function returnTy _ = info
inputArgSeq = mkComputedFieldFunctionArgSeq $ _cffInputArgs function
fmap (SFComputedField . ComputedField name function inputArgSeq) <$>
case returnTy of
CFRScalar scalarTy -> pure $ Just $ CFTScalar scalarTy
CFRSetofTable retTable -> do
retTableInfo <- getTabInfo tableCache retTable
let retTableSelPermM = getSelPermission retTableInfo roleName
retTableFlds = _tciFieldInfoMap $ _tiCoreInfo retTableInfo
retTableColGNameMap =
mkPGColGNameMap $ getValidCols retTableFlds
pure $ flip fmap retTableSelPermM $
\selPerm -> CFTTable ComputedFieldTable
{ _cftTable = retTable
, _cftCols = retTableColGNameMap
, _cftPermFilter = spiFilter selPerm
, _cftPermLimit = spiLimit selPerm
}
-- TODO: Derive permissions for remote relationships
FIRemoteRelationship remoteField -> pure $ Just (SFRemoteRelationship remoteField)
computedSelFields <- fmap catMaybes $ forM computedFields $ \info -> do
let ComputedFieldInfo name function returnTy _ = info
inputArgSeq = mkComputedFieldFunctionArgSeq $ _cffInputArgs function
fmap (SFComputedField . ComputedField name function inputArgSeq) <$>
case returnTy of
CFRScalar scalarTy -> pure $ Just $ CFTScalar scalarTy
CFRSetofTable retTable -> do
retTableInfo <- getTabInfo tableCache retTable
let retTableSelPermM = getSelPermission retTableInfo roleName
retTableFlds = _tciFieldInfoMap $ _tiCoreInfo retTableInfo
retTableColGNameMap =
mkPGColGNameMap $ getValidCols retTableFlds
pure $ flip fmap retTableSelPermM $
\selPerm -> CFTTable ComputedFieldTable
{ _cftTable = retTable
, _cftCols = retTableColGNameMap
, _cftPermFilter = spiFilter selPerm
, _cftPermLimit = spiLimit selPerm
}
return (spiAllowAgg selPermInfo, cols <> relFlds <> computedSelFields)
where
validRels = getValidRels fields
validCols = getValidCols fields
cols = map SFPGColumn $ getColInfos (toList allowedCols) validCols
computedFields = flip filter (getComputedFieldInfos fields) $
\info -> case _cfiReturnType info of
CFRScalar _ ->
_cfiName info `Set.member` allowedScalarComputedFields
CFRSetofTable _ -> True
allowedCols = spiCols selPermInfo
allowedScalarComputedFields = spiScalarComputedFields selPermInfo
return (spiAllowAgg selPermInfo, selFlds)
mkInsCtx
:: MonadError QErr m
@ -585,46 +591,44 @@ mkAdminSelFlds
=> FieldInfoMap FieldInfo
-> TableCache
-> m [SelField]
mkAdminSelFlds fields tableCache = do
relSelFlds <- forM validRels $ \relInfo -> do
let remoteTable = riRTable relInfo
remoteTableInfo <- _tiCoreInfo <$> getTabInfo tableCache remoteTable
let remoteTableFlds = _tciFieldInfoMap remoteTableInfo
remoteTableColGNameMap =
mkPGColGNameMap $ getValidCols remoteTableFlds
return $ SFRelationship RelationshipFieldInfo
{ _rfiInfo = relInfo
, _rfiAllowAgg = True
, _rfiColumns = remoteTableColGNameMap
, _rfiPermFilter = noFilter
, _rfiPermLimit = Nothing
, _rfiIsNullable = isRelNullable fields relInfo
}
mkAdminSelFlds fields tableCache =
forM (filter isValidField $ Map.elems fields) $ \case
FIColumn info -> pure $ SFPGColumn info
computedSelFields <- forM computedFields $ \info -> do
let ComputedFieldInfo name function returnTy _ = info
inputArgSeq = mkComputedFieldFunctionArgSeq $ _cffInputArgs function
(SFComputedField . ComputedField name function inputArgSeq) <$>
case returnTy of
CFRScalar scalarTy -> pure $ CFTScalar scalarTy
CFRSetofTable retTable -> do
retTableInfo <- _tiCoreInfo <$> getTabInfo tableCache retTable
let retTableFlds = _tciFieldInfoMap retTableInfo
retTableColGNameMap =
mkPGColGNameMap $ getValidCols retTableFlds
pure $ CFTTable ComputedFieldTable
{ _cftTable = retTable
, _cftCols = retTableColGNameMap
, _cftPermFilter = noFilter
, _cftPermLimit = Nothing
}
FIRelationship info -> do
let remoteTable = riRTable info
remoteTableInfo <- _tiCoreInfo <$> getTabInfo tableCache remoteTable
let remoteTableFlds = _tciFieldInfoMap remoteTableInfo
remoteTableColGNameMap =
mkPGColGNameMap $ getValidCols remoteTableFlds
return $ SFRelationship RelationshipFieldInfo
{ _rfiInfo = info
, _rfiAllowAgg = True
, _rfiColumns = remoteTableColGNameMap
, _rfiPermFilter = noFilter
, _rfiPermLimit = Nothing
, _rfiIsNullable = isRelNullable fields info
}
return $ colSelFlds <> relSelFlds <> computedSelFields
where
cols = getValidCols fields
colSelFlds = map SFPGColumn cols
validRels = getValidRels fields
computedFields = getComputedFieldInfos fields
FIComputedField info -> do
let ComputedFieldInfo name function returnTy _ = info
inputArgSeq = mkComputedFieldFunctionArgSeq $ _cffInputArgs function
(SFComputedField . ComputedField name function inputArgSeq) <$>
case returnTy of
CFRScalar scalarTy -> pure $ CFTScalar scalarTy
CFRSetofTable retTable -> do
retTableInfo <- _tiCoreInfo <$> getTabInfo tableCache retTable
let retTableFlds = _tciFieldInfoMap retTableInfo
retTableColGNameMap =
mkPGColGNameMap $ getValidCols retTableFlds
pure $ CFTTable ComputedFieldTable
{ _cftTable = retTable
, _cftCols = retTableColGNameMap
, _cftPermFilter = noFilter
, _cftPermLimit = Nothing
}
FIRemoteRelationship info -> pure $ SFRemoteRelationship info
mkGCtxRole
:: (MonadError QErr m)

View File

@ -6,8 +6,6 @@ import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Language.GraphQL.Draft.Syntax as G
import Data.Coerce (coerce)
import Hasura.GraphQL.Schema.Builder
import Hasura.GraphQL.Schema.Common (mkDescriptionWith)

View File

@ -290,3 +290,4 @@ mkBoolExpInp tn fields =
remoteTable = riRTable $ _rfiInfo relationshipField
in Just $ mk (mkRelName relationshipName) (mkBoolExpTy remoteTable)
SFComputedField _ -> Nothing -- TODO: support computed fields in bool exps
SFRemoteRelationship{} -> Nothing

View File

@ -9,6 +9,7 @@ module Hasura.GraphQL.Schema.Common
, getPGColumnFields
, getRelationshipFields
, getComputedFields
, getRemoteRelationships
, mkColumnType
, mkRelName
@ -52,6 +53,7 @@ data SelField
= SFPGColumn !PGColumnInfo
| SFRelationship !RelationshipFieldInfo
| SFComputedField !ComputedField
| SFRemoteRelationship !RemoteFieldInfo
deriving (Show, Eq)
$(makePrisms ''SelField)
@ -64,6 +66,9 @@ getRelationshipFields = mapMaybe (^? _SFRelationship)
getComputedFields :: [SelField] -> [ComputedField]
getComputedFields = mapMaybe (^? _SFComputedField)
getRemoteRelationships :: [SelField] -> [RemoteFieldInfo]
getRemoteRelationships = mapMaybe (^? _SFRemoteRelationship)
qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name
qualObjectToName = G.Name . snakeCaseQualObject

View File

@ -9,6 +9,7 @@ module Hasura.GraphQL.Schema.Select
, mkAggSelFld
, mkSelFldPKey
, mkRemoteRelationshipName
, mkSelArgs
) where
@ -171,14 +172,29 @@ mkTableObj
mkTableObj tn descM allowedFlds =
mkObjTyInfo (Just desc) (mkTableTy tn) Set.empty (mapFromL _fiName flds) TLHasuraType
where
flds = pgColFlds <> relFlds <> computedFlds
pgColFlds = map mkPGColFld $ getPGColumnFields allowedFlds
relFlds = concatMap mkRelationshipField' $ getRelationshipFields allowedFlds
computedFlds = map mkComputedFieldFld $ getComputedFields allowedFlds
flds = flip concatMap allowedFlds $ \case
SFPGColumn info -> pure $ mkPGColFld info
SFRelationship info -> mkRelationshipField' info
SFComputedField info -> pure $ mkComputedFieldFld info
SFRemoteRelationship info -> pure $ mkRemoteRelationshipFld info
mkRelationshipField' (RelationshipFieldInfo relInfo allowAgg _ _ _ isNullable) =
mkRelationshipField allowAgg relInfo isNullable
desc = mkDescriptionWith descM $ "columns and relationships of " <>> tn
mkRemoteRelationshipName :: RemoteRelationshipName -> G.Name
mkRemoteRelationshipName =
G.Name . remoteRelationshipNameToText
mkRemoteRelationshipFld :: RemoteFieldInfo -> ObjFldInfo
mkRemoteRelationshipFld remoteField =
mkHsraObjFldInfo description fieldName paramMap gType
where
description = Just "Remote relationship field"
fieldName = mkRemoteRelationshipName $ _rfiName remoteField
paramMap = _rfiParamMap remoteField
gType = _rfiGType remoteField
{-
type table_aggregate {
agg: table_aggregate_fields

View File

@ -41,7 +41,7 @@ runGQ reqId userInfo reqHdrs req = do
(telemTimeTot_DT, (telemCacheHit, telemLocality, (telemTimeIO_DT, telemQueryType, !resp))) <- withElapsedTime $ do
E.ExecutionCtx _ sqlGenCtx pgExecCtx planCache sc scVer httpManager enableAL <- ask
(telemCacheHit, execPlan) <- E.getResolvedExecPlan pgExecCtx planCache
userInfo sqlGenCtx enableAL sc scVer httpManager reqHdrs req
userInfo sqlGenCtx enableAL sc scVer httpManager reqHdrs req
case execPlan of
E.GExPHasura resolvedOp -> do
(telemTimeIO, telemQueryType, respHdrs, resp) <- runHasuraGQ reqId req userInfo resolvedOp
@ -49,10 +49,13 @@ runGQ reqId userInfo reqHdrs req = do
E.GExPRemote rsi opDef -> do
let telemQueryType | G._todType opDef == G.OperationTypeMutation = Telem.Mutation
| otherwise = Telem.Query
(telemTimeIO, resp) <- E.execRemoteGQ reqId userInfo reqHdrs req rsi opDef
return (telemCacheHit, Telem.Remote, (telemTimeIO, telemQueryType, resp))
(telemTimeIO, resp) <- E.execRemoteGQ reqId userInfo reqHdrs req rsi $ G._todType opDef
pure (telemCacheHit, Telem.Remote, (telemTimeIO, telemQueryType, resp))
let telemTimeIO = convertDuration telemTimeIO_DT
telemTimeTot = convertDuration telemTimeTot_DT
Telem.recordTimingMetric Telem.RequestDimensions{..} Telem.RequestTimings{..}
return resp
@ -97,16 +100,18 @@ runHasuraGQ
-- ^ Also return 'Mutation' when the operation was a mutation, and the time
-- spent in the PG query; for telemetry.
runHasuraGQ reqId query userInfo resolvedOp = do
E.ExecutionCtx logger _ pgExecCtx _ _ _ _ _ <- ask
(E.ExecutionCtx logger _ pgExecCtx _ _ _ _ _) <- ask
(telemTimeIO, respE) <- withElapsedTime $ liftIO $ runExceptT $ case resolvedOp of
E.ExOpQuery tx genSql -> do
E.ExOpQuery tx genSql -> do
-- log the generated SQL and the graphql query
L.unLogger logger $ QueryLog query genSql reqId
([],) <$> runLazyTx' pgExecCtx tx
E.ExOpMutation respHeaders tx -> do
-- log the graphql query
L.unLogger logger $ QueryLog query Nothing reqId
(respHeaders,) <$> runLazyTx pgExecCtx Q.ReadWrite (withUserInfo userInfo tx)
E.ExOpSubs _ ->
throw400 UnexpectedPayload
"subscriptions are not supported over HTTP, use websockets instead"

View File

@ -1,10 +1,13 @@
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
module Hasura.GraphQL.Transport.HTTP.Protocol
( GQLReq(..)
, GQLBatchedReqs(..)
, GQLReqUnparsed
, GQLReqParsed
, toParsed
, GQLQueryText
, GQLQueryText(..)
, GQLExecDoc(..)
, OperationName(..)
, VariableValues
@ -16,26 +19,37 @@ module Hasura.GraphQL.Transport.HTTP.Protocol
, RemoteGqlResp(..)
, GraphqlResponse(..)
, encodeGraphqlResponse
, GQRespValue(..), gqRespData, gqRespErrors
, encodeGQRespValue
, parseGQRespValue
, parseEncJObject
, GQJoinError(..), gqJoinErrorToValue
) where
import Control.Lens
import Hasura.EncJSON
import Hasura.GraphQL.Utils
import Hasura.Prelude
import Hasura.RQL.Types
import Language.GraphQL.Draft.Instances ()
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.Ordered as OJ
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Vector as V
import qualified Language.GraphQL.Draft.Parser as G
import qualified Language.GraphQL.Draft.Syntax as G
import qualified VectorBuilder.Builder as VB
import qualified VectorBuilder.Vector as VB
newtype GQLExecDoc
= GQLExecDoc { unGQLExecDoc :: [G.ExecutableDefinition] }
deriving (Ord, Show, Eq, Hashable)
deriving (Ord, Show, Eq, Hashable, Lift)
instance J.FromJSON GQLExecDoc where
parseJSON v = (GQLExecDoc . G.getExecutableDefinitions) <$> J.parseJSON v
@ -45,7 +59,7 @@ instance J.ToJSON GQLExecDoc where
newtype OperationName
= OperationName { _unOperationName :: G.Name }
deriving (Ord, Show, Eq, Hashable, J.ToJSON)
deriving (Ord, Show, Eq, Hashable, J.ToJSON, Lift)
instance J.FromJSON OperationName where
parseJSON v = OperationName . G.Name <$> J.parseJSON v
@ -57,7 +71,7 @@ data GQLReq a
{ _grOperationName :: !(Maybe OperationName)
, _grQuery :: !a
, _grVariables :: !(Maybe VariableValues)
} deriving (Show, Eq, Generic)
} deriving (Show, Eq, Generic, Functor, Lift)
$(J.deriveJSON (J.aesonDrop 3 J.camelCase){J.omitNothingFields=True}
''GQLReq
@ -86,7 +100,7 @@ instance J.FromJSON a => J.FromJSON (GQLBatchedReqs a) where
newtype GQLQueryText
= GQLQueryText
{ _unGQLQueryText :: Text
} deriving (Show, Eq, Ord, J.FromJSON, J.ToJSON, Hashable)
} deriving (Show, Eq, Ord, J.FromJSON, J.ToJSON, Hashable, Lift)
type GQLReqUnparsed = GQLReq GQLQueryText
type GQLReqParsed = GQLReq GQLExecDoc
@ -102,11 +116,40 @@ encodeGQErr :: Bool -> QErr -> J.Value
encodeGQErr includeInternal qErr =
J.object [ "errors" J..= [encodeGQLErr includeInternal qErr]]
-- | https://graphql.github.io/graphql-spec/June2018/#sec-Response-Format
--
-- NOTE: this type and parseGQRespValue are a lax representation of the spec,
-- since...
-- - remote GraphQL servers may not conform strictly, and...
-- - we use this type as an accumulator.
--
-- Ideally we'd have something correct by construction for hasura results
-- someplace.
data GQRespValue =
GQRespValue
{ _gqRespData :: OJ.Object
-- ^ 'OJ.empty' (corresponding to the invalid `"data": {}`) indicates an error.
, _gqRespErrors :: VB.Builder OJ.Value
-- ^ An 'OJ.Array', but with efficient cons and concatenation. Null indicates
-- query success.
}
makeLenses ''GQRespValue
newtype GQJoinError = GQJoinError Text
deriving (Show, Eq, IsString, Monoid, Semigroup)
-- | https://graphql.github.io/graphql-spec/June2018/#sec-Errors "Error result format"
gqJoinErrorToValue :: GQJoinError -> OJ.Value
gqJoinErrorToValue (GQJoinError msg) =
OJ.Object (OJ.fromList [("message", OJ.String msg)])
data GQResult a
= GQSuccess !a
| GQPreExecError ![J.Value]
| GQExecError ![J.Value]
deriving (Show, Eq, Functor, Foldable, Traversable)
| GQGeneric !GQRespValue
deriving (Functor, Foldable, Traversable)
type GQResponse = GQResult BL.ByteString
@ -115,13 +158,6 @@ isExecError = \case
GQExecError _ -> True
_ -> False
encodeGQResp :: GQResponse -> EncJSON
encodeGQResp gqResp =
encJFromAssocList $ case gqResp of
GQSuccess r -> [("data", encJFromLBS r)]
GQPreExecError e -> [("errors", encJFromJValue e)]
GQExecError e -> [("data", "null"), ("errors", encJFromJValue e)]
-- | Represents GraphQL response from a remote server
data RemoteGqlResp
= RemoteGqlResp
@ -147,3 +183,53 @@ encodeGraphqlResponse :: GraphqlResponse -> EncJSON
encodeGraphqlResponse = \case
GRHasura resp -> encodeGQResp resp
GRRemote resp -> encodeRemoteGqlResp resp
-- emptyResp :: GQRespValue
-- emptyResp = GQRespValue OJ.empty VB.empty
parseEncJObject :: EncJSON -> Either String OJ.Object
parseEncJObject = OJ.eitherDecode . encJToLBS >=> \case
OJ.Object obj -> pure obj
_ -> Left "expected object for GraphQL response"
parseGQRespValue :: EncJSON -> Either String GQRespValue
parseGQRespValue = parseEncJObject >=> \obj -> do
_gqRespData <-
case OJ.lookup "data" obj of
-- "an error was encountered before execution began":
Nothing -> pure OJ.empty
-- "an error was encountered during the execution that prevented a valid response":
Just OJ.Null -> pure OJ.empty
Just (OJ.Object dobj) -> pure dobj
Just _ -> Left "expected object or null for GraphQL data response"
_gqRespErrors <-
case OJ.lookup "errors" obj of
Nothing -> pure VB.empty
Just (OJ.Array vec) -> pure $ VB.vector vec
Just _ -> Left "expected array for GraphQL error response"
pure (GQRespValue {_gqRespData, _gqRespErrors})
encodeGQRespValue :: GQRespValue -> EncJSON
encodeGQRespValue GQRespValue{..} = OJ.toEncJSON $ OJ.Object $ OJ.fromList $
-- "If the data entry in the response is not present, the errors entry in the
-- response must not be empty. It must contain at least one error. "
if _gqRespData == OJ.empty && not anyErrors
then
let msg = "Somehow did not accumulate any errors or data from graphql queries"
in [("errors", OJ.Array $ V.singleton $ OJ.Object (OJ.fromList [("message", OJ.String msg)]) )]
else
-- NOTE: "If an error was encountered during the execution that prevented
-- a valid response, the data entry in the response should be null."
-- TODO it's not clear to me how we can enforce that here or if we should try.
("data", OJ.Object _gqRespData) :
[("errors", OJ.Array gqRespErrorsV) | anyErrors ]
where
gqRespErrorsV = VB.build _gqRespErrors
anyErrors = not $ V.null gqRespErrorsV
encodeGQResp :: GQResponse -> EncJSON
encodeGQResp = \case
GQSuccess r -> encJFromAssocList [("data", encJFromLBS r)]
GQPreExecError e -> encJFromAssocList [("errors", encJFromJValue e)]
GQExecError e -> encJFromAssocList [("data", "null"), ("errors", encJFromJValue e)]
GQGeneric v -> encodeGQRespValue v

View File

@ -40,6 +40,7 @@ import qualified ListT
import Hasura.EncJSON
import Hasura.GraphQL.Logging
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Transport.WebSocket.Protocol
import Hasura.HTTP
@ -282,7 +283,6 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
<> "CORS on websocket connections, then you can use the flag --ws-read-cookie or "
<> "HASURA_GRAPHQL_WS_READ_COOKIE to force read cookie when CORS is disabled."
onStart :: HasVersion => WSServerEnv -> WSConn -> StartMsg -> IO ()
onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
timerTot <- startTimer
@ -375,7 +375,7 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
-- if it's not a subscription, use HTTP to execute the query on the remote
(runExceptT $ flip runReaderT execCtx $
E.execRemoteGQ reqId userInfo reqHdrs q rsi opDef) >>= \case
E.execRemoteGQ reqId userInfo reqHdrs q rsi (G._todType opDef)) >>= \case
Left err -> postExecErr reqId err
Right (telemTimeIO_DT, !val) -> do
-- Telemetry. NOTE: don't time network IO:
@ -398,31 +398,26 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
_ enableAL = serverEnv
WSConnData userInfoR opMap errRespTy = WS.getData wsConn
logOpEv opTy reqId =
logWSEvent logger wsConn $ EOperation opDet
logOpEv opTy reqId = logWSEvent logger wsConn $ EOperation opDet
where
opDet = OperationDetails opId reqId (_grOperationName q) opTy query
-- log the query only in errors
query = case opTy of
ODQueryErr _ -> Just q
_ -> Nothing
query =
case opTy of
ODQueryErr _ -> Just q
_ -> Nothing
getErrFn errTy =
case errTy of
ERTLegacy -> encodeQErr
ERTGraphqlCompliant -> encodeGQLErr
sendStartErr e = do
let errFn = getErrFn errRespTy
sendMsg wsConn $
SMErr $ ErrorMsg opId $ errFn False $ err400 StartFailed e
logOpEv (ODProtoErr e) Nothing
sendCompleted reqId = do
sendMsg wsConn (SMComplete $ CompletionMsg opId)
logOpEv ODCompleted reqId
postExecErr reqId qErr = do
let errFn = getErrFn errRespTy
logOpEv (ODQueryErr qErr) (Just reqId)
@ -530,7 +525,7 @@ logWSEvent (L.Logger logger) wsConn wsEv = do
ERejected _ -> True
EConnErr _ -> True
EClosed -> False
EOperation op -> case _odOperationType op of
EOperation operation -> case _odOperationType operation of
ODStarted -> False
ODProtoErr _ -> True
ODQueryErr _ -> True

View File

@ -7,12 +7,17 @@ module Hasura.GraphQL.Utils
, groupListWith
, mkMapWith
, showNames
, unwrapTy
, simpleGraphQLQuery
, jsonValueToGValue
) where
import Hasura.Prelude
import Hasura.RQL.Types.Error
import Data.Scientific (floatingOrInteger)
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict as Map
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
@ -35,6 +40,12 @@ getBaseTy = \case
where
getBaseTyL = getBaseTy . G.unListType
unwrapTy :: G.GType -> G.GType
unwrapTy =
\case
G.TypeList _ lt -> G.unListType lt
nt -> nt
groupListWith
:: (Eq k, Hashable k, Foldable t, Functor t)
=> (v -> k) -> t v -> Map.HashMap k (NE.NonEmpty v)
@ -70,3 +81,15 @@ showNames names =
-- A simple graphql query to be used in generators
simpleGraphQLQuery :: Text
simpleGraphQLQuery = "query {author {id name}}"
-- | Convert a JSON value to a GraphQL value.
jsonValueToGValue :: A.Value -> G.Value
jsonValueToGValue = \case
A.String t -> G.VString $ G.StringValue t
-- TODO: Note the danger zone of scientific:
A.Number n -> either (\(_::Float) -> G.VFloat n) G.VInt (floatingOrInteger n)
A.Bool b -> G.VBoolean b
A.Object o -> G.VObject $ G.ObjectValueG $
map (uncurry G.ObjectFieldG . (G.Name *** jsonValueToGValue)) $ Map.toList o
A.Array a -> G.VList $ G.ListValueG $ map jsonValueToGValue $ toList a
A.Null -> G.VNull

View File

@ -13,24 +13,38 @@ module Hasura.GraphQL.Validate
, validateVariablesForReuse
, isQueryInAllowlist
, unValidateArgsMap
, unValidateField
) where
import Hasura.Prelude
import Data.Has
import Data.Time
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as HS
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.UUID as UUID
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Schema
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Utils
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.SQL.Time
import Hasura.SQL.Value
import Hasura.RQL.DML.Select.Types
import Hasura.GraphQL.Resolve.InputValue (annInpValueToJson)
data QueryParts
= QueryParts
@ -205,3 +219,100 @@ getQueryParts (GQLReq opNameM q varValsM) = do
return $ QueryParts opDef opRoot fragDefsL varValsM
where
(selSets, opDefs, fragDefsL) = G.partitionExDefs $ unGQLExecDoc q
-- | Convert the validated arguments to GraphQL parser AST arguments
unValidateArgsMap :: ArgsMap -> [RemoteFieldArgument]
unValidateArgsMap argsMap =
map (\(n, inpVal) ->
let _rfaArgument = G.Argument n $ unValidateInpVal inpVal
_rfaVariable = unValidateInpVariable inpVal
in RemoteFieldArgument {..})
. Map.toList $ argsMap
-- | Convert the validated field to GraphQL parser AST field
unValidateField :: Field -> G.Field
unValidateField (Field alias name _ argsMap selSet _) =
let args = map (\(n, inpVal) -> G.Argument n $ unValidateInpVal inpVal) $
Map.toList argsMap
sels = map (G.SelectionField . unValidateField) $ toList selSet
in G.Field (Just alias) name args [] sels
-- | Get the variable definition and it's value (if exists)
unValidateInpVariable :: AnnInpVal -> Maybe [(G.VariableDefinition,A.Value)]
unValidateInpVariable inputValue =
case (_aivValue inputValue) of
AGScalar _ _ -> mkVariableDefnValueTuple inputValue
AGEnum _ _ -> mkVariableDefnValueTuple inputValue
AGObject _ o ->
(\obj ->
let listObjects = OMap.toList obj
in concat $
mapMaybe (\(_, inpVal) -> unValidateInpVariable inpVal) listObjects)
<$> o
AGArray _ _ -> mkVariableDefnValueTuple inputValue
where
mkVariableDefnValueTuple val = maybe Nothing (\vars -> Just [vars]) $
variableDefnValueTuple val
variableDefnValueTuple :: AnnInpVal -> Maybe (G.VariableDefinition,A.Value)
variableDefnValueTuple inpVal@AnnInpVal {..} =
let varDefn = G.VariableDefinition <$> _aivVariable <*> Just _aivType <*> Just Nothing
in (,) <$> varDefn <*> Just (annInpValueToJson inpVal)
-- | Convert the validated input value to GraphQL value, if the input value
-- is a variable then it will be returned without resolving it, otherwise it
-- will be resolved
unValidateInpVal :: AnnInpVal -> G.Value
unValidateInpVal (AnnInpVal _ var val) = fromMaybe G.VNull $
-- if a variable is found, then directly return that, if not found then
-- convert it into a G.Value and return it
case var of
Just var' -> Just $ G.VVariable var'
Nothing ->
case val of
AGScalar _ v -> pgScalarToGValue <$> v
AGEnum _ v -> pgEnumToGEnum v
AGObject _ o ->
(G.VObject . G.ObjectValueG
. map (uncurry G.ObjectFieldG . (second unValidateInpVal))
. OMap.toList
) <$> o
AGArray _ vs -> (G.VList . G.ListValueG . map unValidateInpVal) <$> vs
where
pgEnumToGEnum :: AnnGEnumValue -> Maybe G.Value
pgEnumToGEnum = \case
AGESynthetic v -> G.VEnum <$> v
AGEReference _ v -> (G.VEnum . G.EnumValue . G.Name . getEnumValue) <$> v
pgScalarToGValue :: PGScalarValue -> G.Value
pgScalarToGValue = \case
PGValInteger i -> G.VInt $ fromIntegral i
PGValSmallInt i -> G.VInt $ fromIntegral i
PGValBigInt i -> G.VInt $ fromIntegral i
PGValFloat f -> G.VFloat $ realToFrac f
PGValDouble d -> G.VFloat $ realToFrac d
-- TODO: Scientific is a danger zone; use its safe conv function.
PGValNumeric sc -> G.VFloat $ realToFrac sc
PGValMoney m -> G.VFloat $ realToFrac m
PGValBoolean b -> G.VBoolean b
PGValChar t -> toStringValue $ T.singleton t
PGValVarchar t -> toStringValue t
PGValText t -> toStringValue t
PGValCitext t -> toStringValue t
PGValDate d -> toStringValue $ T.pack $ showGregorian d
PGValTimeStampTZ u -> toStringValue $ T.pack $
formatTime defaultTimeLocale "%FT%T%QZ" u
PGValTimeStamp u -> toStringValue $ T.pack $
formatTime defaultTimeLocale "%FT%T%QZ" u
PGValTimeTZ (ZonedTimeOfDay tod tz) ->
toStringValue $ T.pack (show tod ++ timeZoneOffsetString tz)
PGNull _ -> G.VNull
PGValJSON (Q.JSON v) -> jsonValueToGValue v
PGValJSONB (Q.JSONB v) -> jsonValueToGValue v
PGValGeo v -> jsonValueToGValue $ A.toJSON v
PGValRaster v -> jsonValueToGValue $ A.toJSON v
PGValUUID u -> toStringValue $ UUID.toText u
PGValUnknown t -> toStringValue t
where
toStringValue = G.VString . G.StringValue

View File

@ -1,10 +1,11 @@
module Hasura.GraphQL.Validate.Field
( ArgsMap
, Field(..)
, Field(..), fAlias, fName, fType, fArguments, fSelSet, fSource
, SelSet
, denormSelSet
) where
import Control.Lens
import Hasura.Prelude
import qualified Data.Aeson as J
@ -54,12 +55,16 @@ data Field
, _fType :: !G.NamedType
, _fArguments :: !ArgsMap
, _fSelSet :: !SelSet
, _fSource :: !TypeLoc
} deriving (Eq, Show)
$(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
''Field
)
makeLenses ''Field
-- newtype FieldMapAlias
-- = FieldMapAlias
-- { unFieldMapAlias :: Map.HashMap G.Alias (FieldG FieldMapAlias)
@ -213,6 +218,7 @@ denormFld visFrags fldInfo (G.Field aliasM name args dirs selSet) = do
let fldTy = _fiTy fldInfo
fldBaseTy = getBaseTy fldTy
fldSource = _fiLoc fldInfo
fldTyInfo <- getTyInfo fldBaseTy
@ -234,13 +240,17 @@ denormFld visFrags fldInfo (G.Field aliasM name args dirs selSet) = do
throwVE $ "internal error: unexpected input type for field: "
<> showName name
(TIIFace _, _) -> throwVE $ "interface types not supported"
(TIUnion _, _) -> throwVE $ "union types not supported"
-- when scalar/enum and no empty set
(_, _) ->
throwVE $ "field " <> showName name <> " must not have a "
<> "selection since type " <> G.showGT fldTy <> " has no subfields"
withPathK "directives" $ withDirectives dirs $ return $
Field (fromMaybe (G.Alias name) aliasM) name fldBaseTy argMap fields
Field (fromMaybe (G.Alias name) aliasM) name fldBaseTy argMap fields fldSource
denormInlnFrag
:: ( MonadReader ValidationCtx m

View File

@ -7,9 +7,9 @@ module Hasura.GraphQL.Validate.InputValue
) where
import Hasura.Prelude
import Hasura.Server.Utils (duplicates)
import Data.Has
import Data.List.Extended (duplicates)
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map

View File

@ -101,15 +101,11 @@ import qualified Hasura.RQL.Types.Column as RQL
import Hasura.GraphQL.Utils
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.RemoteSchema (RemoteSchemaInfo, RemoteSchemaName)
import Hasura.SQL.Types
import Hasura.SQL.Value
-- | Typeclass for equating relevant properties of various GraphQL types defined below
class EquatableGType a where
type EqProps a
getEqProps :: a -> EqProps a
typeEq :: (EquatableGType a, Eq (EqProps a)) => a -> a -> Bool
typeEq a b = getEqProps a == getEqProps b
@ -169,18 +165,6 @@ mkHsraEnumTyInfo
mkHsraEnumTyInfo descM ty enumVals =
EnumTyInfo descM ty enumVals TLHasuraType
data InpValInfo
= InpValInfo
{ _iviDesc :: !(Maybe G.Description)
, _iviName :: !G.Name
, _iviDefVal :: !(Maybe G.ValueConst)
, _iviType :: !G.GType
} deriving (Show, Eq, TH.Lift)
instance EquatableGType InpValInfo where
type EqProps InpValInfo = (G.Name, G.GType)
getEqProps ity = (,) (_iviName ity) (_iviType ity)
fromInpValDef :: G.InputValueDefinition -> InpValInfo
fromInpValDef (G.InputValueDefinition descM n ty defM) =
InpValInfo descM n defM ty
@ -433,7 +417,7 @@ getPossibleObjTypes' tyMap (AOTIFace i) = toObjMap $ mapMaybe previewImplTypeM $
getPossibleObjTypes' tyMap (AOTUnion u) = toObjMap $ mapMaybe (extrObjTyInfoM tyMap) $ Set.toList $ _utiMemberTypes u
toObjMap :: [ObjTyInfo] -> Map.HashMap G.NamedType ObjTyInfo
toObjMap objs = foldr (\o -> Map.insert (_otiName o) o) Map.empty objs
toObjMap = foldr (\o -> Map.insert (_otiName o) o) Map.empty
isObjTy :: TypeInfo -> Bool
@ -642,7 +626,7 @@ fromTyDef tyDef loc = case tyDef of
fromSchemaDoc :: G.SchemaDocument -> TypeLoc -> Either Text TypeMap
fromSchemaDoc (G.SchemaDocument tyDefs) loc = do
let tyMap = mkTyInfoMap $ map (flip fromTyDef loc) tyDefs
let tyMap = mkTyInfoMap $ map (`fromTyDef` loc) tyDefs
validateTypeMap tyMap
return tyMap
@ -706,6 +690,7 @@ type FragDefMap = Map.HashMap G.Name FragDef
type AnnVarVals =
Map.HashMap G.Variable AnnInpVal
-- TODO document me
data AnnInpVal
= AnnInpVal
{ _aivType :: !G.GType

View File

@ -18,6 +18,7 @@ import Data.Functor.Classes (Eq1 (..), Eq2 (..))
import Data.GADT.Compare
import Data.Int
import Data.Scientific (Scientific)
import Data.Set (Set)
import Data.Time.Clock
import Data.Vector (Vector)
import GHC.Generics ((:*:) (..), (:+:) (..), Generic (..), K1 (..),
@ -191,6 +192,8 @@ instance (Cacheable a) => Cacheable (HashSet a) where
unchanged = liftEq . unchanged
instance (Cacheable a) => Cacheable (CI a) where
unchanged _ = (==)
instance (Cacheable a) => Cacheable (Set a) where
unchanged = liftEq . unchanged
instance Cacheable ()
instance (Cacheable a, Cacheable b) => Cacheable (a, b)

View File

@ -1,3 +1,4 @@
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Prelude
( module M
@ -11,6 +12,9 @@ module Hasura.Prelude
, bsToTxt
, txtToBs
, spanMaybeM
-- * Efficient coercions
, coerce
, coerceSet
, findWithIndex
, mapFromL
-- * Measuring and working with moments and durations
@ -62,12 +66,15 @@ import Test.QuickCheck.Arbitrary.Generic as M
import Text.Read as M (readEither, readMaybe)
import qualified Data.ByteString as B
import Data.Coerce
import qualified Data.HashMap.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified GHC.Clock as Clock
import qualified Test.QuickCheck as QC
import Unsafe.Coerce
alphabet :: String
alphabet = ['a'..'z'] ++ ['A'..'Z']
@ -110,6 +117,16 @@ spanMaybeM f = go . toList
Just y -> first (y:) <$> go xs
Nothing -> pure ([], l)
-- | Efficiently coerce a set from one type to another.
--
-- This has the same safety properties as 'Set.mapMonotonic', and is equivalent
-- to @Set.mapMonotonic coerce@ but is more efficient. This is safe to use when
-- both @a@ and @b@ have automatically derived @Ord@ instances.
--
-- https://stackoverflow.com/q/57963881/176841
coerceSet :: Coercible a b=> Set.Set a -> Set.Set b
coerceSet = unsafeCoerce
findWithIndex :: (a -> Bool) -> [a] -> Maybe (a, Int)
findWithIndex p l = do
v <- find p l

View File

@ -42,6 +42,7 @@ import qualified Hasura.RQL.DDL.CustomTypes as CustomTypes
import qualified Hasura.RQL.DDL.Permission as Permission
import qualified Hasura.RQL.DDL.QueryCollection as Collection
import qualified Hasura.RQL.DDL.Relationship as Relationship
import qualified Hasura.RQL.DDL.RemoteRelationship as RemoteRelationship
import qualified Hasura.RQL.DDL.Schema as Schema
-- | Purge all user-defined metadata; metadata with is_system_defined = false
@ -52,6 +53,7 @@ clearUserMetadata = liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ "DELETE FROM hdb_catalog.hdb_relationship WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.event_triggers" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_computed_field" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_remote_relationship" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_table WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.remote_schemas" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_allowlist" () False
@ -89,6 +91,7 @@ applyQP1 (ReplaceMetadata _ tables functionsMeta schemas collections
delPerms = map Permission.pdRole $ table ^. tmDeletePermissions
eventTriggers = map etcName $ table ^. tmEventTriggers
computedFields = map _cfmName $ table ^. tmComputedFields
remoteRelationships = map _rrmName $ table ^. tmRemoteRelationships
checkMultipleDecls "relationships" allRels
checkMultipleDecls "insert permissions" insPerms
@ -97,6 +100,7 @@ applyQP1 (ReplaceMetadata _ tables functionsMeta schemas collections
checkMultipleDecls "delete permissions" delPerms
checkMultipleDecls "event triggers" eventTriggers
checkMultipleDecls "computed fields" computedFields
checkMultipleDecls "remote relationships" remoteRelationships
withPathK "functions" $
case functionsMeta of
@ -163,6 +167,14 @@ saveMetadata (ReplaceMetadata _ tables functionsMeta
ComputedField.addComputedFieldToCatalog $
ComputedField.AddComputedField _tmTable name definition comment
-- Remote Relationships
withPathK "remote_relationships" $
indexedForM_ _tmRemoteRelationships $
\(RemoteRelationshipMeta name def) -> do
let RemoteRelationshipDef rs hf rf = def
liftTx $ RemoteRelationship.persistRemoteRelationship $
RemoteRelationship name _tmTable hf rs rf
-- Permissions
withPathK "insert_permissions" $ processPerms _tmTable _tmInsertPermissions
withPathK "select_permissions" $ processPerms _tmTable _tmSelectPermissions
@ -259,6 +271,9 @@ fetchMetadata = do
-- Fetch all computed fields
computedFields <- fetchComputedFields
-- Fetch all remote relationships
remoteRelationships <- Q.catchE defaultTxErrorHandler fetchRemoteRelationships
let (_, postRelMap) = flip runState tableMetaMap $ do
modMetaMap tmObjectRelationships objRelDefs
modMetaMap tmArrayRelationships arrRelDefs
@ -268,11 +283,12 @@ fetchMetadata = do
modMetaMap tmDeletePermissions delPermDefs
modMetaMap tmEventTriggers triggerMetaDefs
modMetaMap tmComputedFields computedFields
modMetaMap tmRemoteRelationships remoteRelationships
-- fetch all functions
functions <- FMVersion2 <$> Q.catchE defaultTxErrorHandler fetchFunctions
-- fetch all custom resolvers
-- fetch all remote schemas
remoteSchemas <- fetchRemoteSchemas
-- fetch all collections
@ -288,8 +304,13 @@ fetchMetadata = do
cronTriggers <- fetchCronTriggers
return $ ReplaceMetadata currentMetadataVersion (HMIns.elems postRelMap) functions
remoteSchemas collections allowlist
return $ ReplaceMetadata currentMetadataVersion
(HMIns.elems postRelMap)
functions
remoteSchemas
collections
allowlist
customTypes
actions
cronTriggers
@ -451,6 +472,17 @@ fetchMetadata = do
) ap on true;
|] [] False
-- Read every remote relationship stored in the catalog, returning
-- (table, meta) pairs in the shape 'modMetaMap' expects.
fetchRemoteRelationships = do
  r <- Q.listQ [Q.sql|
       SELECT table_schema, table_name,
              remote_relationship_name, definition::json
       FROM hdb_catalog.hdb_remote_relationship
       |] () False
  pure $ flip map r $ \(schema, table, name, Q.AltJ definition) ->
    ( QualifiedObject schema table
    , RemoteRelationshipMeta name definition
    )
runExportMetadata
:: (QErrM m, MonadTx m)
=> ExportMetadata -> m EncJSON
@ -501,14 +533,15 @@ runDropInconsistentMetadata _ = do
purgeMetadataObj :: MonadTx m => MetadataObjId -> m ()
purgeMetadataObj = liftTx . \case
MOTable qt -> Schema.deleteTableFromCatalog qt
MOFunction qf -> Schema.delFunctionFromCatalog qf
MORemoteSchema rsn -> removeRemoteSchemaFromCatalog rsn
MOTableObj qt (MTORel rn _) -> Relationship.delRelFromCatalog qt rn
MOTableObj qt (MTOPerm rn pt) -> dropPermFromCatalog qt rn pt
MOTableObj _ (MTOTrigger trn) -> delEventTriggerFromCatalog trn
MOTableObj qt (MTOComputedField ccn) -> dropComputedFieldFromCatalog qt ccn
MOCustomTypes -> CustomTypes.clearCustomTypes
MOAction action -> Action.deleteActionFromCatalog action Nothing
MOActionPermission action role -> Action.deleteActionPermissionFromCatalog action role
MOCronTrigger ctName -> deleteCronTriggerFromCatalog ctName
MOTable qt -> Schema.deleteTableFromCatalog qt
MOFunction qf -> Schema.delFunctionFromCatalog qf
MORemoteSchema rsn -> removeRemoteSchemaFromCatalog rsn
MOTableObj qt (MTORel rn _) -> Relationship.delRelFromCatalog qt rn
MOTableObj qt (MTOPerm rn pt) -> dropPermFromCatalog qt rn pt
MOTableObj _ (MTOTrigger trn) -> delEventTriggerFromCatalog trn
MOTableObj qt (MTOComputedField ccn) -> dropComputedFieldFromCatalog qt ccn
MOTableObj qt (MTORemoteRelationship rn) -> RemoteRelationship.delRemoteRelFromCatalog qt rn
MOCustomTypes -> CustomTypes.clearCustomTypes
MOAction action -> Action.deleteActionFromCatalog action Nothing
MOActionPermission action role -> Action.deleteActionPermissionFromCatalog action role
MOCronTrigger ctName -> deleteCronTriggerFromCatalog ctName

View File

@ -8,7 +8,6 @@ import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Metadata.Types
import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.SQL.Types
import qualified Hasura.RQL.DDL.ComputedField as ComputedField
@ -29,6 +28,9 @@ import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.URI as N
import qualified System.Cron.Parser as Cr
import Data.List.Extended (duplicates)
import Data.Scientific
import Test.QuickCheck
import Test.QuickCheck.Instances.Semigroup ()
import Test.QuickCheck.Instances.Time ()
@ -91,6 +93,9 @@ instance Arbitrary ComputedField.ComputedFieldDefinition where
instance Arbitrary ComputedFieldMeta where
arbitrary = genericArbitrary
instance Arbitrary Scientific where
arbitrary = ((fromRational . toRational) :: Int -> Scientific) <$> arbitrary
instance Arbitrary J.Value where
arbitrary = sized sizedArbitraryValue
where
@ -100,7 +105,7 @@ instance Arbitrary J.Value where
where
n' = n `div` 2
boolean = J.Bool <$> arbitrary
number = (J.Number . fromRational . toRational :: Int -> J.Value) <$> arbitrary
number = J.Number <$> arbitrary
string = J.String <$> arbitrary
array = J.Array . V.fromList <$> arbitrary
object' = J.Object <$> arbitrary
@ -296,6 +301,30 @@ instance Arbitrary ActionPermissionMetadata where
instance Arbitrary ActionMetadata where
arbitrary = genericArbitrary
deriving instance Arbitrary G.StringValue
deriving instance Arbitrary G.Variable
deriving instance Arbitrary G.ListValue
deriving instance Arbitrary G.ObjectValue
instance Arbitrary G.Value where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (G.ObjectFieldG a) where
arbitrary = genericArbitrary
deriving instance Arbitrary RemoteArguments
instance Arbitrary FieldCall where
arbitrary = genericArbitrary
deriving instance Arbitrary RemoteFields
instance Arbitrary RemoteRelationshipDef where
arbitrary = genericArbitrary
instance Arbitrary RemoteRelationshipMeta where
arbitrary = genericArbitrary
instance Arbitrary CronTriggerMetadata where
arbitrary = genericArbitrary

View File

@ -11,6 +11,7 @@ module Hasura.RQL.DDL.Metadata.Types
, tmObjectRelationships
, tmArrayRelationships
, tmComputedFields
, tmRemoteRelationships
, tmInsertPermissions
, tmSelectPermissions
, tmUpdatePermissions
@ -22,6 +23,7 @@ module Hasura.RQL.DDL.Metadata.Types
, ActionMetadata(..)
, ActionPermissionMetadata(..)
, ComputedFieldMeta(..)
, RemoteRelationshipMeta(..)
, FunctionsMetadata(..)
, ExportMetadata(..)
, ClearMetadata(..)
@ -33,25 +35,26 @@ module Hasura.RQL.DDL.Metadata.Types
import Hasura.Prelude
import Control.Lens hiding (set, (.=))
import Control.Lens hiding (set, (.=))
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.RQL.Types
import Hasura.SQL.Types
import qualified Hasura.RQL.DDL.ComputedField as ComputedField
import qualified Hasura.RQL.DDL.Permission as Permission
import qualified Hasura.RQL.DDL.QueryCollection as Collection
import qualified Hasura.RQL.DDL.Relationship as Relationship
import qualified Hasura.RQL.DDL.Schema as Schema
import qualified Hasura.RQL.DDL.ComputedField as ComputedField
import qualified Hasura.RQL.DDL.Permission as Permission
import qualified Hasura.RQL.DDL.QueryCollection as Collection
import qualified Hasura.RQL.DDL.Relationship as Relationship
import qualified Hasura.RQL.DDL.Schema as Schema
import qualified Hasura.RQL.Types.RemoteRelationship as RemoteRelationship
data MetadataVersion
= MVVersion1
@ -81,6 +84,13 @@ data ComputedFieldMeta
} deriving (Show, Eq, Lift, Generic)
$(deriveJSON (aesonDrop 4 snakeCase) ''ComputedFieldMeta)
-- | Metadata representation of a remote relationship: its name plus the
-- user-supplied definition, as it appears under a table's
-- @remote_relationships@ key in exported/replaced metadata.
data RemoteRelationshipMeta
  = RemoteRelationshipMeta
  { _rrmName       :: !RemoteRelationshipName
  , _rrmDefinition :: !RemoteRelationship.RemoteRelationshipDef
  } deriving (Show, Eq, Lift, Generic)
$(deriveJSON (aesonDrop 4 snakeCase) ''RemoteRelationshipMeta)
data TableMeta
= TableMeta
{ _tmTable :: !QualifiedTable
@ -89,6 +99,7 @@ data TableMeta
, _tmObjectRelationships :: ![Relationship.ObjRelDef]
, _tmArrayRelationships :: ![Relationship.ArrRelDef]
, _tmComputedFields :: ![ComputedFieldMeta]
, _tmRemoteRelationships :: ![RemoteRelationshipMeta]
, _tmInsertPermissions :: ![Permission.InsPermDef]
, _tmSelectPermissions :: ![Permission.SelPermDef]
, _tmUpdatePermissions :: ![Permission.UpdPermDef]
@ -99,7 +110,7 @@ $(makeLenses ''TableMeta)
mkTableMeta :: QualifiedTable -> Bool -> TableConfig -> TableMeta
mkTableMeta qt isEnum config =
TableMeta qt isEnum config [] [] [] [] [] [] [] []
TableMeta qt isEnum config [] [] [] [] [] [] [] [] []
instance FromJSON TableMeta where
parseJSON (Object o) = do
@ -114,6 +125,7 @@ instance FromJSON TableMeta where
<*> o .:? orKey .!= []
<*> o .:? arKey .!= []
<*> o .:? cfKey .!= []
<*> o .:? rrKey .!= []
<*> o .:? ipKey .!= []
<*> o .:? spKey .!= []
<*> o .:? upKey .!= []
@ -132,6 +144,7 @@ instance FromJSON TableMeta where
dpKey = "delete_permissions"
etKey = "event_triggers"
cfKey = "computed_fields"
rrKey = "remote_relationships"
unexpectedKeys =
HS.fromList (HM.keys o) `HS.difference` expectedKeySet
@ -139,7 +152,7 @@ instance FromJSON TableMeta where
expectedKeySet =
HS.fromList [ tableKey, isEnumKey, configKey, orKey
, arKey , ipKey, spKey, upKey, dpKey, etKey
, cfKey
, cfKey, rrKey
]
parseJSON _ =
@ -288,6 +301,7 @@ replaceMetadataToOrdJSON ( ReplaceMetadata
objectRelationships
arrayRelationships
computedFields
remoteRelationships
insertPermissions
selectPermissions
updatePermissions
@ -299,6 +313,7 @@ replaceMetadataToOrdJSON ( ReplaceMetadata
, objectRelationshipsPair
, arrayRelationshipsPair
, computedFieldsPair
, remoteRelationshipsPair
, insertPermissionsPair
, selectPermissionsPair
, updatePermissionsPair
@ -315,6 +330,8 @@ replaceMetadataToOrdJSON ( ReplaceMetadata
relDefToOrdJSON arrayRelationships
computedFieldsPair = listToMaybeOrdPair "computed_fields"
computedFieldMetaToOrdJSON computedFields
remoteRelationshipsPair = listToMaybeOrdPair "remote_relationships"
AO.toOrdered remoteRelationships
insertPermissionsPair = listToMaybeOrdPair "insert_permissions"
insPermDefToOrdJSON insertPermissions
selectPermissionsPair = listToMaybeOrdPair "select_permissions"

View File

@ -19,9 +19,10 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.RQL.Types.QueryCollection
import Hasura.Server.Utils (duplicates)
import Hasura.SQL.Types
import Data.List.Extended (duplicates)
import qualified Data.Text as T
import qualified Data.Text.Extended as T
import qualified Database.PG.Query as Q
@ -33,7 +34,7 @@ addCollectionP2 (CollectionDef queryList) =
withPathK "queries" $
unless (null duplicateNames) $ throw400 NotSupported $
"found duplicate query names "
<> T.intercalate ", " (map (T.dquote . unNonEmptyText . unQueryName) duplicateNames)
<> T.intercalate ", " (map (T.dquote . unNonEmptyText . unQueryName) $ toList duplicateNames)
where
duplicateNames = duplicates $ map _lqName queryList

View File

@ -12,21 +12,23 @@ module Hasura.RQL.DDL.Relationship
)
where
import qualified Database.PG.Query as Q
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.SchemaCacheTypes
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Permission (purgePerm)
import Hasura.RQL.DDL.Permission (purgePerm)
import Hasura.RQL.DDL.Relationship.Types
import Hasura.RQL.Types
import Hasura.SQL.Types
import Data.Aeson.Types
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Data.Tuple (swap)
import Instances.TH.Lift ()
import Data.Tuple (swap)
import Instances.TH.Lift ()
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Database.PG.Query as Q
runCreateRelationship
:: (MonadTx m, CacheRWM m, HasSystemDefined m, ToJSON a)

View File

@ -7,11 +7,12 @@ import Hasura.SQL.Types
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Aeson.Types
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
data RelDef a
= RelDef
{ rdName :: !RelName

View File

@ -0,0 +1,117 @@
{-# LANGUAGE ViewPatterns #-}
module Hasura.RQL.DDL.RemoteRelationship
( runCreateRemoteRelationship
, runDeleteRemoteRelationship
, runUpdateRemoteRelationship
, persistRemoteRelationship
, resolveRemoteRelationship
, delRemoteRelFromCatalog
) where
import Hasura.EncJSON
import Hasura.GraphQL.Validate.Types
import Hasura.Prelude
import Hasura.RQL.DDL.RemoteRelationship.Validate
import Hasura.RQL.Types
import Hasura.SQL.Types
import Instances.TH.Lift ()
import qualified Database.PG.Query as Q
-- | Create a new remote relationship: check that the target table is tracked,
-- persist the relationship definition in the catalog, and rebuild the schema
-- cache entry for the affected table object.
runCreateRemoteRelationship
  :: (MonadTx m, CacheRWM m) => RemoteRelationship -> m EncJSON
runCreateRemoteRelationship remoteRelationship = do
  -- 'askTabInfo' throws if the table is not tracked, so this doubles as an
  -- existence check before anything is written to the catalog
  void $ askTabInfo $ rtrTable remoteRelationship
  liftTx $ persistRemoteRelationship remoteRelationship
  buildSchemaCacheFor $ MOTableObj table $ MTORemoteRelationship $ rtrName remoteRelationship
  pure successMsg
  where
    table = rtrTable remoteRelationship
-- | Validate a remote relationship against the remote schema map and, on
-- success, return the resolved field info, the (stripped) remote type map
-- and the schema dependencies the relationship introduces.
--
-- Validation failures are rethrown as a 400 'RemoteSchemaError'.
resolveRemoteRelationship
  :: QErrM m
  => RemoteRelationship
  -> [PGColumnInfo]
  -> RemoteSchemaMap
  -> m (RemoteFieldInfo, TypeMap, [SchemaDependency])
resolveRemoteRelationship remoteRelationship pgColumns remoteSchemaMap = do
  (remoteField, typesMap) <-
    either (throw400 RemoteSchemaError . validateErrorToText) pure $
      validateRemoteRelationship remoteRelationship remoteSchemaMap pgColumns
  let table = rtrTable remoteRelationship
      remoteSchemaName = rtrRemoteSchema remoteRelationship
      -- the relationship depends on its table, on the remote schema and on
      -- every hasura column it joins on
      tableDep = SchemaDependency (SOTable table) DRTable
      remoteSchemaDep =
        SchemaDependency (SORemoteSchema remoteSchemaName) DRRemoteSchema
      -- single traversal instead of the previous @map f . map pgiColumn@
      columnsDep =
        map
          (\columnInfo ->
             SchemaDependency
               (SOTableObj table $ TOCol $ pgiColumn columnInfo)
               DRRemoteRelationship)
          (toList $ _rfiHasuraFields remoteField)
      schemaDependencies = tableDep : remoteSchemaDep : columnsDep
  pure (remoteField, typesMap, schemaDependencies)
-- | Update an existing remote relationship in place: ensure it already exists
-- on the table, overwrite its definition in the catalog, and rebuild the
-- schema cache entry for the affected table object.
runUpdateRemoteRelationship :: (MonadTx m, CacheRWM m) => RemoteRelationship -> m EncJSON
runUpdateRemoteRelationship remoteRelationship = do
  fieldInfoMap <- askFieldInfoMap table
  -- throws if no remote relationship with this name exists on the table
  void $ askRemoteRel fieldInfoMap (rtrName remoteRelationship)
  liftTx $ updateRemoteRelInCatalog remoteRelationship
  buildSchemaCacheFor $ MOTableObj table $ MTORemoteRelationship $ rtrName remoteRelationship
  pure successMsg
  where
    table = rtrTable remoteRelationship
-- | Project the user-facing definition (remote schema name, hasura fields and
-- remote field path) out of a 'RemoteRelationship'; this is what gets stored
-- as the @definition@ column in the catalog.
mkRemoteRelationshipDef :: RemoteRelationship -> RemoteRelationshipDef
mkRemoteRelationshipDef RemoteRelationship {..} =
  RemoteRelationshipDef rtrRemoteSchema rtrHasuraFields rtrRemoteField
-- | Insert a remote relationship row into
-- @hdb_catalog.hdb_remote_relationship@; the user-facing definition is
-- serialised into the @definition@ jsonb column via 'Q.AltJ'.
persistRemoteRelationship :: RemoteRelationship -> Q.TxE QErr ()
persistRemoteRelationship remoteRelationship =
  Q.unitQE defaultTxErrorHandler [Q.sql|
       INSERT INTO hdb_catalog.hdb_remote_relationship
       (remote_relationship_name, table_schema, table_name, definition)
       VALUES ($1, $2, $3, $4::jsonb)
       |] (rtrName remoteRelationship, schemaName, tableName, Q.AltJ definition) True
  where
    QualifiedObject schemaName tableName = rtrTable remoteRelationship
    definition = mkRemoteRelationshipDef remoteRelationship
-- | Overwrite the @definition@ column of an existing remote relationship row;
-- the row is identified by relationship name plus table (schema, name).
updateRemoteRelInCatalog
  :: RemoteRelationship -> Q.TxE QErr ()
updateRemoteRelInCatalog remoteRelationship =
  Q.unitQE defaultTxErrorHandler [Q.sql|
       UPDATE hdb_catalog.hdb_remote_relationship
       SET definition = $4::jsonb
       WHERE remote_relationship_name = $1 AND table_schema = $2 AND table_name = $3
       |] (rtrName remoteRelationship, schemaName, tableName, Q.AltJ definition) True
  where
    QualifiedObject schemaName tableName = rtrTable remoteRelationship
    definition = mkRemoteRelationshipDef remoteRelationship
-- | Drop a remote relationship: verify it exists on the table, delete it from
-- the catalog, and rebuild the schema cache entry for the table object.
runDeleteRemoteRelationship ::
     (MonadTx m, CacheRWM m) => DeleteRemoteRelationship -> m EncJSON
runDeleteRemoteRelationship (DeleteRemoteRelationship table relName)= do
  fieldInfoMap <- askFieldInfoMap table
  -- throws if no remote relationship with this name exists on the table
  void $ askRemoteRel fieldInfoMap relName
  liftTx $ delRemoteRelFromCatalog table relName
  buildSchemaCacheFor $ MOTableObj table $ MTORemoteRelationship relName
  pure successMsg
-- | Delete a remote relationship row from the catalog, identified by its
-- table (schema, name) and the relationship name.
delRemoteRelFromCatalog
  :: QualifiedTable -> RemoteRelationshipName -> Q.TxE QErr ()
delRemoteRelFromCatalog (QualifiedObject sn tn) (RemoteRelationshipName relName) =
  Q.unitQE defaultTxErrorHandler [Q.sql|
       DELETE FROM
              hdb_catalog.hdb_remote_relationship
       WHERE table_schema = $1
         AND table_name = $2
         AND remote_relationship_name = $3
       |] (sn, tn, relName) True

View File

@ -0,0 +1,431 @@
{-# LANGUAGE ViewPatterns #-}
-- | Validate input queries against remote schemas.
module Hasura.RQL.DDL.RemoteRelationship.Validate
( validateRemoteRelationship
, validateErrorToText
) where
import Data.Bifunctor
import Data.Foldable
import Data.Validation
import Hasura.GraphQL.Validate.Types
import Hasura.Prelude hiding (first)
import Hasura.RQL.Types
import Hasura.Server.Utils (makeReasonMessage)
import Hasura.SQL.Types
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Hasura.GraphQL.Schema as GS
import qualified Language.GraphQL.Draft.Syntax as G
-- | An error validating the remote relationship.
data ValidationError
  = RemoteSchemaNotFound !RemoteSchemaName
  | CouldntFindRemoteField !G.Name !G.NamedType
  | FieldNotFoundInRemoteSchema !G.Name
  | NoSuchArgumentForRemote !G.Name
  | MissingRequiredArgument !G.Name
  | TypeNotFound !G.NamedType
  | TableNotFound !QualifiedTable
  | TableFieldNonexistent !QualifiedTable !FieldName
  | ExpectedTypeButGot !G.GType !G.GType
  -- fixed: a space was missing between the two strict fields
  -- (@!G.GType!T.Text@), which does not parse as two constructor arguments
  | InvalidType !G.GType !T.Text
  | InvalidVariable !G.Variable !(HM.HashMap G.Variable PGColumnInfo)
  | NullNotAllowedHere
  | InvalidGTypeForStripping !G.GType
  | UnsupportedMultipleElementLists
  | UnsupportedEnum
  deriving (Show, Eq)
-- | Render a non-empty collection of validation errors as a single
-- human-readable reason message (via 'makeReasonMessage').
validateErrorToText :: NE.NonEmpty ValidationError -> Text
validateErrorToText (toList -> errs) =
  "cannot validate remote relationship " <> makeReasonMessage errs errorToText
  where
    errorToText :: ValidationError -> Text
    errorToText = \case
      RemoteSchemaNotFound name ->
        "remote schema with name " <> name <<> " not found"
      CouldntFindRemoteField name ty ->
        "remote field with name " <> name <<> " and type " <> ty <<> " not found"
      FieldNotFoundInRemoteSchema name ->
        "field with name " <> name <<> " not found in remote schema"
      NoSuchArgumentForRemote name ->
        "argument with name " <> name <<> " not found in remote schema"
      MissingRequiredArgument name ->
        "required argument with name " <> name <<> " is missing"
      TypeNotFound ty ->
        "type with name " <> ty <<> " not found"
      TableNotFound name ->
        "table with name " <> name <<> " not found"
      TableFieldNonexistent table fieldName ->
        "field with name " <> fieldName <<> " not found in table " <>> table
      ExpectedTypeButGot expTy actualTy ->
        "expected type " <> getBaseTy expTy <<> " but got " <>> getBaseTy actualTy
      InvalidType ty err ->
        "type " <> getBaseTy ty <<> err
      InvalidVariable var _ ->
        "variable " <> G.unVariable var <<> " is not found"
      NullNotAllowedHere ->
        "null is not allowed here"
      InvalidGTypeForStripping ty ->
        "type " <> getBaseTy ty <<> " is invalid for stripping"
      UnsupportedMultipleElementLists ->
        "multiple elements in list value is not supported"
      UnsupportedEnum ->
        "enum value is not supported"
-- | Validate a remote relationship given a context.
--
-- Resolves the hasura columns being joined on, looks up the remote schema,
-- then folds over the remote field-call path starting at the remote query
-- root, checking each hop's arguments and accumulating a stripped type map.
-- Returns the resolved 'RemoteFieldInfo' together with the stripped types.
validateRemoteRelationship ::
     RemoteRelationship
  -> RemoteSchemaMap
  -> [PGColumnInfo]
  -> Either (NonEmpty ValidationError) (RemoteFieldInfo, TypeMap)
validateRemoteRelationship remoteRelationship remoteSchemaMap pgColumns = do
  let remoteSchemaName = rtrRemoteSchema remoteRelationship
      table = rtrTable remoteRelationship
  -- every hasura field referenced must be an actual column of the table
  hasuraFields <- forM (toList $ rtrHasuraFields remoteRelationship) $
    \fieldName -> case find ((==) fieldName . fromPGCol . pgiColumn) pgColumns of
      Nothing -> Left $ pure $ TableFieldNonexistent table fieldName
      Just r -> pure r
  case HM.lookup remoteSchemaName remoteSchemaMap of
    Nothing -> Left $ pure $ RemoteSchemaNotFound remoteSchemaName
    Just (RemoteSchemaCtx _ gctx rsi) -> do
      -- walk the field-call path; the accumulator at each hop is
      -- (current object type info, its GType, (param map, type map))
      (_leafTyInfo, leafGType, (leafParamMap, leafTypeMap)) <-
        foldl
          (\eitherObjTyInfoAndTypes fieldCall ->
             case eitherObjTyInfoAndTypes of
               Left err -> Left err
               Right (objTyInfo, _, (_, typeMap)) -> do
                 objFldInfo <- lookupField (fcName fieldCall) objTyInfo
                 -- only fields that live in the remote schema may take part
                 -- in a remote relationship
                 case _fiLoc objFldInfo of
                   TLHasuraType ->
                     Left
                       (pure (FieldNotFoundInRemoteSchema (fcName fieldCall)))
                   TLCustom ->
                     Left
                       (pure (FieldNotFoundInRemoteSchema (fcName fieldCall)))
                   TLRemoteType {} -> do
                     let providedArguments =
                           remoteArgumentsToMap (fcArguments fieldCall)
                     -- check the provided arguments (including column
                     -- variables) against the field's declared parameters
                     toEither
                       (validateRemoteArguments
                          (_fiParams objFldInfo)
                          providedArguments
                          (HM.fromList
                             (map
                                (first pgColumnToVariable)
                                (HM.toList $ mapFromL (pgiColumn) pgColumns)))
                          (GS._gTypes gctx))
                     -- arguments the relationship already supplies are
                     -- stripped from what is exposed to the end user
                     (newParamMap, newTypeMap) <-
                       first
                         pure
                         (runStateT
                            (stripInMap
                               remoteRelationship
                               (GS._gTypes gctx)
                               (_fiParams objFldInfo)
                               providedArguments)
                            typeMap)
                     -- the next hop must be an object type; a scalar is only
                     -- acceptable as the leaf of the path
                     innerObjTyInfo <-
                       if isObjType (GS._gTypes gctx) objFldInfo
                       then getTyInfoFromField (GS._gTypes gctx) objFldInfo
                       else if isScalarType (GS._gTypes gctx) objFldInfo
                       then pure objTyInfo
                       else (Left
                               (pure
                                  (InvalidType
                                     (_fiTy objFldInfo)
                                     "only objects or scalar types expected")))
                     pure
                       ( innerObjTyInfo
                       , _fiTy objFldInfo
                       , (newParamMap, newTypeMap)))
          (pure
             ( GS._gQueryRoot gctx
             , G.toGT (_otiName $ GS._gQueryRoot gctx)
             , (mempty, mempty)))
          (unRemoteFields $ rtrRemoteField remoteRelationship)
      pure
        ( RemoteFieldInfo
            { _rfiName = rtrName remoteRelationship
            , _rfiGType = leafGType
            , _rfiParamMap = leafParamMap
            , _rfiHasuraFields = HS.fromList hasuraFields
            , _rfiRemoteFields = unRemoteFields $ rtrRemoteField remoteRelationship
            , _rfiRemoteSchema = rsi
            }
        , leafTypeMap)
  where
    -- resolve a field's named base type to its object type info; anything
    -- else is reported as not found in the remote schema
    getTyInfoFromField types field =
      let baseTy = getBaseTy (_fiTy field)
          fieldName = _fiName field
          typeInfo = HM.lookup baseTy types
      in case typeInfo of
           Just (TIObj objTyInfo) -> pure objTyInfo
           _ -> Left (pure (FieldNotFoundInRemoteSchema fieldName))
    -- does the field's base type name an object type?
    isObjType types field =
      let baseTy = getBaseTy (_fiTy field)
          typeInfo = HM.lookup baseTy types
      in case typeInfo of
           Just (TIObj _) -> True
           _ -> False
    -- does the field's base type name a scalar type?
    isScalarType types field =
      let baseTy = getBaseTy (_fiTy field)
          typeInfo = HM.lookup baseTy types
      in case typeInfo of
           Just (TIScalar _) -> True
           _ -> False
    -- index the provided remote arguments by argument name
    remoteArgumentsToMap =
      HM.fromList .
      map (\field -> (G._ofName field, G._ofValue field)) .
      getRemoteArguments
-- | Return a map with keys deleted whose template argument is
-- specified as an atomic (variable, constant); keys which are kept
-- have their values modified by 'stripObject' or 'stripList' (via
-- 'stripValue'). Arguments not mentioned in the template are passed
-- through unchanged.
stripInMap ::
     RemoteRelationship -> HM.HashMap G.NamedType TypeInfo
  -> HM.HashMap G.Name InpValInfo
  -> HM.HashMap G.Name G.Value
  -> StateT (HM.HashMap G.NamedType TypeInfo) (Either ValidationError) (HM.HashMap G.Name InpValInfo)
stripInMap remoteRelationshipName types schemaArguments templateArguments =
  fmap
    (HM.mapMaybe id)
    (HM.traverseWithKey
       (\name inpValInfo ->
          case HM.lookup name templateArguments of
            -- argument not supplied by the template: keep it as-is
            Nothing -> pure (Just inpValInfo)
            -- argument supplied: strip it (Nothing) or rewrite its type
            Just value -> do
              maybeNewGType <- stripValue remoteRelationshipName types (_iviType inpValInfo) value
              pure
                (fmap
                   (\newGType -> inpValInfo {_iviType = newGType})
                   maybeNewGType))
       schemaArguments)
-- | Strip a value type completely (@Nothing@), or modify it, if the given
-- value is atomic-ish.
--
-- Atomic template values (variables, scalars, null, enums) are dropped
-- entirely; lists and objects are rewritten by 'stripList' / 'stripObject'.
stripValue ::
     RemoteRelationship
  -> HM.HashMap G.NamedType TypeInfo
  -> G.GType
  -> G.Value
  -> StateT (HM.HashMap G.NamedType TypeInfo) (Either ValidationError) (Maybe G.GType)
stripValue remoteRelationshipName types gtype value =
  -- (the redundant 'do' that wrapped this single case expression is removed)
  case value of
    G.VVariable {} -> pure Nothing
    G.VInt {} -> pure Nothing
    G.VFloat {} -> pure Nothing
    G.VString {} -> pure Nothing
    G.VBoolean {} -> pure Nothing
    G.VNull {} -> pure Nothing
    G.VEnum {} -> pure Nothing
    G.VList (G.ListValueG values) ->
      case values of
        -- an empty list supplies nothing, so nothing survives
        [] -> pure Nothing
        [gvalue] -> stripList remoteRelationshipName types gtype gvalue
        _ -> lift (Left UnsupportedMultipleElementLists)
    G.VObject (G.unObjectValue -> keypairs) ->
      fmap Just (stripObject remoteRelationshipName types gtype keypairs)
-- | Produce a new type for the list, or strip it entirely.
--
-- Fails with 'InvalidGTypeForStripping' when the outer type is not
-- actually a list type.
stripList ::
     RemoteRelationship
  -> HM.HashMap G.NamedType TypeInfo
  -> G.GType
  -> G.Value
  -> StateT (HM.HashMap G.NamedType TypeInfo) (Either ValidationError) (Maybe G.GType)
stripList remoteRelationshipName types originalOuterGType value =
  case originalOuterGType of
    G.TypeList nullability (G.ListType innerGType) -> do
      -- strip the single element's type; if it vanishes, so does the list
      maybeNewInnerGType <- stripValue remoteRelationshipName types innerGType value
      pure
        (fmap
           (\newGType -> G.TypeList nullability (G.ListType newGType))
           maybeNewInnerGType)
    _ -> lift (Left (InvalidGTypeForStripping originalOuterGType))
-- | Produce a new type for the given InpValInfo, modified by
-- 'stripInMap'. Objects can't be deleted entirely, just keys of an
-- object.
stripObject ::
     RemoteRelationship -> HM.HashMap G.NamedType TypeInfo
  -> G.GType
  -> [G.ObjectFieldG G.Value]
  -> StateT (HM.HashMap G.NamedType TypeInfo) (Either ValidationError) G.GType
stripObject remoteRelationshipName types originalGtype keypairs =
  case originalGtype of
    G.TypeNamed nullability originalNamedType ->
      case HM.lookup (getBaseTy originalGtype) types of
        Just (TIInpObj originalInpObjTyInfo) -> do
          let originalSchemaArguments = _iotiFields originalInpObjTyInfo
              -- rename the input object so the stripped variant doesn't
              -- clash with the unmodified type of the same name
              newNamedType =
                renameNamedType
                  (renameTypeForRelationship remoteRelationshipName)
                  originalNamedType
          newSchemaArguments <-
            stripInMap
              remoteRelationshipName
              types
              originalSchemaArguments
              templateArguments
          let newInpObjTyInfo =
                originalInpObjTyInfo
                {_iotiFields = newSchemaArguments, _iotiName = newNamedType}
              newGtype = G.TypeNamed nullability newNamedType
          -- register the stripped input object in the accumulated type map
          modify (HM.insert newNamedType (TIInpObj newInpObjTyInfo))
          pure newGtype
        _ -> lift (Left (InvalidGTypeForStripping originalGtype))
    _ -> lift (Left (InvalidGTypeForStripping originalGtype))
  where
    -- template (relationship-supplied) object fields, indexed by key
    templateArguments :: HM.HashMap G.Name G.Value
    templateArguments =
      HM.fromList (map (\(G.ObjectFieldG key val) -> (key, val)) keypairs)
-- | Produce a new name for a type, used when stripping the schema
-- types for a remote relationship.
--
-- The result is @\<original\>_remote_rel_\<schema\>_\<table\>\<relName\>@.
-- NOTE(review): there is no separator between the table name and the
-- relationship name, so distinct (table, relationship) pairs could in
-- principle collide — confirm whether this is intended.
-- TODO: Consider a separator character to avoid conflicts.
renameTypeForRelationship :: RemoteRelationship -> Text -> Text
renameTypeForRelationship rtr text =
  text <> "_remote_rel_" <> schemaName <> "_" <> tableName <> relName
  where
    QualifiedObject (SchemaName schemaName) (TableName tableName) = rtrTable rtr
    relName = remoteRelationshipNameToText (rtrName rtr)
-- | Rename a type by applying the given function to its underlying name.
renameNamedType :: (Text -> Text) -> G.NamedType -> G.NamedType
renameNamedType f (G.NamedType (G.Name t)) = G.NamedType (G.Name (f t))
-- | Convert a field name to a variable name.
pgColumnToVariable :: PGCol -> G.Variable
pgColumnToVariable column = G.Variable (G.Name (getPGColTxt column))
-- | Lookup the field in the schema.
--
-- Fails with 'CouldntFindRemoteField' (carrying the object type's name for
-- context) when the object type has no field of the given name.
lookupField ::
     G.Name
  -> ObjTyInfo
  -> Either (NonEmpty ValidationError) ObjFldInfo
lookupField name objTyInfo =
  maybe
    (Left (pure (CouldntFindRemoteField name (_otiName objTyInfo))))
    pure
    (HM.lookup name (_otiFields objTyInfo))
-- | Validate remote input arguments against the remote schema.
--
-- Every provided argument must exist on the remote field and its value
-- must check against the declared type. Failures accumulate via the
-- 'Validation' applicative.
validateRemoteArguments ::
     HM.HashMap G.Name InpValInfo
  -> HM.HashMap G.Name G.Value
  -> HM.HashMap G.Variable PGColumnInfo
  -> HM.HashMap G.NamedType TypeInfo
  -> Validation (NonEmpty ValidationError) ()
validateRemoteArguments expectedArguments providedArguments permittedVariables types =
  -- 'traverse_' discards results and accumulates all failures
  -- applicatively, so no trailing 'pure ()' is needed.
  traverse_ validateProvided (HM.toList providedArguments)
  -- Not necessary to validate if all required args are provided in the relationship
  -- traverse validateExpected (HM.toList expectedArguments)
  where
    validateProvided (providedName, providedValue) =
      case HM.lookup providedName expectedArguments of
        Nothing -> Failure (pure (NoSuchArgumentForRemote providedName))
        Just (_iviType -> expectedType) ->
          validateType permittedVariables providedValue expectedType types
    -- validateExpected (expectedKey, expectedInpValInfo) =
    --   if G.isNullable (_iviType expectedInpValInfo)
    --   then pure ()
    --   else case _iviDefVal expectedInpValInfo of
    --          Just {} -> pure ()
    --          Nothing ->
    --            case HM.lookup expectedKey providedArguments of
    --              Nothing ->
    --                Failure (pure (MissingRequiredArgument expectedKey))
    --              Just {} -> pure ()
-- | Validate a value against a type.
--
-- Scalars are checked against the corresponding Postgres scalar type;
-- variables are resolved through the permitted-variables map; lists and
-- objects recurse into their element/field types. Errors accumulate via
-- the 'Validation' applicative.
validateType ::
     HM.HashMap G.Variable PGColumnInfo
  -> G.Value
  -> G.GType
  -> HM.HashMap G.NamedType TypeInfo
  -> Validation (NonEmpty ValidationError) ()
validateType permittedVariables value expectedGType types =
  case value of
    -- A variable must be one of the permitted table columns, and its
    -- column type must match the expected GraphQL type.
    G.VVariable variable ->
      case HM.lookup variable permittedVariables of
        Nothing -> Failure (pure (InvalidVariable variable permittedVariables))
        Just fieldInfo ->
          bindValidation
            (columnInfoToNamedType fieldInfo)
            (\actualNamedType -> assertType (G.toGT actualNamedType) expectedGType)
    -- Literal scalars map to the corresponding PG scalar types.
    G.VInt {} -> assertType (G.toGT $ mkScalarTy PGInteger) expectedGType
    G.VFloat {} -> assertType (G.toGT $ mkScalarTy PGFloat) expectedGType
    G.VBoolean {} -> assertType (G.toGT $ mkScalarTy PGBoolean) expectedGType
    G.VNull -> Failure (pure NullNotAllowedHere)
    G.VString {} -> assertType (G.toGT $ mkScalarTy PGText) expectedGType
    G.VEnum _ -> Failure (pure UnsupportedEnum)
    G.VList (G.unListValue -> values) -> do
      -- At most one template element is supported (see stripValue).
      case values of
        [] -> pure ()
        [_] -> pure ()
        _ -> Failure (pure UnsupportedMultipleElementLists)
      (assertListType expectedGType)
      -- Each element is checked against the unwrapped element type.
      (flip
         traverse_
         values
         (\val ->
            validateType permittedVariables val (unwrapTy expectedGType) types))
      pure ()
    -- For objects, each provided key must exist on the expected input
    -- object type; its value is checked against that field's type.
    G.VObject (G.unObjectValue -> values) ->
      flip
        traverse_
        values
        (\(G.ObjectFieldG name val) ->
           let expectedNamedType = getBaseTy expectedGType
           in
             case HM.lookup expectedNamedType types of
               Nothing -> Failure (pure $ TypeNotFound expectedNamedType)
               Just typeInfo ->
                 case typeInfo of
                   TIInpObj inpObjTypeInfo ->
                     case HM.lookup name (_iotiFields inpObjTypeInfo) of
                       Nothing -> Failure (pure $ NoSuchArgumentForRemote name)
                       Just (_iviType -> expectedType) ->
                         validateType permittedVariables val expectedType types
                   -- NOTE(review): the error wraps the *field* name as a
                   -- type here, not the expected type — confirm intended.
                   _ ->
                     Failure
                       (pure $
                        InvalidType
                          (G.toGT $ G.NamedType name)
                          "not an input object type")
-- | Assert that the actual type is compatible with the expected type,
-- accumulating an 'ExpectedTypeButGot' failure on any mismatch.
assertType :: G.GType -> G.GType -> Validation (NonEmpty ValidationError) ()
assertType actual expected = do
  -- Both must agree on whether they are list types.
  when (isListType' actual /= isListType' expected) $
    Failure (pure $ ExpectedTypeButGot expected actual)
  -- Lists are compared on their element types; named types on base names.
  if isListType' actual
    then assertType (unwrapTy actual) (unwrapTy expected)
    else when (getBaseTy actual /= getBaseTy expected) $
           Failure (pure $ ExpectedTypeButGot expected actual)
  pure ()
-- | Assert that the given type is a list type.
assertListType :: G.GType -> Validation (NonEmpty ValidationError) ()
assertListType actual
  | isListType' actual = pure ()
  | otherwise = Failure (pure $ InvalidType actual "is not a list type")
-- | Convert a field info to a named type, if possible.
-- Only scalar columns are supported; enum (and other) column types fail
-- with 'UnsupportedEnum'.
columnInfoToNamedType :: PGColumnInfo -> Validation (NonEmpty ValidationError) G.NamedType
columnInfoToNamedType columnInfo =
  case pgiType columnInfo of
    PGColumnScalar scalarType -> pure (mkScalarTy scalarType)
    _                         -> Failure (pure UnsupportedEnum)

View File

@ -1,3 +1,4 @@
{-# LANGUAGE ViewPatterns #-}
module Hasura.RQL.DDL.RemoteSchema
( runAddRemoteSchema
, runRemoveRemoteSchema
@ -6,22 +7,32 @@ module Hasura.RQL.DDL.RemoteSchema
, fetchRemoteSchemas
, addRemoteSchemaP1
, addRemoteSchemaP2Setup
, runIntrospectRemoteSchema
, addRemoteSchemaToCatalog
) where
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as S
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import Hasura.EncJSON
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as S
import qualified Database.PG.Query as Q
import Hasura.GraphQL.RemoteServer
import Hasura.GraphQL.Schema.Merge
import Hasura.Prelude
import Hasura.RQL.DDL.Deps
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Resolve.Introspect as RI
import qualified Hasura.GraphQL.Schema as GS
import qualified Hasura.GraphQL.Validate as VQ
import qualified Hasura.GraphQL.Validate.Types as VT
runAddRemoteSchema
:: ( HasVersion
, QErrM m
@ -53,9 +64,16 @@ addRemoteSchemaP2Setup
=> AddRemoteSchemaQuery -> m RemoteSchemaCtx
addRemoteSchemaP2Setup (AddRemoteSchemaQuery name def _) = do
httpMgr <- askHttpManager
rsi <- validateRemoteSchemaDef def
gCtx <- fetchRemoteSchema httpMgr name rsi
pure $ RemoteSchemaCtx name gCtx rsi
rsi <- validateRemoteSchemaDef name def
gCtx <- fetchRemoteSchema httpMgr rsi
pure $ RemoteSchemaCtx name (convRemoteGCtx gCtx) rsi
where
convRemoteGCtx rmGCtx =
GC.emptyGCtx { GS._gTypes = GC._rgTypes rmGCtx
, GS._gQueryRoot = GC._rgQueryRoot rmGCtx
, GS._gMutRoot = GC._rgMutationRoot rmGCtx
, GS._gSubRoot = GC._rgSubscriptionRoot rmGCtx
}
addRemoteSchemaP2
:: (HasVersion, MonadTx m, MonadIO m, HasHttpManager m) => AddRemoteSchemaQuery -> m ()
@ -80,6 +98,13 @@ removeRemoteSchemaP1 rsn = do
let rmSchemas = scRemoteSchemas sc
void $ onNothing (Map.lookup rsn rmSchemas) $
throw400 NotExists "no such remote schema"
case Map.lookup rsn rmSchemas of
Just _ -> return ()
Nothing -> throw400 NotExists "no such remote schema"
let depObjs = getDependentObjs sc remoteSchemaDepId
when (depObjs /= []) $ reportDeps depObjs
where
remoteSchemaDepId = SORemoteSchema rsn
runReloadRemoteSchema
:: (QErrM m, CacheRWM m)
@ -119,5 +144,35 @@ fetchRemoteSchemas =
ORDER BY name ASC
|] () True
where
fromRow (name, Q.AltJ def, comment) =
AddRemoteSchemaQuery name def comment
fromRow (n, Q.AltJ def, comm) = AddRemoteSchemaQuery n def comm
-- | Run the standard introspection query against a cached remote schema's
-- GCtx and wrap the result in the spec's @{"data": {"__schema": …}}@ shape.
-- Fails with 400 if the named remote schema is not in the cache.
runIntrospectRemoteSchema
  :: (CacheRM m, QErrM m) => RemoteSchemaNameQuery -> m EncJSON
runIntrospectRemoteSchema (RemoteSchemaNameQuery rsName) = do
  sc <- askSchemaCache
  rGCtx <-
    case Map.lookup rsName (scRemoteSchemas sc) of
      Nothing ->
        throw400 NotExists $
        "remote schema: " <> remoteSchemaNameToTxt rsName <> " not found"
      Just rCtx -> mergeGCtx (rscGCtx rCtx) GC.emptyGCtx
      -- merge with emptyGCtx to get default query fields
  -- Parse and validate the canned introspection query against the merged
  -- context, then resolve the single __schema field locally.
  queryParts <- flip runReaderT rGCtx $ VQ.getQueryParts introspectionQuery
  (rootSelSet, _) <- flip runReaderT rGCtx $ VT.runReusabilityT $ VQ.validateGQ queryParts
  schemaField <-
    case rootSelSet of
      VQ.RQuery (Seq.viewl -> selSet) -> getSchemaField selSet
      _ -> throw500 "expected query for introspection"
  (introRes, _) <- flip runReaderT rGCtx $ VT.runReusabilityT $ RI.schemaR schemaField
  pure $ wrapInSpecKeys introRes
  where
    -- Wrap the raw introspection object per the GraphQL response spec.
    wrapInSpecKeys introObj =
      encJFromAssocList
        [ ( T.pack "data"
          , encJFromAssocList [(T.pack "__schema", encJFromJValue introObj)])
        ]
    -- The validated selection set must contain exactly the __schema field.
    getSchemaField =
      \case
        Seq.EmptyL -> throw500 "found empty when looking for __schema field"
        (f Seq.:< Seq.Empty) -> pure f
        _ -> throw500 "expected __schema field, found many fields"

View File

@ -175,6 +175,7 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
, _boRemoteSchemas resolvedOutputs
, _boCustomTypes resolvedOutputs
, _boActions resolvedOutputs
, _boRemoteRelationshipTypes resolvedOutputs
)
returnA -< SchemaCache
@ -200,22 +201,33 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
buildAndCollectInfo = proc (catalogMetadata, invalidationKeys) -> do
let CatalogMetadata tables relationships permissions
eventTriggers remoteSchemas functions allowlistDefs
computedFields catalogCustomTypes actions cronTriggers = catalogMetadata
computedFields catalogCustomTypes actions remoteRelationships
cronTriggers = catalogMetadata
-- tables
tableRawInfos <- buildTableCache -< (tables, Inc.selectD #_ikMetadata invalidationKeys)
-- remote schemas
let remoteSchemaInvalidationKeys = Inc.selectD #_ikRemoteSchemas invalidationKeys
remoteSchemaMap <- buildRemoteSchemas -< (remoteSchemaInvalidationKeys, remoteSchemas)
-- relationships and computed fields
let relationshipsByTable = M.groupOn _crTable relationships
computedFieldsByTable = M.groupOn (_afcTable . _cccComputedField) computedFields
tableCoreInfos <- (tableRawInfos >- returnA)
remoteRelationshipsByTable = M.groupOn rtrTable remoteRelationships
rawTableCoreInfos <- (tableRawInfos >- returnA)
>-> (\info -> (info, relationshipsByTable) >- alignExtraTableInfo mkRelationshipMetadataObject)
>-> (\info -> (info, computedFieldsByTable) >- alignExtraTableInfo mkComputedFieldMetadataObject)
>-> (| Inc.keyed (\_ ((tableRawInfo, tableRelationships), tableComputedFields) -> do
>-> (\info -> (info, remoteRelationshipsByTable) >- alignExtraTableInfo mkRemoteRelationshipMetadataObject)
>-> (| Inc.keyed (\_ (((tableRawInfo, tableRelationships), tableComputedFields), tableRemoteRelationships) -> do
let columns = _tciFieldInfoMap tableRawInfo
allFields <- addNonColumnFields -<
(tableRawInfos, columns, tableRelationships, tableComputedFields)
returnA -< tableRawInfo { _tciFieldInfoMap = allFields }) |)
(allFields, typeMap) <- addNonColumnFields -<
(tableRawInfos, columns, M.map fst remoteSchemaMap, tableRelationships, tableComputedFields, tableRemoteRelationships)
returnA -< (tableRawInfo { _tciFieldInfoMap = allFields }, typeMap)) |)
let tableCoreInfos = M.map fst rawTableCoreInfos
remoteRelationshipTypes = mconcat $ map snd $ M.elems rawTableCoreInfos
-- permissions and event triggers
tableCoreInfosDep <- Inc.newDependency -< tableCoreInfos
@ -277,10 +289,6 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
cronTriggersMap <- buildCronTriggers -< ((),cronTriggers)
-- remote schemas
let remoteSchemaInvalidationKeys = Inc.selectD #_ikRemoteSchemas invalidationKeys
remoteSchemaMap <- buildRemoteSchemas -< (remoteSchemaInvalidationKeys, remoteSchemas)
returnA -< BuildOutputs
{ _boTables = tableCache
, _boActions = actionCache
@ -290,6 +298,7 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
-- If 'maybeResolvedCustomTypes' is 'Nothing', then custom types are inconsinstent.
-- In such case, use empty resolved value of custom types.
, _boCustomTypes = fromMaybe (NonObjectTypeMap mempty, mempty) maybeResolvedCustomTypes
, _boRemoteRelationshipTypes = remoteRelationshipTypes
, _boCronTriggers = cronTriggersMap
}
@ -433,13 +442,14 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
, HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject)
, (NonObjectTypeMap, AnnotatedObjects)
, ActionCache
, VT.TypeMap
) `arr` (RemoteSchemaMap, GS.GCtxMap, GS.GCtx)
buildGQLSchema = proc (tableCache, functionCache, remoteSchemas, customTypes, actionCache) -> do
buildGQLSchema = proc (tableCache, functionCache, remoteSchemas, customTypes, actionCache, remoteRelationshipTypes) -> do
baseGQLSchema <- bindA -< GS.mkGCtxMap tableCache functionCache actionCache
(| foldlA' (\(remoteSchemaMap, gqlSchemas, remoteGQLSchemas)
(remoteSchemaName, (remoteSchema, metadataObject)) ->
(| withRecordInconsistency (do
let gqlSchema = convRemoteGCtx $ rscGCtx remoteSchema
let gqlSchema = rscGCtx remoteSchema
mergedGQLSchemas <- bindErrorA -< mergeRemoteSchema gqlSchemas gqlSchema
mergedRemoteGQLSchemas <- bindErrorA -< mergeGCtx remoteGQLSchemas gqlSchema
let mergedRemoteSchemaMap = M.insert remoteSchemaName remoteSchema remoteSchemaMap
@ -448,9 +458,12 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
>-> (| onNothingA ((remoteSchemaMap, gqlSchemas, remoteGQLSchemas) >- returnA) |))
|) (M.empty, baseGQLSchema, GC.emptyGCtx) (M.toList remoteSchemas)
-- merge the custom types into schema
>-> (\(remoteSchemaMap, gqlSchema, defGqlCtx) -> do
(schemaWithCT, defCtxWithCT) <- bindA -< mergeCustomTypes gqlSchema defGqlCtx customTypes
returnA -< (remoteSchemaMap, schemaWithCT, defCtxWithCT)
>-> (\(remoteSchemaMap, gqlSchema', defGqlCtx') -> do
(gqlSchema, defGqlCtx) <- bindA -< mergeCustomTypes gqlSchema' defGqlCtx' customTypes
returnA -< ( remoteSchemaMap
, M.map (mergeRemoteTypesWithGCtx remoteRelationshipTypes <$>) gqlSchema
, mergeRemoteTypesWithGCtx remoteRelationshipTypes defGqlCtx
)
)
-- | @'withMetadataCheck' cascade action@ runs @action@ and checks if the schema changed as a

View File

@ -15,6 +15,7 @@ import Control.Arrow.Extended
import Control.Lens
import qualified Hasura.Incremental as Inc
import qualified Hasura.GraphQL.Validate.Types as VT
import Hasura.RQL.Types
import Hasura.RQL.Types.Catalog
@ -52,16 +53,17 @@ data BuildInputs
-- 'MonadWriter' side channel.
data BuildOutputs
= BuildOutputs
{ _boTables :: !TableCache
, _boActions :: !ActionCache
, _boFunctions :: !FunctionCache
, _boRemoteSchemas :: !(HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject))
{ _boTables :: !TableCache
, _boActions :: !ActionCache
, _boFunctions :: !FunctionCache
, _boRemoteSchemas :: !(HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject))
-- ^ We preserve the 'MetadataObject' from the original catalog metadata in the output so we can
-- reuse it later if we need to mark the remote schema inconsistent during GraphQL schema
-- generation (because of field conflicts).
, _boAllowlist :: !(HS.HashSet GQLQuery)
, _boCustomTypes :: !(NonObjectTypeMap, AnnotatedObjects)
, _boCronTriggers :: !(M.HashMap TriggerName CronTriggerInfo)
, _boAllowlist :: !(HS.HashSet GQLQuery)
, _boCustomTypes :: !(NonObjectTypeMap, AnnotatedObjects)
, _boRemoteRelationshipTypes :: !VT.TypeMap
, _boCronTriggers :: !(M.HashMap TriggerName CronTriggerInfo)
} deriving (Show, Eq)
$(makeLenses ''BuildOutputs)

View File

@ -86,6 +86,8 @@ pruneDanglingDependents cache = fmap (M.filter (not . null)) . traverse do
SOTable tableName -> void $ resolveTable tableName
SOFunction functionName -> unless (functionName `M.member` _boFunctions cache) $
Left $ "function " <> functionName <<> " is not tracked"
SORemoteSchema remoteSchemaName -> unless (remoteSchemaName `M.member` _boRemoteSchemas cache) $
Left $ "remote schema " <> remoteSchemaName <<> " is not found"
SOTableObj tableName tableObjectId -> do
tableInfo <- resolveTable tableName
case tableObjectId of
@ -95,6 +97,8 @@ pruneDanglingDependents cache = fmap (M.filter (not . null)) . traverse do
void $ resolveField tableInfo (fromRel relName) _FIRelationship "relationship"
TOComputedField fieldName ->
void $ resolveField tableInfo (fromComputedField fieldName) _FIComputedField "computed field"
TORemoteRel fieldName ->
void $ resolveField tableInfo (fromRemoteRelationship fieldName) _FIRemoteRelationship "remote relationship"
TOForeignKey constraintName -> do
let foreignKeys = _tciForeignKeys $ _tiCoreInfo tableInfo
unless (isJust $ find ((== constraintName) . _cName . _fkConstraint) foreignKeys) $
@ -128,8 +132,9 @@ deleteMetadataObject objectId = case objectId of
MORemoteSchema name -> boRemoteSchemas %~ M.delete name
MOCronTrigger name -> boCronTriggers %~ M.delete name
MOTableObj tableName tableObjectId -> boTables.ix tableName %~ case tableObjectId of
MTORel name _ -> tiCoreInfo.tciFieldInfoMap %~ M.delete (fromRel name)
MTOComputedField name -> tiCoreInfo.tciFieldInfoMap %~ M.delete (fromComputedField name)
MTORel name _ -> tiCoreInfo.tciFieldInfoMap %~ M.delete (fromRel name)
MTOComputedField name -> tiCoreInfo.tciFieldInfoMap %~ M.delete (fromComputedField name)
MTORemoteRelationship name -> tiCoreInfo.tciFieldInfoMap %~ M.delete (fromRemoteRelationship name)
MTOPerm roleName permType -> withPermType permType \accessor ->
tiRolePermInfoMap.ix roleName.permAccToLens accessor .~ Nothing
MTOTrigger name -> tiEventTriggerInfoMap %~ M.delete name

View File

@ -4,6 +4,7 @@ module Hasura.RQL.DDL.Schema.Cache.Fields
( addNonColumnFields
, mkRelationshipMetadataObject
, mkComputedFieldMetadataObject
, mkRemoteRelationshipMetadataObject
) where
import Hasura.Prelude
@ -11,6 +12,7 @@ import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashSet as HS
import qualified Data.Sequence as Seq
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.GraphQL.Validate.Types as VT
import Control.Arrow.Extended
import Data.Aeson
@ -19,6 +21,7 @@ import qualified Hasura.Incremental as Inc
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteRelationship
import Hasura.RQL.DDL.Schema.Cache.Common
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.Types
@ -30,10 +33,12 @@ addNonColumnFields
, ArrowKleisli m arr, MonadError QErr m )
=> ( HashMap QualifiedTable TableRawInfo
, FieldInfoMap PGColumnInfo
, RemoteSchemaMap
, [CatalogRelation]
, [CatalogComputedField]
) `arr` FieldInfoMap FieldInfo
addNonColumnFields = proc (rawTableInfo, columns, relationships, computedFields) -> do
, [RemoteRelationship]
) `arr` (FieldInfoMap FieldInfo, VT.TypeMap)
addNonColumnFields = proc (rawTableInfo, columns, remoteSchemaMap, relationships, computedFields, remoteRelationships) -> do
relationshipInfos
<- buildInfoMapPreservingMetadata _crRelName mkRelationshipMetadataObject buildRelationship
-< (_tciForeignKeys <$> rawTableInfo, relationships)
@ -43,15 +48,24 @@ addNonColumnFields = proc (rawTableInfo, columns, relationships, computedFields)
mkComputedFieldMetadataObject
buildComputedField
-< (HS.fromList $ M.keys rawTableInfo, computedFields)
rawRemoteRelationshipInfos
<- buildInfoMapPreservingMetadata rtrName mkRemoteRelationshipMetadataObject buildRemoteRelationship
-< ((M.elems columns, remoteSchemaMap), remoteRelationships)
let mapKey f = M.fromList . map (first f) . M.toList
relationshipFields = mapKey fromRel relationshipInfos
computedFieldFields = mapKey fromComputedField computedFieldInfos
remoteRelationshipFields = mapKey fromRemoteRelationship $
M.map (\((rf, _), mo) -> (rf, mo)) rawRemoteRelationshipInfos
typeMap = mconcat $ map (snd . fst) $ M.elems rawRemoteRelationshipInfos
-- First, check for conflicts between non-column fields, since we can raise a better error
-- message in terms of the two metadata objects that define them.
(align relationshipFields computedFieldFields >- returnA)
>-> (| Inc.keyed (\fieldName fields -> (fieldName, fields) >- noFieldConflicts) |)
fieldInfoMap <- (align relationshipFields computedFieldFields >- returnA)
>-> (| Inc.keyed (\fieldName fields -> (fieldName, fields) >- noFieldConflicts FIRelationship FIComputedField) |)
-- Second, align with remote relationship fields
>-> (\fields -> align (M.catMaybes fields) remoteRelationshipFields >- returnA)
>-> (| Inc.keyed (\fieldName fields -> (fieldName, fields) >- noFieldConflicts id FIRemoteRelationship) |)
-- Next, check for conflicts with custom field names. This is easiest to do before merging with
-- the column info itself because we have access to the information separately, and custom field
-- names are not currently stored as a separate map (but maybe should be!).
@ -59,14 +73,16 @@ addNonColumnFields = proc (rawTableInfo, columns, relationships, computedFields)
-- Finally, check for conflicts with the columns themselves.
>-> (\fields -> align columns (M.catMaybes fields) >- returnA)
>-> (| Inc.keyed (\_ fields -> fields >- noColumnConflicts) |)
returnA -< (fieldInfoMap, typeMap)
where
noFieldConflicts = proc (fieldName, fields) -> case fields of
This (relationship, metadata) -> returnA -< Just (FIRelationship relationship, metadata)
That (computedField, metadata) -> returnA -< Just (FIComputedField computedField, metadata)
These (_, relationshipMetadata) (_, computedFieldMetadata) -> do
noFieldConflicts this that = proc (fieldName, fields) -> case fields of
This (thisField, metadata) -> returnA -< Just (this thisField, metadata)
That (thatField, metadata) -> returnA -< Just (that thatField, metadata)
These (_, thisMetadata) (_, thatMetadata) -> do
tellA -< Seq.singleton $ CIInconsistency $ ConflictingObjects
("conflicting definitions for field " <>> fieldName)
[relationshipMetadata, computedFieldMetadata]
[thisMetadata, thatMetadata]
returnA -< Nothing
noCustomFieldConflicts = proc (columns, nonColumnFields) -> do
@ -143,3 +159,26 @@ buildComputedField = proc (trackedTableNames, computedField) -> do
bindErrorA -< addComputedFieldP2Setup trackedTableNames qt name def rawfi comment)
|) (addTableContext qt . addComputedFieldContext))
|) (mkComputedFieldMetadataObject computedField)
-- | Build the 'MetadataObject' identifying a remote relationship (by its
-- table and name) with the full relationship definition as its payload,
-- used for inconsistency reporting.
mkRemoteRelationshipMetadataObject :: RemoteRelationship -> MetadataObject
mkRemoteRelationshipMetadataObject rr =
  let objectId = MOTableObj (rtrTable rr) $ MTORemoteRelationship $ rtrName rr
  in MetadataObject objectId $ toJSON rr
-- | Resolve a remote relationship into its field info plus the GraphQL
-- types it contributes, recording schema dependencies; on failure the
-- relationship is recorded as inconsistent (yielding 'Nothing') rather
-- than aborting the cache build.
buildRemoteRelationship
  :: ( ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr
     , ArrowKleisli m arr, MonadError QErr m )
  => (([PGColumnInfo], RemoteSchemaMap), RemoteRelationship) `arr` Maybe (RemoteFieldInfo, VT.TypeMap)
buildRemoteRelationship = proc ((pgColumns, remoteSchemaMap), remoteRelationship) -> do
  let relationshipName = rtrName remoteRelationship
      tableName = rtrTable remoteRelationship
      metadataObject = mkRemoteRelationshipMetadataObject remoteRelationship
      schemaObj = SOTableObj (rtrTable remoteRelationship) $ TORemoteRel relationshipName
      -- NOTE(review): no space between "relationship" and the name in the
      -- rendered error — confirm whether intended.
      addRemoteRelationshipContext e = "in remote relationship" <> relationshipName <<> ": " <> e
  (| withRecordInconsistency (
     (| modifyErrA (do
        (remoteField, typeMap, dependencies) <- bindErrorA -< resolveRemoteRelationship remoteRelationship pgColumns remoteSchemaMap
        recordDependencies -< (metadataObject, schemaObj, dependencies)
        returnA -< (remoteField, typeMap))
     |)(addTableContext tableName . addRemoteRelationshipContext))
   |) metadataObject

View File

@ -24,13 +24,13 @@ module Hasura.RQL.DDL.Schema.Diff
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.RQL.Types.Catalog
import Hasura.Server.Utils (duplicates)
import Hasura.SQL.Types
import qualified Database.PG.Query as Q
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.List.Extended (duplicates)
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
@ -234,7 +234,7 @@ getFuncDiff oldMeta newMeta =
getOverloadedFuncs
:: [QualifiedFunction] -> [FunctionMeta] -> [QualifiedFunction]
getOverloadedFuncs trackedFuncs newFuncMeta =
duplicates $ map fmFunction trackedMeta
toList $ duplicates $ map fmFunction trackedMeta
where
trackedMeta = flip filter newFuncMeta $ \fm ->
fmFunction fm `elem` trackedFuncs

View File

@ -325,6 +325,8 @@ updateColExp qt rf (ColExp fld val) =
be <- decodeValue val
ube <- updateFieldInBoolExp remTable rf be
return $ toJSON ube
FIRemoteRelationship {} ->
throw500 "cannot update remote field" -- TODO: determine the proper behavior here.
(oFld, nFld, opQT) = case rf of
RFCol (RenameItem tn oCol nCol) -> (fromPGCol oCol, fromPGCol nCol, tn)

View File

@ -281,6 +281,10 @@ delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
DELETE FROM "hdb_catalog"."hdb_computed_field"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_remote_relationship"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
deleteTableFromCatalog qtn
-- | Builds an initial @'TableCache' 'PGColumnInfo'@ from catalog information. Does not fill in

View File

@ -4,7 +4,7 @@ module Hasura.RQL.DML.Delete
, AnnDelG(..)
, traverseAnnDel
, AnnDel
, deleteQueryToTx
, execDeleteQuery
, runDelete
) where
@ -20,6 +20,7 @@ import Hasura.RQL.DML.Mutation
import Hasura.RQL.DML.Returning
import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import qualified Database.PG.Query as Q
@ -112,16 +113,23 @@ validateDeleteQ
validateDeleteQ =
runDMLP1T . validateDeleteQWith sessVarFromCurrentSetting binRHSBuilder
deleteQueryToTx :: Bool -> (AnnDel, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON
deleteQueryToTx strfyNum (u, p) =
runMutation $ Mutation (dqp1Table u) (deleteCTE, p)
execDeleteQuery
:: (HasVersion, MonadTx m, MonadIO m)
=> Bool
-> Maybe MutationRemoteJoinCtx
-> (AnnDel, DS.Seq Q.PrepArg)
-> m EncJSON
execDeleteQuery strfyNum remoteJoinCtx (u, p) =
runMutation $ mkMutation remoteJoinCtx (dqp1Table u) (deleteCTE, p)
(dqp1Output u) (dqp1AllCols u) strfyNum
where
deleteCTE = mkDeleteCTE u
runDelete
:: (QErrM m, UserInfoM m, CacheRM m, MonadTx m, HasSQLGenCtx m)
:: ( HasVersion, QErrM m, UserInfoM m, CacheRM m
, MonadTx m, HasSQLGenCtx m, MonadIO m
)
=> DeleteQuery -> m EncJSON
runDelete q = do
strfyNum <- stringifyNum <$> askSQLGenCtx
validateDeleteQ q >>= liftTx . deleteQueryToTx strfyNum
validateDeleteQ q >>= execDeleteQuery strfyNum Nothing

View File

@ -15,6 +15,7 @@ import Hasura.RQL.DML.Returning
import Hasura.RQL.GBoolExp
import Hasura.RQL.Instances ()
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types
@ -250,10 +251,13 @@ convInsQ =
sessVarFromCurrentSetting
binRHSBuilder
insertP2 :: Bool -> (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON
insertP2 strfyNum (u, p) =
runMutation
$ Mutation (iqp1Table u) (insertCTE, p)
execInsertQuery
:: (HasVersion, MonadTx m, MonadIO m)
=> Bool
-> Maybe MutationRemoteJoinCtx
-> (InsertQueryP1, DS.Seq Q.PrepArg) -> m EncJSON
execInsertQuery strfyNum remoteJoinCtx (u, p) =
runMutation $ mkMutation remoteJoinCtx (iqp1Table u) (insertCTE, p)
(iqp1Output u) (iqp1AllCols u) strfyNum
where
insertCTE = mkInsertCTE u
@ -332,10 +336,11 @@ insertOrUpdateCheckExpr _ _ insCheck _ =
insertCheckExpr "insert check constraint failed" insCheck
runInsert
:: (QErrM m, UserInfoM m, CacheRM m, MonadTx m, HasSQLGenCtx m)
=> InsertQuery
-> m EncJSON
:: ( HasVersion, QErrM m, UserInfoM m
, CacheRM m, MonadTx m, HasSQLGenCtx m, MonadIO m
)
=> InsertQuery -> m EncJSON
runInsert q = do
res <- convInsQ q
strfyNum <- stringifyNum <$> askSQLGenCtx
liftTx $ insertP2 strfyNum res
execInsertQuery strfyNum Nothing res

View File

@ -1,6 +1,9 @@
module Hasura.RQL.DML.Mutation
( Mutation(..)
( Mutation
, mkMutation
, MutationRemoteJoinCtx
, runMutation
, executeMutationOutputQuery
, mutateAndFetchCols
, mkSelCTEFromColVals
)
@ -8,52 +11,90 @@ where
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as DS
import qualified Database.PG.Query as Q
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as DS
import qualified Database.PG.Query as Q
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
import qualified Hasura.SQL.DML as S
import qualified Hasura.SQL.DML as S
import Hasura.EncJSON
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.RemoteJoin
import Hasura.RQL.DML.Returning
import Hasura.RQL.DML.Select
import Hasura.RQL.Instances ()
import Hasura.RQL.Instances ()
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types
import Hasura.SQL.Value
type MutationRemoteJoinCtx = (HTTP.Manager, [N.Header], UserInfo)
data Mutation
= Mutation
{ _mTable :: !QualifiedTable
, _mQuery :: !(S.CTE, DS.Seq Q.PrepArg)
, _mOutput :: !MutationOutput
, _mCols :: ![PGColumnInfo]
, _mStrfyNum :: !Bool
} deriving (Show, Eq)
{ _mTable :: !QualifiedTable
, _mQuery :: !(S.CTE, DS.Seq Q.PrepArg)
, _mOutput :: !MutationOutput
, _mCols :: ![PGColumnInfo]
, _mRemoteJoins :: !(Maybe (RemoteJoins, MutationRemoteJoinCtx))
, _mStrfyNum :: !Bool
}
runMutation :: Mutation -> Q.TxE QErr EncJSON
-- | Smart constructor for 'Mutation': splits any remote-join fields out of
-- the mutation output and pairs them with the HTTP/user context needed to
-- execute them. Remote joins are only retained when a context is supplied.
mkMutation
  :: Maybe MutationRemoteJoinCtx
  -> QualifiedTable
  -> (S.CTE, DS.Seq Q.PrepArg)
  -> MutationOutput
  -> [PGColumnInfo]
  -> Bool
  -> Mutation
mkMutation ctx table query output' allCols strfyNum =
  let (output, remoteJoins) = getRemoteJoinsMutationOutput output'
      -- Both the joins and the context must be present for remote-join
      -- execution ('Maybe' applicative).
      remoteJoinsCtx = (,) <$> remoteJoins <*> ctx
  in Mutation table query output allCols remoteJoinsCtx strfyNum
-- | Execute a 'Mutation': the mutate-then-select path is taken only when
-- the output contains nested fields; otherwise mutate and return directly.
runMutation
  :: (HasVersion, MonadTx m, MonadIO m)
  => Mutation -> m EncJSON
runMutation mut =
  bool (mutateAndReturn mut) (mutateAndSel mut) $
  hasNestedFld $ _mOutput mut
mutateAndReturn :: Mutation -> Q.TxE QErr EncJSON
mutateAndReturn (Mutation qt (cte, p) mutationOutput allCols strfyNum) =
encJFromBS . runIdentity . Q.getRow
<$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder $ toSQL selWith)
(toList p) True
mutateAndReturn
:: (HasVersion, MonadTx m, MonadIO m)
=> Mutation -> m EncJSON
mutateAndReturn (Mutation qt (cte, p) mutationOutput allCols remoteJoins strfyNum) =
executeMutationOutputQuery sqlQuery (toList p) remoteJoins
where
selWith = mkMutationOutputExp qt allCols Nothing cte mutationOutput strfyNum
sqlQuery = Q.fromBuilder $ toSQL $
mkMutationOutputExp qt allCols Nothing cte mutationOutput strfyNum
mutateAndSel :: Mutation -> Q.TxE QErr EncJSON
mutateAndSel (Mutation qt q mutationOutput allCols strfyNum) = do
mutateAndSel
:: (HasVersion, MonadTx m, MonadIO m)
=> Mutation -> m EncJSON
mutateAndSel (Mutation qt q mutationOutput allCols remoteJoins strfyNum) = do
-- Perform mutation and fetch unique columns
MutateResp _ columnVals <- mutateAndFetchCols qt allCols q strfyNum
MutateResp _ columnVals <- liftTx $ mutateAndFetchCols qt allCols q strfyNum
selCTE <- mkSelCTEFromColVals qt allCols columnVals
let selWith = mkMutationOutputExp qt allCols Nothing selCTE mutationOutput strfyNum
-- Perform select query and fetch returning fields
encJFromBS . runIdentity . Q.getRow
<$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder $ toSQL selWith) [] True
executeMutationOutputQuery (Q.fromBuilder $ toSQL selWith) [] remoteJoins
-- | Execute the given mutation-output query. With no remote join context the
-- query runs directly against Postgres and the single-row result is returned;
-- otherwise execution is delegated to 'executeQueryWithRemoteJoins' so that
-- remote relationship fields in the response get resolved.
executeMutationOutputQuery
  :: (HasVersion, MonadTx m, MonadIO m)
  => Q.Query -- ^ SQL query
  -> [Q.PrepArg] -- ^ Prepared params
  -> Maybe (RemoteJoins, MutationRemoteJoinCtx) -- ^ Remote joins context
  -> m EncJSON
executeMutationOutputQuery query prepArgs = \case
  Nothing ->
    runIdentity . Q.getRow
      <$> liftTx (Q.rawQE dmlTxErrorHandler query prepArgs True)
  Just (remoteJoins, (httpManager, reqHeaders, userInfo)) ->
    executeQueryWithRemoteJoins httpManager reqHeaders userInfo query prepArgs remoteJoins
mutateAndFetchCols

View File

@ -0,0 +1,480 @@
-- | Types and Functions for resolving remote join fields
module Hasura.RQL.DML.RemoteJoin
( executeQueryWithRemoteJoins
, getRemoteJoins
, getRemoteJoinsAggSel
, getRemoteJoinsMutationOutput
, RemoteJoins
) where
import Hasura.Prelude
import Control.Lens
import Data.Validation
import Data.List (nub)
import Hasura.EncJSON
import Hasura.GraphQL.RemoteServer (execRemoteGQ')
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Utils
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Returning
import Hasura.RQL.DML.Select.Types
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types ((<<>))
import qualified Data.Aeson as A
import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.Extended as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as HS
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Printer.Text as G
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
-- | Executes given query and fetch response JSON from Postgres. Substitutes remote relationship fields.
--
-- Processing happens in four steps:
--   1. run the SQL query and decode its (single-row) JSON response,
--   2. build a composite JSON value marking where remote join fields occur,
--   3. batch-query the remote servers for the join values,
--   4. splice the remote values back into the composite JSON.
executeQueryWithRemoteJoins
  :: (HasVersion, MonadTx m, MonadIO m)
  => HTTP.Manager -- ^ manager used for the remote server HTTP calls
  -> [N.Header]   -- ^ request headers forwarded to the remote servers
  -> UserInfo     -- ^ session info of the requesting user
  -> Q.Query      -- ^ SQL query to run against Postgres
  -> [Q.PrepArg]  -- ^ prepared arguments for the SQL query
  -> RemoteJoins  -- ^ remote joins to resolve in the response
  -> m EncJSON
executeQueryWithRemoteJoins manager reqHdrs userInfo q prepArgs rjs = do
  -- Step 1: Perform the query on database and fetch the response
  pgRes <- runIdentity . Q.getRow <$> liftTx (Q.rawQE dmlTxErrorHandler q prepArgs True)
  jsonRes <- either (throw500 . T.pack) pure $ AO.eitherDecode pgRes
  -- Step 2: Traverse through the JSON obtained in above step and generate composite JSON value with remote joins
  compositeJson <- traverseQueryResponseJSON rjMap jsonRes
  let remoteJoins = collectRemoteFields compositeJson
  -- Step 3: Make queries to remote server and fetch graphql response
  remoteServerResp <- fetchRemoteJoinFields manager reqHdrs userInfo remoteJoins
  -- Step 4: Replace remote fields in composite json with remote join values
  AO.toEncJSON <$> replaceRemoteFields compositeJson remoteServerResp
  where
    rjMap = Map.fromList $ toList rjs
-- | Path to the remote join field in query response JSON from Postgres.
newtype FieldPath = FieldPath {unFieldPath :: [FieldName]}
deriving (Show, Eq, Semigroup, Monoid, Hashable)
-- | Append a field name to the end of the given 'FieldPath'.
appendPath :: FieldName -> FieldPath -> FieldPath
appendPath fieldName (FieldPath fields) = FieldPath (fields ++ [fieldName])
-- | The counter which is used to append the alias generated for remote field. See 'pathToAlias'.
-- This guarentees the uniqueness of the alias.
newtype Counter = Counter {unCounter :: Int}
deriving (Show, Eq)
-- | Advance the counter by one.
incCounter :: Counter -> Counter
incCounter (Counter n) = Counter (n + 1)
-- | Return the current counter value and bump the state for the next caller.
getCounter :: MonadState Counter m => m Counter
getCounter = do
  current <- get
  put $ incCounter current
  pure current
-- | Generate the alias for remote field: the field path joined with
-- underscores, suffixed with the counter value to guarantee uniqueness.
pathToAlias :: FieldPath -> Counter -> G.Alias
pathToAlias path counter =
  let pathText = T.intercalate "_" $ map getFieldNameTxt $ unFieldPath path
      suffix = T.pack $ show $ unCounter counter
  in G.Alias $ G.Name $ pathText <> "__" <> suffix
-- | A 'RemoteJoin' represents the context of remote relationship to be extracted from 'AnnFldG's.
data RemoteJoin
= RemoteJoin
{ _rjName :: !FieldName -- ^ The remote join field name.
, _rjArgs :: ![RemoteFieldArgument] -- ^ User-provided arguments with variables.
, _rjSelSet :: ![G.Field] -- ^ User-provided selection set of remote field.
, _rjHasuraFields :: !(HashSet FieldName) -- ^ Table fields.
, _rjFieldCall :: !(NonEmpty FieldCall) -- ^ Remote server fields.
, _rjRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
, _rjPhantomFields :: ![PGColumnInfo]
-- ^ Hasura fields which are not in the selection set, but are required as
-- parameters to satisfy the remote join.
} deriving (Show, Eq)
type RemoteJoins = NE.NonEmpty (FieldPath, NE.NonEmpty RemoteJoin)
type RemoteJoinMap = Map.HashMap FieldPath (NE.NonEmpty RemoteJoin)
-- | Convert the accumulated join map into 'RemoteJoins', yielding 'Nothing'
-- when no remote joins were collected.
mapToNonEmpty :: RemoteJoinMap -> Maybe RemoteJoins
mapToNonEmpty joinMap = NE.nonEmpty $ Map.toList joinMap
-- | Traverse through 'AnnSimpleSel' and collect remote join fields (if any).
getRemoteJoins :: AnnSimpleSel -> (AnnSimpleSel, Maybe RemoteJoins)
getRemoteJoins sel =
  let (transformedSel, joinMap) = runState (transformSelect mempty sel) mempty
  in (transformedSel, mapToNonEmpty joinMap)
-- | Rewrite the fields of a simple select, collecting remote joins into the
-- state as a side effect. Recurses into array, object and computed fields.
transformSelect :: FieldPath -> AnnSimpleSel -> State RemoteJoinMap AnnSimpleSel
transformSelect path sel = do
  newFields <- transformAnnFields path (_asnFields sel)
  pure $ sel { _asnFields = newFields }
-- | Traverse through 'AnnAggSel' and collect remote join fields (if any).
getRemoteJoinsAggSel :: AnnAggSel -> (AnnAggSel, Maybe RemoteJoins)
getRemoteJoinsAggSel sel =
  let (transformedSel, joinMap) = runState (transformAggSelect mempty sel) mempty
  in (transformedSel, mapToNonEmpty joinMap)
-- | Rewrite the fields of an aggregate select, collecting remote joins into
-- the state. Only node fields ('TAFNodes') can contain remote joins;
-- aggregates and expressions pass through unchanged.
transformAggSelect :: FieldPath -> AnnAggSel -> State RemoteJoinMap AnnAggSel
transformAggSelect path sel = do
  let aggFields = _asnFields sel
  transformedFields <- forM aggFields $ \(fieldName, aggField) ->
    (fieldName,) <$> case aggField of
      TAFAgg agg -> pure $ TAFAgg agg
      TAFNodes annFields -> TAFNodes <$> transformAnnFields (appendPath fieldName path) annFields
      TAFExp t -> pure $ TAFExp t
  pure sel{_asnFields = transformedFields}
-- | Traverse through 'MutationOutput' and collect remote join fields (if any).
--
-- Returns the 'MutationOutput' with its fields transformed, together with the
-- remote joins (keyed by their 'FieldPath') collected along the way.
getRemoteJoinsMutationOutput :: MutationOutput -> (MutationOutput, Maybe RemoteJoins)
getRemoteJoinsMutationOutput =
  second mapToNonEmpty . flip runState mempty . transformMutationOutput mempty
  where
    transformMutationOutput :: FieldPath -> MutationOutput -> State RemoteJoinMap MutationOutput
    transformMutationOutput path = \case
      MOutMultirowFields mutationFields ->
        MOutMultirowFields <$> transformMutationFields mutationFields
      MOutSinglerowObject annFields ->
        MOutSinglerowObject <$> transformAnnFields path annFields
      where
        -- NOTE: fixed typo in the local binding name ("transfrom" -> "transform").
        -- Only 'MRet' fields can carry remote joins; counts and expressions
        -- pass through unchanged.
        transformMutationFields fields =
          forM fields $ \(fieldName, field) -> do
            let fieldPath = appendPath fieldName path
            (fieldName,) <$> case field of
              MCount -> pure MCount
              MExp t -> pure $ MExp t
              MRet annFields -> MRet <$> transformAnnFields fieldPath annFields
-- | Transform an annotated field list: leave ordinary fields intact (recursing
-- into relationships and computed fields), and record every remote
-- relationship field ('FRemote') found at this path in the 'RemoteJoinMap'
-- state. Phantom columns — Hasura columns a remote join needs as arguments but
-- which the user did not select (see '_rjPhantomFields') — are appended to the
-- returned field list so the SQL query fetches them.
transformAnnFields :: FieldPath -> AnnFlds -> State RemoteJoinMap AnnFlds
transformAnnFields path fields = do
  let pgColumnFields = map fst $ getFields _FCol fields
      remoteSelects = getFields _FRemote fields
      -- build a 'RemoteJoin' for each remote select in this field list
      remoteJoins = flip map remoteSelects $ \(fieldName, remoteSelect) ->
        let RemoteSelect argsMap selSet hasuraColumns remoteFields rsi = remoteSelect
            hasuraColumnL = toList hasuraColumns
            hasuraColumnFields = HS.fromList $ map (fromPGCol . pgiColumn) hasuraColumnL
            -- columns required by the join but not already selected
            phantomColumns = filter ((`notElem` pgColumnFields) . fromPGCol . pgiColumn) hasuraColumnL
        in RemoteJoin fieldName argsMap selSet hasuraColumnFields remoteFields rsi phantomColumns

  transformedFields <- forM fields $ \(fieldName, field) -> do
    let fieldPath = appendPath fieldName path
    (fieldName,) <$> case field of
      FCol c -> pure $ FCol c
      FObj annRel -> FObj <$> transformAnnRel fieldPath annRel
      FArr (ASSimple annRel) -> FArr . ASSimple <$> transformAnnRel fieldPath annRel
      FArr (ASAgg aggRel) -> FArr . ASAgg <$> transformAnnAggRel fieldPath aggRel
      FComputedField computedField ->
        FComputedField <$> case computedField of
          CFSScalar _ -> pure computedField
          CFSTable jas annSel -> CFSTable jas <$> transformSelect fieldPath annSel
      FRemote rs -> pure $ FRemote rs
      FExp t -> pure $ FExp t

  case NE.nonEmpty remoteJoins of
    Nothing -> pure transformedFields
    Just nonEmptyRemoteJoins -> do
      -- select the phantom columns as plain column fields and record the
      -- remote joins found at this path
      let phantomColumns = map (\ci -> (fromPGCol $ pgiColumn ci, FCol $ AnnColField ci False Nothing)) $
            concatMap _rjPhantomFields remoteJoins
      modify (Map.insert path nonEmptyRemoteJoins)
      pure $ transformedFields <> phantomColumns
  where
    -- project out the fields matching the given prism, keeping their names
    getFields f = mapMaybe (sequence . second (^? f))
    -- recurse into an object/array relationship's select
    transformAnnRel fieldPath annRel = do
      let annSel = aarAnnSel annRel
      transformedSel <- transformSelect fieldPath annSel
      pure annRel{aarAnnSel = transformedSel}
    -- recurse into an aggregate relationship's select
    transformAnnAggRel fieldPath annRel = do
      let annSel = aarAnnSel annRel
      transformedSel <- transformAggSelect fieldPath annSel
      pure annRel{aarAnnSel = transformedSel}
-- | An ordered JSON object whose values are 'CompositeValue's.
type CompositeObject a = OMap.InsOrdHashMap Text (CompositeValue a)

-- | A hybrid JSON value representation which captures the context of remote join field in type parameter.
data CompositeValue a
  = CVOrdValue !AO.Value              -- ^ a plain JSON value with no remote joins inside
  | CVObject !(CompositeObject a)     -- ^ a JSON object, possibly containing remote join fields
  | CVObjectArray ![CompositeValue a] -- ^ a JSON array of composite values
  | CVFromRemote !a                   -- ^ a value to be fetched from a remote server
  deriving (Show, Eq, Functor, Foldable, Traversable)
-- | Collect every remote join field of a composite value, in traversal order
-- (via the derived 'Foldable' instance).
collectRemoteFields :: CompositeValue a -> [a]
collectRemoteFields = toList
-- | Collapse a fully-resolved composite value back into an ordered JSON value.
compositeValueToJSON :: CompositeValue AO.Value -> AO.Value
compositeValueToJSON cv = case cv of
  CVOrdValue v -> v
  CVFromRemote v -> v
  CVObject obj ->
    let resolvedPairs = OMap.toList $ fmap compositeValueToJSON obj
    in AO.object resolvedPairs
  CVObjectArray vals -> AO.array $ compositeValueToJSON <$> vals
-- | A 'RemoteJoinField' carries the minimal GraphQL AST of a remote relationship field.
-- All such 'RemoteJoinField's of a particular remote schema are batched together
-- and made GraphQL request to remote server to fetch remote join values.
data RemoteJoinField
= RemoteJoinField
{ _rjfRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
, _rjfAlias :: !G.Alias -- ^ Top level alias of the field
, _rjfField :: !G.Field -- ^ The field AST
, _rjfFieldCall :: ![G.Name] -- ^ Path to remote join value
, _rjfVariables :: ![(G.VariableDefinition,A.Value)] -- ^ Variables used in the AST
} deriving (Show, Eq)
-- | Generate composite JSON ('CompositeValue') parameterised over 'RemoteJoinField'
-- from remote join map and query response JSON from Postgres.
-- | Generate composite JSON ('CompositeValue') parameterised over 'RemoteJoinField'
-- from remote join map and query response JSON from Postgres.
traverseQueryResponseJSON
  :: (MonadError QErr m)
  => RemoteJoinMap -> AO.Value -> m (CompositeValue RemoteJoinField)
traverseQueryResponseJSON rjm =
  flip runReaderT rjm . flip evalStateT (Counter 0) . traverseValue mempty
  where
    -- look up the remote joins registered at the given path, if any
    askRemoteJoins :: MonadReader RemoteJoinMap m
                   => FieldPath -> m (Maybe (NE.NonEmpty RemoteJoin))
    askRemoteJoins path = asks (Map.lookup path)

    -- Walk the JSON value, recursing into objects and arrays; scalar values
    -- carry no remote joins and are kept as-is.
    traverseValue :: (MonadError QErr m, MonadReader RemoteJoinMap m, MonadState Counter m)
                  => FieldPath -> AO.Value -> m (CompositeValue RemoteJoinField)
    traverseValue path = \case
      AO.Object obj -> traverseObject obj
      AO.Array arr -> CVObjectArray <$> mapM (traverseValue path) (toList arr)
      v -> pure $ CVOrdValue v
      where
        -- Build the 'RemoteJoinField' for one remote join occurring in this
        -- object: the object's sibling fields supply values for the join's
        -- Hasura-column variables.
        mkRemoteSchemaField siblingFields remoteJoin = do
          counter <- getCounter
          let RemoteJoin fieldName inputArgs selSet hasuraFields fieldCall rsi _ = remoteJoin
              hasuraFieldVariables = map (G.Variable . G.Name . getFieldNameTxt) $ toList hasuraFields
              siblingFieldArgs = Map.fromList $
                map ((G.Variable . G.Name) *** ordJsonvalueToGValue) siblingFields
              -- restrict the sibling values to those the join actually uses
              hasuraFieldArgs = flip Map.filterWithKey siblingFieldArgs $ \k _ -> k `elem` hasuraFieldVariables
              -- alias is unique per path and counter; see 'pathToAlias'
              fieldAlias = pathToAlias (appendPath fieldName path) counter
          queryField <- fieldCallsToField (map _rfaArgument inputArgs) hasuraFieldArgs selSet fieldAlias fieldCall
          pure $ RemoteJoinField rsi
                                 fieldAlias
                                 queryField
                                 (map fcName $ toList $ NE.tail fieldCall)
                                 (concat $ mapMaybe _rfaVariable inputArgs)
          where
            ordJsonvalueToGValue = jsonValueToGValue . AO.fromOrdered

        traverseObject obj = do
          let fields = AO.toList obj
          maybeRemoteJoins <- askRemoteJoins path
          processedFields <- fmap catMaybes $ forM fields $ \(fieldText, value) -> do
            let fieldName = FieldName fieldText
                fieldPath = appendPath fieldName path
            fmap (fieldText,) <$> case maybeRemoteJoins of
              Nothing -> Just <$> traverseValue fieldPath value
              Just nonEmptyRemoteJoins -> do
                let remoteJoins = toList nonEmptyRemoteJoins
                    phantomColumnFields = map (fromPGCol . pgiColumn) $
                      concatMap _rjPhantomFields remoteJoins
                -- phantom columns were only selected to feed the join; drop
                -- them from the final response
                if | fieldName `elem` phantomColumnFields -> pure Nothing
                   | otherwise ->
                       case find ((== fieldName) . _rjName) remoteJoins of
                         Just rj -> Just . CVFromRemote <$> mkRemoteSchemaField fields rj
                         Nothing -> Just <$> traverseValue fieldPath value
          pure $ CVObject $ OMap.fromList processedFields
-- | Fetch remote join field value from remote servers by batching respective 'RemoteJoinField's
--
-- All fields targeting the same remote schema are sent in a single GraphQL
-- request; the per-schema "data" objects are then unioned into one object
-- keyed by the fields' unique aliases.
fetchRemoteJoinFields
  :: ( HasVersion
     , MonadError QErr m
     , MonadIO m
     )
  => HTTP.Manager
  -> [N.Header] -- ^ request headers forwarded to the remote server
  -> UserInfo
  -> [RemoteJoinField]
  -> m AO.Object
fetchRemoteJoinFields manager reqHdrs userInfo remoteJoins = do
  results <- forM (Map.toList remoteSchemaBatch) $ \(rsi, batch) -> do
    let batchList = toList batch
        gqlReq = fieldsToRequest G.OperationTypeQuery
                                 (map _rjfField $ batchList)
                                 (concat (map _rjfVariables $ batchList))
        gqlReqUnparsed = (GQLQueryText . G.renderExecutableDoc . G.ExecutableDocument . unGQLExecDoc) <$> gqlReq
    -- NOTE: discard remote headers (for now):
    (_, _, respBody) <- execRemoteGQ' manager userInfo reqHdrs gqlReqUnparsed rsi G.OperationTypeQuery
    case AO.eitherDecode respBody of
      Left e -> throw500 $ "Remote server response is not valid JSON: " <> T.pack e
      Right r -> do
        respObj <- either throw500 pure $ AO.asObject r
        let errors = AO.lookup "errors" respObj
        -- an absent or null "errors" key means success; otherwise the remote
        -- errors are surfaced (as internal detail) in a 400
        if | isNothing errors || errors == Just AO.Null ->
               case AO.lookup "data" respObj of
                 Nothing -> throw400 Unexpected "\"data\" field not found in remote response"
                 Just v -> either throw500 pure $ AO.asObject v
           | otherwise ->
               throwError (err400 Unexpected "Errors from remote server")
                 {qeInternal = Just $ A.object ["errors" A..= (AO.fromOrdered <$> errors)]}
  -- aliases are unique (see 'pathToAlias'), so the per-schema results should
  -- union without key conflicts
  either (throw500 . T.pack) pure $ foldM AO.safeUnion AO.empty results
  where
    remoteSchemaBatch = Map.groupOnNE _rjfRemoteSchema remoteJoins

    -- Assemble a single GraphQL request from the batched fields, attaching
    -- variable definitions and their values when any are present.
    fieldsToRequest :: G.OperationType -> [G.Field] -> [(G.VariableDefinition,A.Value)] -> GQLReqParsed
    fieldsToRequest opType gfields vars =
      case vars of
        [] ->
          GQLReq
            { _grOperationName = Nothing
            , _grQuery =
                GQLExecDoc
                  [ G.ExecutableDefinitionOperation
                      (G.OperationDefinitionTyped
                        ( emptyOperationDefinition
                          { G._todSelectionSet = map G.SelectionField gfields
                          }
                        )
                      )
                  ]
            , _grVariables = Nothing
            }
        vars' ->
          GQLReq
            { _grOperationName = Nothing
            , _grQuery =
                GQLExecDoc
                  [ G.ExecutableDefinitionOperation
                      (G.OperationDefinitionTyped
                        ( emptyOperationDefinition
                          { G._todSelectionSet = map G.SelectionField gfields
                          -- nub: the same variable may be used by several fields
                          , G._todVariableDefinitions = nub (map fst vars')
                          }
                        )
                      )
                  ]
            , _grVariables = Just $ Map.fromList
                (map (\(varDef, val) -> (G._vdVariable varDef, val)) vars')
            }
      where
        emptyOperationDefinition =
          G.TypedOperationDefinition {
            G._todType = opType
          , G._todName = Nothing
          , G._todVariableDefinitions = []
          , G._todDirectives = []
          , G._todSelectionSet = [] }
-- | Replace 'RemoteJoinField' in composite JSON with its JSON value from the
-- remote server response, then collapse the composite value into plain JSON.
replaceRemoteFields
  :: MonadError QErr m
  => CompositeValue RemoteJoinField
  -> AO.Object
  -> m AO.Value
replaceRemoteFields compositeJson remoteServerResponse =
  compositeValueToJSON <$> traverse replaceValue compositeJson
  where
    -- Resolve one remote join field: look up its top-level alias in the remote
    -- response, then drill down along the remaining field call path.
    replaceValue rj = do
      let alias = G.unAlias $ _rjfAlias rj
          fieldCall = _rjfFieldCall rj
      extractAtPath (alias:fieldCall) $ AO.Object remoteServerResponse

    -- | 'FieldCall' is path to remote relationship value in remote server response.
    -- 'extractAtPath' traverse through the path and extracts the json value.
    -- For arrays the same (remaining) path is applied to every element.
    -- NOTE: fixed typo in the error message ("cannnot" -> "cannot").
    extractAtPath path v =
      case NE.nonEmpty path of
        Nothing -> pure v
        Just (h :| rest) -> case v of
          AO.Object o -> maybe
            (throw500 $ "cannot find value in remote response at path " <> T.pack (show path))
            (extractAtPath rest)
            (AO.lookup (G.unName h) o)
          AO.Array arr -> AO.array <$> mapM (extractAtPath path) (toList arr)
          _ -> throw500 $ "expecting array or object in remote response at path " <> T.pack (show path)
-- | Fold nested 'FieldCall's into a bare 'Field', inserting the passed
-- selection set at the leaf of the tree we construct.
fieldCallsToField
  :: MonadError QErr m
  => [G.Argument]
  -- ^ user-provided arguments of the remote relationship field
  -> Map.HashMap G.Variable G.Value
  -- ^ variable values substituted into the templated remote arguments
  -> [G.Field]
  -- ^ Inserted at leaf of nested FieldCalls
  -> G.Alias
  -- ^ Top-level name to set for this Field
  -> NonEmpty FieldCall
  -> m G.Field
fieldCallsToField rrArguments variables finalSelSet topAlias =
  fmap (\f -> f{G._fAlias = Just topAlias}) . nest
  where
    -- almost: `foldr nest finalSelSet`
    nest ((FieldCall name remoteArgs) :| rest) = do
      templatedArguments <- createArguments variables remoteArgs
      (args, selSet) <- case NE.nonEmpty rest of
        Just f -> do
          s <- nest f
          pure (templatedArguments, pure s)
        Nothing ->
          -- at the leaf: merge user-provided arguments into the templated
          -- ones and attach the final selection set
          let argsToMap = Map.fromList . map (G._aName &&& G._aValue)
              arguments = map (uncurry G.Argument) $ Map.toList $
                            Map.unionWith mergeValue
                              (argsToMap rrArguments)
                              (argsToMap templatedArguments)
          in pure (arguments, finalSelSet)
      pure $ G.Field Nothing name args [] $ map G.SelectionField selSet

    -- This is a kind of "deep merge".
    -- For e.g. suppose the input argument of the remote field is something like:
    -- `where: { id : 1}`
    -- And during execution, client also gives the input arg: `where: {name: "tiru"}`
    -- We need to merge the input argument to where: {id : 1, name: "tiru"}
    --
    -- NOTE(review): the fall-through case uses 'error', which crashes on a
    -- list/object type mismatch instead of raising a 'QErr' — consider
    -- surfacing it via 'MonadError'.
    mergeValue :: G.Value -> G.Value -> G.Value
    mergeValue lVal rVal = case (lVal, rVal) of
      (G.VList (G.ListValueG l), G.VList (G.ListValueG r)) ->
        G.VList $ G.ListValueG $ l <> r
      (G.VObject (G.ObjectValueG l), G.VObject (G.ObjectValueG r)) ->
        let fieldsToMap = Map.fromList . map (G._ofName &&& G._ofValue)
        in G.VObject $ G.ObjectValueG $ map (uncurry G.ObjectFieldG) $ Map.toList $
             Map.unionWith mergeValue (fieldsToMap l) (fieldsToMap r)
      (_, _) -> error $ "can only merge a list with another list or an " <>
                        "object with another object"
-- | Create an argument map using the inputs taken from the hasura database.
-- Fails with a 400 when any referenced variable has no value.
createArguments
  :: (MonadError QErr m)
  => Map.HashMap G.Variable G.Value
  -> RemoteArguments
  -> m [G.Argument]
createArguments variables (RemoteArguments arguments) =
  case toEither (substituteVariables variables arguments) of
    Left errors ->
      throw400 Unexpected $ "Found errors: " <> T.intercalate ", " errors
    Right substituted ->
      pure [G.Argument key val | G.ObjectFieldG key val <- substituted]
-- | Substitute values in the argument list, accumulating an error for every
-- variable that has no value in the map.
substituteVariables
  :: HashMap G.Variable G.Value -- ^ Values to use.
  -> [G.ObjectFieldG G.Value]   -- ^ A template.
  -> Validation [Text] [G.ObjectFieldG G.Value]
substituteVariables values = traverse (traverse resolve)
  where
    resolve = \case
      G.VVariable variable ->
        maybe
          (Failure ["Value for variable " <> G.unVariable variable <<> " not provided"])
          Success
          (Map.lookup variable values)
      G.VList (G.ListValueG listValue) ->
        G.VList . G.ListValueG <$> traverse resolve listValue
      G.VObject (G.ObjectValueG objectValue) ->
        G.VObject . G.ObjectValueG <$> traverse (traverse resolve) objectValue
      other -> pure other

View File

@ -1,5 +1,7 @@
module Hasura.RQL.DML.Select
( selectP2
, selectQuerySQL
, selectAggQuerySQL
, convSelectQuery
, asSingleRowJsonResp
, module Hasura.RQL.DML.Select.Internal
@ -125,6 +127,9 @@ convOrderByElem sessVarBldr (flds, spi) = \case
[ fldName <<> " is a"
, " computed field and can't be used in 'order_by'"
]
-- TODO Rakesh
FIRemoteRelationship {} ->
throw400 UnexpectedPayload (mconcat [ fldName <<> " is a remote field" ])
OCRel fldName rest -> do
fldInfo <- askFieldInfo flds fldName
case fldInfo of
@ -146,6 +151,8 @@ convOrderByElem sessVarBldr (flds, spi) = \case
resolvedSelFltr <- convAnnBoolExpPartialSQL sessVarBldr $ spiFilter relSpi
AOCObj relInfo resolvedSelFltr <$>
convOrderByElem sessVarBldr (relFim, relSpi) rest
FIRemoteRelationship {} ->
throw400 UnexpectedPayload (mconcat [ fldName <<> " is a remote field" ])
convSelectQ
:: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
@ -266,6 +273,14 @@ convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do
convSelectQ fieldInfo selPermInfo
extSelQ sessVarBldr prepArgBuilder
-- | Build the SQL query for a simple (non-aggregate) select.
selectQuerySQL :: JsonAggSelect -> AnnSimpleSel -> Q.Query
selectQuerySQL jsonAggSelect =
  Q.fromBuilder . toSQL . mkSQLSelect jsonAggSelect
-- | Build the SQL query for an aggregate select.
selectAggQuerySQL :: AnnAggSel -> Q.Query
selectAggQuerySQL =
  Q.fromBuilder . toSQL . mkAggSelect
selectP2 :: JsonAggSelect -> (AnnSimpleSel, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON
selectP2 jsonAggSelect (sel, p) =
encJFromBS . runIdentity . Q.getRow

View File

@ -232,6 +232,7 @@ buildJsonObject pfx parAls arrRelCtx strfyNum flds =
FComputedField (CFSTable _ _) ->
let ccPfx = mkComputedFieldTableAls pfx fldAls
in S.mkQIdenExp ccPfx fldAls
FRemote _ -> S.SELit "null: remote field selected"
toSQLCol :: AnnColField -> S.SQLExp
toSQLCol (AnnColField col asText colOpM) =

View File

@ -3,17 +3,20 @@
module Hasura.RQL.DML.Select.Types where
import Control.Lens hiding ((.=))
import Data.Aeson.Types
import Language.Haskell.TH.Syntax (Lift)
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson as J
import Hasura.Prelude
import Hasura.RQL.Types
import qualified Hasura.SQL.DML as S
import qualified Hasura.SQL.DML as S
import Hasura.SQL.Types
type SelectQExt = SelectG ExtCol BoolExp Int
@ -158,11 +161,28 @@ data AnnColField
, _acfOp :: !(Maybe ColOp)
} deriving (Show, Eq)
-- | A user-provided argument to a remote field, together with any GraphQL
-- variables (and their JSON values) the argument refers to.
data RemoteFieldArgument
  = RemoteFieldArgument
  { _rfaArgument :: !G.Argument -- ^ the argument as given in the query
  , _rfaVariable :: !(Maybe [(G.VariableDefinition,J.Value)]) -- ^ variable definitions with their supplied values, if any
  }
  deriving (Eq,Show)

-- | An annotated remote relationship selection.
data RemoteSelect
  = RemoteSelect
  { _rselArgs          :: ![RemoteFieldArgument]  -- ^ arguments to the remote field
  , _rselSelection     :: ![G.Field]              -- ^ user-provided selection set of the remote field
  , _rselHasuraColumns :: !(HashSet PGColumnInfo) -- ^ Hasura columns the join depends on
  , _rselFieldCall     :: !(NonEmpty FieldCall)   -- ^ path of fields on the remote server
  , _rselRemoteSchema  :: !RemoteSchemaInfo       -- ^ the remote schema server info
  } deriving (Show, Eq)
data AnnFldG v
= FCol !AnnColField
| FObj !(ObjSelG v)
| FArr !(ArrSelG v)
| FComputedField !(ComputedFieldSel v)
| FRemote !RemoteSelect
| FExp !T.Text
deriving (Show, Eq)
@ -183,6 +203,7 @@ traverseAnnFld f = \case
FArr sel -> FArr <$> traverseArrSel f sel
FComputedField sel -> FComputedField <$> traverseComputedFieldSel f sel
FExp t -> FExp <$> pure t
FRemote s -> pure $ FRemote s
type AnnFld = AnnFldG S.SQLExp
@ -361,9 +382,9 @@ insertFunctionArg
-> a
-> FunctionArgsExpG a
-> FunctionArgsExpG a
insertFunctionArg argName index value (FunctionArgsExp positional named) =
if (index + 1) <= length positional then
FunctionArgsExp (insertAt index value positional) named
insertFunctionArg argName idx value (FunctionArgsExp positional named) =
if (idx + 1) <= length positional then
FunctionArgsExp (insertAt idx value positional) named
else FunctionArgsExp positional $
HM.insert (getFuncArgNameTxt argName) value named
where
@ -477,3 +498,6 @@ data Prefixes
{ _pfThis :: !Iden -- Current node prefix
, _pfBase :: !Iden -- Base table row identifier for computed field function
} deriving (Show, Eq)
$(makeLenses ''AnnSelG)
$(makePrisms ''AnnFldG)

View File

@ -4,7 +4,7 @@ module Hasura.RQL.DML.Update
, AnnUpdG(..)
, traverseAnnUpd
, AnnUpd
, updateQueryToTx
, execUpdateQuery
, runUpdate
) where
@ -23,6 +23,7 @@ import Hasura.RQL.DML.Returning
import Hasura.RQL.GBoolExp
import Hasura.RQL.Instances ()
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types
@ -223,17 +224,23 @@ validateUpdateQuery
validateUpdateQuery =
runDMLP1T . validateUpdateQueryWith sessVarFromCurrentSetting binRHSBuilder
updateQueryToTx
:: Bool -> (AnnUpd, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON
updateQueryToTx strfyNum (u, p) =
runMutation $ Mutation (uqp1Table u) (updateCTE, p)
-- | Build and run the update mutation. The optional remote join context is
-- used to resolve remote relationship fields in the returning set.
execUpdateQuery
  :: (HasVersion, MonadTx m, MonadIO m)
  => Bool -- ^ stringify-numbers setting (see 'stringifyNum')
  -> Maybe MutationRemoteJoinCtx -- ^ context for resolving remote joins, if any
  -> (AnnUpd, DS.Seq Q.PrepArg)
  -> m EncJSON
execUpdateQuery strfyNum remoteJoinCtx (u, p) =
  runMutation $ mkMutation remoteJoinCtx (uqp1Table u) (updateCTE, p)
    (uqp1Output u) (uqp1AllCols u) strfyNum
  where
    updateCTE = mkUpdateCTE u
runUpdate
:: (QErrM m, UserInfoM m, CacheRM m, MonadTx m, HasSQLGenCtx m)
:: ( HasVersion, QErrM m, UserInfoM m, CacheRM m
, MonadTx m, HasSQLGenCtx m, MonadIO m
)
=> UpdateQuery -> m EncJSON
runUpdate q = do
strfyNum <- stringifyNum <$> askSQLGenCtx
validateUpdateQuery q >>= liftTx . updateQueryToTx strfyNum
validateUpdateQuery q >>= execUpdateQuery strfyNum Nothing

View File

@ -313,6 +313,9 @@ annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do
return $ AVRel relInfo annRelBoolExp
FIComputedField _ ->
throw400 UnexpectedPayload "Computed columns can not be part of the where clause"
-- TODO Rakesh
FIRemoteRelationship{} ->
throw400 UnexpectedPayload "remote field unsupported"
toSQLBoolExp
:: S.Qual -> AnnBoolExpSQL -> S.BoolExp

Some files were not shown because too many files have changed in this diff Show More