allow exposing postgres functions through GraphQL interface (close #333) (#1073)

This commit is contained in:
Rakesh Emmadi 2019-01-25 09:01:54 +05:30 committed by Vamshi Surabhi
parent e7a42c6772
commit 0bf2457e23
101 changed files with 3887 additions and 332 deletions

View File

@ -28,7 +28,8 @@ var ravenVersions = []mt.Version{
}
var testMetadata = map[string][]byte{
"metadata": []byte(`query_templates: []
"metadata": []byte(`functions: []
query_templates: []
remote_schemas: []
tables:
- array_relationships: []
@ -40,7 +41,8 @@ tables:
table: test
update_permissions: []
`),
"empty-metadata": []byte(`query_templates: []
"empty-metadata": []byte(`functions: []
query_templates: []
remote_schemas: []
tables: []
`),

View File

@ -38,3 +38,71 @@ export const makeDataAPIOptions = (dataApiUrl, key, body) => ({
body,
failOnStatusCode: false,
});
// SQL (editor-style) definition for the throwaway search function used by the
// custom-function Cypress tests; `i` suffixes the function name.
export const testCustomFunctionDefinition = i =>
  `create function search_posts_${i} (search text) returns setof post as $$ select * from post where title ilike ('%' || search || '%') or content ilike ('%' || search || '%') $$ language sql stable;\n`;
// Name of the i-th test search function, e.g. `search_posts_1`.
export const getCustomFunctionName = i => `search_posts_${i}`;
// Bulk Data-API payload that creates the i-th search function via run_sql and
// then tracks it, so the test can bypass the SQL editor UI.
export const testCustomFunctionSQL = i => {
  const functionName = `search_posts_${i}`;
  const createFunction = {
    type: 'run_sql',
    args: {
      sql: `CREATE OR REPLACE FUNCTION public.search_posts_${i}(search text)\n RETURNS SETOF post\n LANGUAGE sql\n STABLE\nAS $function$\n select *\n from post\n where\n title ilike ('%' || search || '%') or\n content ilike ('%' || search || '%')\n $function$\n`,
      cascade: false,
    },
  };
  const trackFunctionQuery = {
    type: 'track_function',
    args: {
      name: functionName,
      schema: 'public',
    },
  };
  return {
    type: 'bulk',
    args: [createFunction, trackFunctionQuery],
  };
};
// Bulk Data-API payload that creates the `post` table and registers it with
// Hasura (add_existing_table_or_view) in a single call.
export const createTable = () => {
  const runSql = {
    type: 'run_sql',
    args: {
      sql:
        'create table post (\n id serial PRIMARY KEY,\n title TEXT,\n content TEXT\n )',
      cascade: false,
    },
  };
  const trackTable = {
    type: 'add_existing_table_or_view',
    args: {
      name: 'post',
      schema: 'public',
    },
  };
  return {
    type: 'bulk',
    args: [runSql, trackTable],
  };
};
// Bulk Data-API payload that drops the `post` table created by `createTable`.
export const dropTable = () => {
  const dropSql = {
    type: 'run_sql',
    args: {
      sql: 'DROP table post;',
      cascade: false,
    },
  };
  return {
    type: 'bulk',
    args: [dropSql],
  };
};
// Schema all test fixtures live in.
export const getSchema = () => {
  return 'public';
};

View File

@ -0,0 +1,104 @@
import {
getElementFromAlias,
baseUrl,
// testCustomFunctionDefinition,
getCustomFunctionName,
getSchema,
testCustomFunctionSQL,
createTable,
dropTable,
} from '../../../helpers/dataHelpers';
import {
dropTableRequest,
dataRequest,
validateCFunc,
validateUntrackedFunc,
} from '../../validators/validators';
// Navigate from anywhere in the console to the raw SQL editor
// (Data section -> SQL link) and verify the URL.
export const openRawSQL = () => {
  // eslint-disable-line
  // Open RawSQL
  cy.get('a')
    .contains('Data')
    .click();
  cy.wait(3000);
  cy.get(getElementFromAlias('sql-link')).click();
  cy.wait(3000);
  // Match URL — we should have landed on the SQL page.
  cy.url().should('eq', `${baseUrl}/data/sql`);
};
// Create the backing `post` table and the search function through the Data
// API (more reliable in CI than typing into the SQL editor), then verify the
// function is tracked.
export const createCustomFunctionSuccess = () => {
  dataRequest(createTable(1), 'success');
  cy.wait(5000);

  dataRequest(testCustomFunctionSQL(1), 'success');
  cy.wait(5000);

  // The function must now have a row in hdb_catalog.hdb_function.
  validateCFunc(getCustomFunctionName(1), getSchema(), 'success');
  cy.wait(5000);
};
// Untrack the test function from its modify page and confirm it disappears
// from the tracked-function catalogue.
export const unTrackFunction = () => {
  const modifyUrl = `data/schema/public/functions/${getCustomFunctionName(
    1
  )}/modify`;
  cy.visit(modifyUrl);
  cy.wait(5000);

  cy.get(getElementFromAlias('custom-function-edit-untrack-btn')).click();
  cy.wait(5000);

  validateUntrackedFunc(getCustomFunctionName(1), getSchema(), 'success');
  cy.wait(5000);
};
// Re-track the previously untracked function via its "track" button on the
// schema page, then confirm it is back in the tracked catalogue.
export const trackFunction = () => {
  const trackBtnSelector = getElementFromAlias(
    `add-track-function-${getCustomFunctionName(1)}`
  );
  cy.get(trackBtnSelector).should('exist');
  cy.get(trackBtnSelector).click();
  cy.wait(5000);

  validateCFunc(getCustomFunctionName(1), getSchema(), 'success');
  cy.wait(5000);
};
// The permissions tab of a function should expose the button linking to its
// SETOF table's permissions page.
export const verifyPermissionTab = () => {
  const permissionsUrl = `data/schema/public/functions/${getCustomFunctionName(
    1
  )}/permissions`;
  cy.visit(permissionsUrl);
  cy.wait(5000);
  cy.get(getElementFromAlias('custom-function-permission-btn')).should('exist');
  cy.wait(5000);
};
// Delete the test function from its modify page. window.prompt is stubbed to
// auto-answer the "type DELETE to confirm" dialog, then the backing table is
// dropped via the Data API.
export const deleteCustomFunction = () => {
  const modifyUrl = `data/schema/public/functions/${getCustomFunctionName(
    1
  )}/modify`;
  cy.visit(modifyUrl, {
    onBeforeLoad(win) {
      cy.stub(win, 'prompt').returns('DELETE');
    },
  });
  cy.wait(5000);

  cy.get(getElementFromAlias('custom-function-edit-delete-btn')).click();
  cy.wait(5000);

  // The stubbed prompt must have been invoked and accepted.
  cy.window()
    .its('prompt')
    .should('be.called');
  cy.get(getElementFromAlias('delete-confirmation-error')).should('not.exist');

  // On success the console redirects back to the schema page.
  cy.url().should('eq', `${baseUrl}/data/schema/public`);
  cy.wait(5000);

  dropTableRequest(dropTable(1), 'success');
  cy.wait(5000);
};

View File

@ -0,0 +1,44 @@
/* eslint no-unused-vars: 0 */
/* eslint import/prefer-default-export: 0 */
import { testMode } from '../../../helpers/common';
import { setMetaData } from '../../validators/validators';
import {
openRawSQL,
createCustomFunctionSuccess,
deleteCustomFunction,
unTrackFunction,
trackFunction,
verifyPermissionTab,
} from './spec';
// One-time suite setup: visit the console's Data section and prime the
// metadata that the validators compare against.
const setup = () => {
  describe('Setup route', () => {
    it('Visit the index route', () => {
      // Visit the index route
      cy.visit('/data');
      cy.wait(5000);
      // Get and set validation metadata
      setMetaData();
    });
  });
};
// End-to-end lifecycle of a custom function: create+track, untrack, re-track,
// check the permissions tab, then delete.
export const runCreateCustomFunctionsTableTests = () => {
  describe('Create Custom Function', () => {
    // Raw-SQL UI entry is skipped; creation goes through the Data API instead
    // (see createCustomFunctionSuccess).
    // it(
    //   'Visit Run SQL page',
    //   openRawSQL,
    // );
    it('Create a custom function and track', createCustomFunctionSuccess);
    it('Untrack custom function', unTrackFunction);
    it('Track custom function', trackFunction);
    it('Verify permission tab', verifyPermissionTab);
    it('Delete custom function', deleteCustomFunction);
  });
};
// When not driven by the CLI test runner, register the suites immediately.
if (testMode !== 'cli') {
  setup();
  runCreateCustomFunctionsTableTests();
}

View File

@ -438,7 +438,7 @@ export const checkViewRelationship = () => {
cy.get(getElementFromAlias('ref-table')).select(getTableName(0, testName));
cy.get(getElementFromAlias('ref-col')).select(getColName(0));
cy.get(getElementFromAlias('save-button')).click();
cy.wait(300);
cy.wait(1000);
// Add relationship
cy.get(getElementFromAlias('add-rel-mod')).click();
cy.get(getElementFromAlias('obj-rel-add-0')).click();
@ -446,19 +446,19 @@ export const checkViewRelationship = () => {
.clear()
.type('someRel');
cy.get(getElementFromAlias('obj-rel-save-0')).click();
cy.wait(300);
cy.wait(2000);
// Insert a row
cy.get(getElementFromAlias('table-insert-rows')).click();
cy.get(getElementFromAlias('typed-input-1')).type('1');
cy.get(getElementFromAlias('insert-save-button')).click();
cy.wait(300);
cy.wait(1000);
cy.get(getElementFromAlias('table-browse-rows')).click();
cy.wait(300);
cy.wait(1000);
cy.get('a')
.contains('View')
.first()
.click();
cy.wait(300);
cy.wait(1000);
cy.get('a')
.contains('Close')
.first()

View File

@ -100,7 +100,7 @@ export const passMTAddColumn = () => {
cy.get(getElementFromAlias('column-name')).type(getColName(0));
cy.get(getElementFromAlias('data-type')).select('integer');
cy.get(getElementFromAlias('add-column-button')).click();
cy.wait(2500);
cy.wait(5000);
// cy.get('.notification-success').click();
validateColumn(getTableName(0, testName), [getColName(0)], 'success');
};
@ -121,6 +121,7 @@ export const passMCWithRightDefaultValue = () => {
.clear()
.type('1234');
cy.get(getElementFromAlias('save-button')).click();
cy.wait(15000);
};
export const passCreateForeignKey = () => {
@ -129,11 +130,12 @@ export const passCreateForeignKey = () => {
cy.get(getElementFromAlias('ref-table')).select(getTableName(0, testName));
cy.get(getElementFromAlias('ref-col')).select(getColName(0));
cy.get(getElementFromAlias('save-button')).click();
cy.wait(500);
cy.wait(15000);
};
// Remove the foreign-key constraint added earlier; the long wait gives the
// server time to run the migration before the next spec.
export const passRemoveForeignKey = () => {
  cy.get(getElementFromAlias('remove-constraint-button')).click();
  cy.wait(10000);
};
export const passMTDeleteCol = () => {

View File

@ -46,7 +46,7 @@ export const createSimpleRemoteSchema = () => {
.clear()
.type(getRemoteGraphQLURL());
cy.get(getElementFromAlias('add-remote-schema-submit')).click();
cy.wait(10000);
cy.wait(15000);
validateRS(getRemoteSchemaName(1, testName), 'success');
cy.url().should(
'eq',
@ -242,7 +242,7 @@ export const passWithEditRemoteSchema = () => {
.type(getRemoteSchemaName(5, testName));
cy.get(getElementFromAlias('remote-schema-edit-save-btn')).click();
cy.wait(5000);
cy.wait(10000);
validateRS(getRemoteSchemaName(5, testName), 'success');
cy.get(getElementFromAlias('remote-schemas-modify')).click();
@ -252,7 +252,7 @@ export const passWithEditRemoteSchema = () => {
getRemoteSchemaName(5, testName)
);
cy.get(getElementFromAlias('remote-schema-edit-modify-btn')).should('exist');
cy.wait(5000);
cy.wait(7000);
};
export const deleteRemoteSchema = () => {

View File

@ -30,7 +30,7 @@ export const createView = sql => {
// ******************* VALIDATION FUNCTIONS *******************************
// ******************* Remote schema Validator ****************************
// ******************* Remote Schema Validator ****************************
export const validateRS = (remoteSchemaName, result) => {
const reqBody = {
type: 'select',
@ -59,6 +59,97 @@ export const validateRS = (remoteSchemaName, result) => {
});
};
// ******************* Custom Function Validator **************************
// Assert that a function is (result === 'success') or is not tracked, by
// selecting its row from hdb_catalog.hdb_function via the Data API.
export const validateCFunc = (functionName, functionSchema, result) => {
  const reqBody = {
    type: 'select',
    args: {
      table: {
        name: 'hdb_function',
        schema: 'hdb_catalog',
      },
      columns: ['*'],
      where: {
        function_name: functionName,
        function_schema: functionSchema,
      },
    },
  };
  const requestOptions = makeDataAPIOptions(dataApiUrl, accessKey, reqBody);
  cy.request(requestOptions).then(response => {
    const isTracked =
      response.body.length > 0 &&
      response.body[0].function_name === functionName;
    if (result === 'success') {
      expect(isTracked).to.be.true;
    } else {
      expect(isTracked).to.be.false;
    }
  });
};
// Assert that a function is absent from hdb_catalog.hdb_function, i.e. has
// been untracked (result === 'success') — or still present ('failure').
export const validateUntrackedFunc = (functionName, functionSchema, result) => {
  const reqBody = {
    type: 'select',
    args: {
      table: {
        name: 'hdb_function',
        schema: 'hdb_catalog',
      },
      columns: ['*'],
      where: {
        function_name: functionName,
        function_schema: functionSchema,
      },
    },
  };
  const requestOptions = makeDataAPIOptions(dataApiUrl, accessKey, reqBody);
  cy.request(requestOptions).then(response => {
    const isAbsent = response.body.length === 0;
    if (result === 'success') {
      expect(isAbsent).to.be.true;
    } else {
      expect(isAbsent).to.be.false;
    }
  });
};
// Fire a bulk Data-API request and assert the two-part response shape:
// [0] run_sql returned CommandOk, [1] the follow-up query reported success.
export const dataRequest = (reqBody, result) => {
  const requestOptions = makeDataAPIOptions(dataApiUrl, accessKey, reqBody);
  cy.request(requestOptions).then(response => {
    const succeeded =
      response.body.length > 0 &&
      response.body[0].result_type === 'CommandOk' &&
      response.body[1].message === 'success';
    if (result === 'success') {
      expect(succeeded).to.be.true;
    } else {
      expect(succeeded).to.be.false;
    }
  });
};
// Fire a drop-table Data-API request and assert the run_sql result type.
export const dropTableRequest = (reqBody, result) => {
  const requestOptions = makeDataAPIOptions(dataApiUrl, accessKey, reqBody);
  cy.request(requestOptions).then(response => {
    const succeeded =
      response.body.length > 0 && response.body[0].result_type === 'CommandOk';
    if (result === 'success') {
      expect(succeeded).to.be.true;
    } else {
      expect(succeeded).to.be.false;
    }
  });
};
// ****************** Table Validator *********************
export const validateCT = (tableName, result) => {

View File

@ -1487,8 +1487,7 @@ span.CodeMirror-selectedtext {
-ms-user-select: none;
user-select: none;
}
.doc-explorer-title
{
.doc-explorer-title {
height: 34px;
}
.graphiql-container .doc-explorer-title,

View File

@ -21,7 +21,8 @@ class Login extends Component {
if (loginInProgress) {
loginText = (
<span>
Verifying...<i className="fa fa-spinner fa-spin" aria-hidden="true" />
Verifying...
<i className="fa fa-spinner fa-spin" aria-hidden="true" />
</span>
);
} else if (loginError) {

View File

@ -116,6 +116,9 @@
{
margin-right: 20px;
}
.red_button {
color: #FFF;
}
a
{
margin-left: 20px;

View File

@ -3,6 +3,7 @@ import _push from '../push';
import {
loadSchema,
LOAD_UNTRACKED_RELATIONS,
fetchTrackedFunctions,
makeMigrationCall,
} from '../DataActions';
import { showSuccessNotification } from '../Notification';
@ -107,6 +108,67 @@ const addExistingTableSql = () => {
};
};
// Thunk: track an existing postgres function (in the current schema) through
// a reversible migration, then navigate to its modify page.
// `name` — the function's name in the current schema.
const addExistingFunction = name => {
  return (dispatch, getState) => {
    dispatch({ type: MAKING_REQUEST });
    // Fixed grammar in the user-facing notification ("an function" -> "a
    // function").
    dispatch(showSuccessNotification('Adding a function...'));
    const currentSchema = getState().tables.currentSchema;

    // Track on migrate-up, untrack on migrate-down so the migration can be
    // rolled back.
    const requestBodyUp = {
      type: 'track_function',
      args: {
        name,
        schema: currentSchema,
      },
    };
    const requestBodyDown = {
      type: 'untrack_function',
      args: {
        name,
        schema: currentSchema,
      },
    };

    // NOTE(review): the space after 'add_existing_function' looks unintended
    // for a migration name — confirm against the other add_existing_* actions
    // before changing it.
    const migrationName = 'add_existing_function ' + currentSchema + '_' + name;
    const upQuery = {
      type: 'bulk',
      args: [requestBodyUp],
    };
    const downQuery = {
      type: 'bulk',
      args: [requestBodyDown],
    };

    const requestMsg = 'Adding existing function...';
    const successMsg = 'Existing function added';
    const errorMsg = 'Adding existing function failed';

    const customOnSuccess = () => {
      dispatch({ type: REQUEST_SUCCESS });
      // Update the left side bar, then jump to the new function's modify page.
      dispatch(fetchTrackedFunctions(currentSchema));
      dispatch(
        _push('/schema/' + currentSchema + '/functions/' + name + '/modify')
      );
      return;
    };
    const customOnError = err => {
      dispatch({ type: REQUEST_ERROR, data: err });
    };

    makeMigrationCall(
      dispatch,
      getState,
      upQuery.args,
      downQuery.args,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  };
};
const addAllUntrackedTablesSql = tableList => {
return (dispatch, getState) => {
const currentSchema = getState().tables.currentSchema;
@ -218,6 +280,7 @@ const addExistingTableReducer = (state = defaultState, action) => {
export default addExistingTableReducer;
export {
addExistingFunction,
setDefaults,
setTableName,
addExistingTableSql,

View File

@ -15,6 +15,10 @@ import globals from '../../../Globals';
import { SERVER_CONSOLE_MODE } from '../../../constants';
const SET_TABLE = 'Data/SET_TABLE';
const LOAD_FUNCTIONS = 'Data/LOAD_FUNCTIONS';
const LOAD_NON_TRACKABLE_FUNCTIONS = 'Data/LOAD_NON_TRACKABLE_FUNCTIONS';
const LOAD_TRACKED_FUNCTIONS = 'Data/LOAD_TRACKED_FUNCTIONS';
const UPDATE_TRACKED_FUNCTIONS = 'Data/UPDATE_TRACKED_FUNCTIONS';
const LOAD_SCHEMA = 'Data/LOAD_SCHEMA';
const LOAD_UNTRACKED_SCHEMA = 'Data/LOAD_UNTRACKED_SCHEMA';
const LOAD_TABLE_COMMENT = 'Data/LOAD_TABLE_COMMENT';
@ -87,6 +91,117 @@ const initQueries = {
},
},
},
// Functions already tracked by Hasura: rows of hdb_catalog.hdb_function for
// one schema. `function_schema` is filled in by the fetch thunks before the
// query is sent.
loadTrackedFunctions: {
  type: 'select',
  args: {
    table: {
      name: 'hdb_function',
      schema: 'hdb_catalog',
    },
    columns: ['function_name', 'function_schema', 'is_system_defined'],
    where: {
      function_schema: '',
    },
  },
},
// Functions eligible for tracking: non-variadic, STABLE or IMMUTABLE, and
// returning a SETOF composite type (so results map onto a table's rows).
loadTrackableFunctions: {
  type: 'select',
  args: {
    table: {
      name: 'hdb_function_agg',
      schema: 'hdb_catalog',
    },
    columns: [
      'function_name',
      'function_schema',
      'has_variadic',
      'function_type',
      'function_definition',
      'return_type_schema',
      'return_type_name',
      'return_type_type',
      'returns_set',
      {
        // Nested select: the table that the SETOF return type maps to.
        name: 'return_table_info',
        columns: ['table_schema', 'table_name'],
      },
    ],
    where: {
      function_schema: '',
      has_variadic: false,
      returns_set: true,
      return_type_type: {
        $ilike: '%composite%',
      },
      $or: [
        {
          function_type: {
            $ilike: '%stable%',
          },
        },
        {
          function_type: {
            $ilike: '%immutable%',
          },
        },
      ],
    },
  },
},
// Functions that match the shape above but are VOLATILE, and therefore
// cannot be tracked; surfaced separately in the UI.
loadNonTrackableFunctions: {
  type: 'select',
  args: {
    table: {
      name: 'hdb_function_agg',
      schema: 'hdb_catalog',
    },
    columns: [
      'function_name',
      'function_schema',
      'has_variadic',
      'function_type',
      'function_definition',
      'return_type_schema',
      'return_type_name',
      'return_type_type',
      'returns_set',
    ],
    where: {
      function_schema: '',
      has_variadic: false,
      returns_set: true,
      return_type_type: {
        $ilike: '%composite%',
      },
      function_type: {
        $ilike: '%volatile%',
      },
    },
  },
},
};
// Thunk: reload the tracked-function list for the current schema.
// The current schema is read from the store (any argument passed by callers
// is ignored, as before).
const fetchTrackedFunctions = () => {
  return (dispatch, getState) => {
    const url = Endpoints.getSchema;
    const currentSchema = getState().tables.currentSchema;
    // Build a per-request copy of the shared template. The previous version
    // wrote the schema into `initQueries.loadTrackedFunctions` in place,
    // mutating shared module state between calls.
    const template = initQueries.loadTrackedFunctions;
    const body = {
      ...template,
      args: {
        ...template.args,
        where: {
          ...template.args.where,
          function_schema: currentSchema,
        },
      },
    };
    const options = {
      credentials: globalCookiePolicy,
      method: 'POST',
      headers: dataHeaders(getState),
      body: JSON.stringify(body),
    };
    return dispatch(requestAction(url, options)).then(
      data => {
        dispatch({ type: LOAD_TRACKED_FUNCTIONS, data: data });
      },
      error => {
        console.error('Failed to load schema ' + JSON.stringify(error));
      }
    );
  };
};
const fetchDataInit = () => (dispatch, getState) => {
@ -99,10 +214,12 @@ const fetchDataInit = () => (dispatch, getState) => {
initQueries.loadUntrackedSchema,
],
};
// set schema in queries
const currentSchema = getState().tables.currentSchema;
body.args[1].args.where.table_schema = currentSchema;
body.args[2].args.where.table_schema = currentSchema;
const options = {
credentials: globalCookiePolicy,
method: 'POST',
@ -121,6 +238,41 @@ const fetchDataInit = () => (dispatch, getState) => {
);
};
// Thunk: bulk-load trackable, non-trackable and tracked functions for the
// current schema, dispatching one action per result set.
const fetchFunctionInit = () => (dispatch, getState) => {
  const url = Endpoints.getSchema;
  const currentSchema = getState().tables.currentSchema;

  // Copy each shared query template with the schema filled in. The previous
  // version assigned into `initQueries` directly, mutating shared module
  // state on every call.
  const withCurrentSchema = query => ({
    ...query,
    args: {
      ...query.args,
      where: {
        ...query.args.where,
        function_schema: currentSchema,
      },
    },
  });

  const body = {
    type: 'bulk',
    args: [
      withCurrentSchema(initQueries.loadTrackableFunctions),
      withCurrentSchema(initQueries.loadNonTrackableFunctions),
      withCurrentSchema(initQueries.loadTrackedFunctions),
    ],
  };

  const options = {
    credentials: globalCookiePolicy,
    method: 'POST',
    headers: dataHeaders(getState),
    body: JSON.stringify(body),
  };
  return dispatch(requestAction(url, options)).then(
    data => {
      // Results arrive in the same order as the bulk args above.
      dispatch({ type: LOAD_FUNCTIONS, data: data[0] });
      dispatch({ type: LOAD_NON_TRACKABLE_FUNCTIONS, data: data[1] });
      dispatch({ type: LOAD_TRACKED_FUNCTIONS, data: data[2] });
    },
    error => {
      console.error('Failed to fetch schema ' + JSON.stringify(error));
    }
  );
};
/* ************ action creators *********************** */
const fetchSchemaList = () => (dispatch, getState) => {
const url = Endpoints.getSchema;
@ -464,6 +616,28 @@ const dataReducer = (state = defaultState, action) => {
};
}
switch (action.type) {
case LOAD_FUNCTIONS:
return {
...state,
postgresFunctions: action.data,
};
case LOAD_NON_TRACKABLE_FUNCTIONS:
return {
...state,
nonTrackablePostgresFunctions: action.data,
};
case LOAD_TRACKED_FUNCTIONS:
return {
...state,
trackedFunctions: action.data,
listedFunctions: action.data,
};
case UPDATE_TRACKED_FUNCTIONS:
return {
...state,
listedFunctions: [...action.data],
};
case LOAD_SCHEMA:
return {
...state,
@ -556,11 +730,14 @@ export {
loadUntrackedRelations,
fetchSchemaList,
fetchDataInit,
fetchFunctionInit,
ACCESS_KEY_ERROR,
UPDATE_DATA_HEADERS,
UPDATE_REMOTE_SCHEMA_MANUAL_REL,
fetchTableListBySchema,
RESET_MANUAL_REL_TABLE_LIST,
fetchViewInfoFromInformationSchema,
fetchTrackedFunctions,
UPDATE_TRACKED_FUNCTIONS,
initQueries,
};

View File

@ -10,6 +10,7 @@ import {
loadUntrackedSchema,
loadUntrackedRelations,
UPDATE_CURRENT_SCHEMA,
fetchFunctionInit,
} from './DataActions';
const sectionPrefix = '/data';
@ -51,6 +52,7 @@ const DataHeader = ({
dispatch(loadSchema()),
dispatch(loadUntrackedSchema()),
dispatch(loadUntrackedRelations()),
dispatch(fetchFunctionInit()),
]);
};
return (

View File

@ -4,8 +4,11 @@ import addTableReducer from './Add/AddActions';
import addExistingTableReducer from './Add/AddExistingTableViewActions';
import rawSQLReducer from './RawSQL/Actions';
import customFunctionReducer from './Function/customFunctionReducer';
const dataReducer = {
tables: tableReducer,
functions: customFunctionReducer,
addTable: combineReducers({
table: addTableReducer,
existingTableView: addExistingTableReducer,

View File

@ -21,11 +21,15 @@ import {
permissionsConnector,
dataHeaderConnector,
migrationsConnector,
functionWrapperConnector,
ModifyCustomFunction,
PermissionCustomFunction,
// metadataConnector,
} from '.';
import {
fetchDataInit,
fetchFunctionInit,
UPDATE_CURRENT_SCHEMA,
// UPDATE_DATA_HEADERS,
// ACCESS_KEY_ERROR,
@ -50,6 +54,14 @@ const makeDataRouter = (
<Route path=":schema" component={schemaConnector(connect)} />
<Route path=":schema/tables" component={schemaConnector(connect)} />
<Route path=":schema/views" component={schemaConnector(connect)} />
<Route
path=":schema/functions/:functionName"
component={functionWrapperConnector(connect)}
>
<IndexRedirect to="modify" />
<Route path="modify" component={ModifyCustomFunction} />
<Route path="permissions" component={PermissionCustomFunction} />
</Route>
<Route
path=":schema/tables/:table/browse"
component={viewTableConnector(connect)}
@ -145,6 +157,7 @@ const dataRouter = (connect, store, composeOnEnterHooks) => {
currentSchema: currentSchema,
}),
store.dispatch(fetchDataInit()),
store.dispatch(fetchFunctionInit()),
]).then(
() => {
cb();

View File

@ -127,6 +127,10 @@ const defaultState = {
lastSuccess: null,
},
allSchemas: [],
postgresFunctions: [],
nonTrackablePostgresFunctions: [],
trackedFunctions: [],
listedFunctions: [],
listingSchemas: [],
untrackedSchemas: [],

View File

@ -0,0 +1,40 @@
import React from 'react';
import PropTypes from 'prop-types';
import { RESET } from './customFunctionReducer';
import { setTable } from '../DataActions';
class FunctionWrapper extends React.Component {
componentDidMount() {
this.props.dispatch(setTable(''));
}
componentWillUnmount() {
this.props.dispatch({
type: RESET,
});
}
render() {
const { children } = this.props;
return <div>{children && React.cloneElement(children, this.props)}</div>;
}
}
FunctionWrapper.propTypes = {
  children: PropTypes.node,
};

// Expose the tracked postgres functions, the custom-function store slice and
// the migration mode to the wrapper (and, via cloneElement, to its children).
const mapStateToProps = state => ({
  functionList: state.tables.postgresFunctions,
  functions: { ...state.functions },
  migrationMode: state.main.migrationMode,
});

const functionWrapperConnector = connect =>
  connect(mapStateToProps)(FunctionWrapper);

export default functionWrapperConnector;

View File

@ -0,0 +1,208 @@
import React from 'react';
import PropTypes from 'prop-types';
import Helmet from 'react-helmet';
import { push } from 'react-router-redux';
import CommonTabLayout from '../../../Layout/CommonTabLayout/CommonTabLayout';
import _push from '../../push';
import { pageTitle, appPrefix } from './constants';
import tabInfo from './tabInfo';
import globals from '../../../../../Globals';
const prefixUrl = globals.urlPrefix + appPrefix;
import ReusableTextAreaWithCopy from '../../../Layout/ReusableTextAreaWithCopy/ReusableTextAreaWithCopy';
import {
fetchCustomFunction,
deleteFunctionSql,
unTrackCustomFunction,
} from '../customFunctionReducer';
import { SET_SQL } from '../../RawSQL/Actions';
// Modify page for a tracked custom function: shows its SQL definition and,
// in migration mode, offers Modify (via the SQL editor), Untrack and Delete.
class ModifyCustomFunction extends React.Component {
  constructor() {
    super();
    // Local UI state: error text shown next to the delete button when the
    // "type DELETE to confirm" prompt is not satisfied.
    this.state = {};
    this.state.deleteConfirmationError = null;
  }
  componentDidMount() {
    const { functionName, schema } = this.props.params;
    // No function in the route — nothing to modify; go back to /data.
    if (!functionName) {
      this.props.dispatch(push(prefixUrl));
    }
    Promise.all([
      this.props.dispatch(fetchCustomFunction(functionName, schema)),
    ]);
  }
  // Preload the Raw SQL editor with this function's definition and navigate
  // there — "Modify" is performed through the SQL page.
  loadRunSQLAndLoadPage() {
    const { functionDefinition } = this.props.functions;
    Promise.all([
      this.props.dispatch({ type: SET_SQL, data: functionDefinition }),
      this.props.dispatch(_push('/sql')),
    ]);
  }
  updateDeleteConfirmationError(data) {
    this.setState({ ...this.state, deleteConfirmationError: data });
  }
  handleUntrackCustomFunction(e) {
    e.preventDefault();
    this.props.dispatch(unTrackCustomFunction());
  }
  // Destructive path: the user must type DELETE into a prompt before the
  // function is dropped.
  handleDeleteCustomFunction(e) {
    e.preventDefault();
    const a = prompt(
      'Are you absolutely sure?\nThis action cannot be undone. This will permanently delete function. Please type "DELETE" (in caps, without quotes) to confirm.\n'
    );
    try {
      if (a && typeof a === 'string' && a.trim() === 'DELETE') {
        this.updateDeleteConfirmationError(null);
        this.props.dispatch(deleteFunctionSql());
      } else {
        // Input didn't match
        // Show an error message right next to the button
        this.updateDeleteConfirmationError('user confirmation error!');
      }
    } catch (err) {
      console.error(err);
    }
  }
  render() {
    const styles = require('./ModifyCustomFunction.scss');
    const {
      functionSchema: schema,
      functionName,
      functionDefinition,
      isRequesting,
      isDeleting,
      isUntracking,
      isFetching,
    } = this.props.functions;
    const { migrationMode } = this.props;
    const baseUrl = `${appPrefix}/schema/${schema}/functions/${functionName}`;
    // Modify / Untrack / Delete button row — rendered only in migration mode
    // (see the bottom of render). Buttons are disabled while any request is
    // in flight.
    const generateMigrateBtns = () => {
      return (
        <div className={styles.commonBtn}>
          <button
            className={styles.yellow_button}
            data-test={'custom-function-edit-modify-btn'}
            onClick={this.loadRunSQLAndLoadPage.bind(this)}
          >
            Modify
          </button>
          <button
            className={
              styles.danger_button +
              ' ' +
              styles.white_button +
              ' ' +
              'btn-default'
            }
            onClick={e => {
              e.preventDefault();
              this.handleUntrackCustomFunction(e);
            }}
            disabled={isRequesting || isDeleting || isUntracking}
            data-test={'custom-function-edit-untrack-btn'}
          >
            {isUntracking ? 'Untracking Function...' : 'Untrack Function'}
          </button>
          <button
            className={
              styles.danger_button +
              ' ' +
              styles.red_button +
              ' ' +
              styles.no_mr_right +
              ' ' +
              'btn-danger'
            }
            onClick={e => {
              e.preventDefault();
              this.handleDeleteCustomFunction(e);
            }}
            data-test={'custom-function-edit-delete-btn'}
            disabled={isRequesting || isDeleting || isUntracking}
          >
            {isDeleting ? 'Deleting Function...' : 'Delete Function'}
          </button>
          {this.state.deleteConfirmationError ? (
            <span
              className={styles.delete_confirmation_error}
              data-test="delete-confirmation-error"
            >
              * {this.state.deleteConfirmationError}
            </span>
          ) : null}
        </div>
      );
    };
    const breadCrumbs = [
      {
        title: 'Data',
        url: appPrefix,
      },
      {
        title: 'Schema',
        url: appPrefix + '/schema',
      },
      {
        title: schema,
        url: appPrefix + '/schema/' + schema,
      },
    ];
    if (functionName) {
      breadCrumbs.push({
        title: functionName,
        url: appPrefix + '/schema/' + schema + '/functions/' + functionName,
      });
      breadCrumbs.push({
        title: 'Modify',
        url: '',
      });
    }
    return (
      <div className={'col-xs-8' + ' ' + styles.modifyWrapper}>
        <Helmet
          title={`Edit ${pageTitle} - ${functionName} - ${pageTitle}s | Hasura`}
        />
        <CommonTabLayout
          appPrefix={appPrefix}
          currentTab="modify"
          heading={functionName}
          tabsInfo={tabInfo}
          breadCrumbs={breadCrumbs}
          baseUrl={baseUrl}
          showLoader={isFetching}
          testPrefix={'functions'}
        />
        <br />
        {/*
        <h4>Function Definition:</h4>
        */}
        <div className={styles.sqlBlock}>
          <ReusableTextAreaWithCopy
            copyText={functionDefinition}
            textLanguage={'sql'}
          />
        </div>
        {migrationMode
          ? [<hr key="modify-custom-function-divider" />, generateMigrateBtns()]
          : null}
      </div>
    );
  }
}
ModifyCustomFunction.propTypes = {
  // `functions` is the custom-function slice of the store — an object with
  // functionName, functionSchema, functionDefinition and request flags — not
  // an array (the previous PropTypes.array declaration warned on every
  // render).
  functions: PropTypes.object.isRequired,
};

export default ModifyCustomFunction;

View File

@ -0,0 +1,51 @@
@import '../../../../Common/Common.scss';

// Styles for the custom-function Modify/Permissions pages.
.modifyWrapper {
  .commonBtn {
    .delete_confirmation_error {
      margin-left: 15px;
      color: #d9534f;
    }
    .yellow_button {
      margin-right: 20px;
    }
    .red_button {
      margin-right: 20px;
      color: #fff;
    }
    .no_mr_right {
      margin-right: 0px;
    }
    .white_button {
      margin-right: 20px;
      color: #333;
      &:hover {
        background-color: #d4d4d4;
      }
      border-color: #8c8c8c;
    }
    a {
      // margin-left: 20px;
    }
    .refresh_schema_btn {
      margin-left: 20px;
    }
    span {
      i {
        cursor: pointer;
        margin-left: 10px;
      }
    }
  }
  .sqlBlock {
    position: relative;
    width: 100%;
  }
}

View File

@ -0,0 +1,4 @@
// Shared constants for the custom-function pages.
export const pageTitle = 'Custom Function';
export const appPrefix = '/data';

View File

@ -0,0 +1,10 @@
// Tabs shown on the custom-function pages.
const tabInfo = {
  modify: { display_text: 'Modify' },
  permissions: { display_text: 'Permissions' },
};

export default tabInfo;

View File

@ -0,0 +1,95 @@
import React from 'react';
import Helmet from 'react-helmet';
import CommonTabLayout from '../../../Layout/CommonTabLayout/CommonTabLayout';
import { Link } from 'react-router';
import { push } from 'react-router-redux';
import { pageTitle, appPrefix } from '../Modify/constants';
import tabInfo from '../Modify/tabInfo';
import globals from '../../../../../Globals';
const prefixUrl = globals.urlPrefix + appPrefix;
import { fetchCustomFunction } from '../customFunctionReducer';
class Permission extends React.Component {
componentDidMount() {
const { functionName } = this.props.params;
if (!functionName) {
this.props.dispatch(push(prefixUrl));
}
Promise.all([this.props.dispatch(fetchCustomFunction(functionName))]);
}
render() {
const styles = require('../Modify/ModifyCustomFunction.scss');
const {
functionSchema: schema,
functionName,
setOffTable,
} = this.props.functions;
const baseUrl = `${appPrefix}/schema/${schema}/functions/${functionName}`;
const permissionTableUrl = `${appPrefix}/schema/${schema}/tables/${setOffTable}/permissions`;
const breadCrumbs = [
{
title: 'Data',
url: appPrefix,
},
{
title: 'Schema',
url: appPrefix + '/schema',
},
{
title: schema,
url: appPrefix + '/schema/' + schema,
},
];
if (functionName) {
breadCrumbs.push({
title: functionName,
url: appPrefix + '/schema/' + schema + '/functions/' + functionName,
});
breadCrumbs.push({
title: 'Permission',
url: '',
});
}
return (
<div className={'col-xs-8' + ' ' + styles.modifyWrapper}>
<Helmet
title={`Permission ${pageTitle} - ${functionName} - ${pageTitle}s | Hasura`}
/>
<CommonTabLayout
appPrefix={appPrefix}
currentTab="permissions"
heading={functionName}
tabsInfo={tabInfo}
breadCrumbs={breadCrumbs}
baseUrl={baseUrl}
showLoader={false}
testPrefix={'functions'}
/>
<br />
<p>
Note: Permission defined for the setof table, {`${setOffTable}`}, are
applicable to the data returned by this function
</p>
<div className={styles.commonBtn}>
<Link to={permissionTableUrl}>
<button
className={styles.yellow_button}
data-test={'custom-function-permission-btn'}
>
{`${setOffTable} Permissions`}
</button>
</Link>
</div>
</div>
);
}
}
export default Permission;

View File

@ -0,0 +1,365 @@
/* Import default State */
import { functionData } from './customFunctionState';
import Endpoints, { globalCookiePolicy } from '../../../../Endpoints';
import requestAction from '../../../../utils/requestAction';
import dataHeaders from '../Common/Headers';
import globals from '../../../../Globals';
import returnMigrateUrl from '../Common/getMigrateUrl';
import { SERVER_CONSOLE_MODE } from '../../../../constants';
import { loadMigrationStatus } from '../../../Main/Actions';
import { handleMigrationErrors } from '../../EventTrigger/EventActions';
import { showSuccessNotification } from '../Notification';
// import { push } from 'react-router-redux';
import { fetchTrackedFunctions } from '../DataActions';
import _push from '../push';
/* Constants */
// Action types for the custom-function reducer, namespaced under
// @customFunction to avoid collisions with other data actions.
const RESET = '@customFunction/RESET';
// Fetch lifecycle for a single function's catalogue row and definition.
const FETCHING_INDIV_CUSTOM_FUNCTION =
  '@customFunction/FETCHING_INDIV_CUSTOM_FUNCTION';
const CUSTOM_FUNCTION_FETCH_SUCCESS =
  '@customFunction/CUSTOM_FUNCTION_FETCH_SUCCESS';
const CUSTOM_FUNCTION_FETCH_FAIL = '@customFunction/CUSTOM_FUNCTION_FETCH_FAIL';
// Delete (DROP FUNCTION) lifecycle.
const DELETING_CUSTOM_FUNCTION = '@customFunction/DELETING_CUSTOM_FUNCTION';
const DELETE_CUSTOM_FUNCTION_FAIL =
  '@customFunction/DELETE_CUSTOM_FUNCTION_FAIL';
// Untrack (remove from GraphQL schema, keep in Postgres) lifecycle.
const UNTRACKING_CUSTOM_FUNCTION = '@customFunction/UNTRACKING_CUSTOM_FUNCTION';
const UNTRACK_CUSTOM_FUNCTION_FAIL =
  '@customFunction/UNTRACK_CUSTOM_FUNCTION_FAIL';
/* */
// Thunk factory: run a reversible migration consisting of `upQueries` /
// `downQueries`. In server console mode only the up-queries are sent to the
// query endpoint; in CLI mode the full named migration (up + down) is posted
// to the migration endpoint and the migration status is reloaded afterwards.
// Success/error notifications and the custom callbacks are driven by the
// remaining arguments.
const makeRequest = (
  upQueries,
  downQueries,
  migrationName,
  customOnSuccess,
  customOnError,
  requestMsg,
  successMsg,
  errorMsg
) => {
  return (dispatch, getState) => {
    const upQuery = {
      type: 'bulk',
      args: upQueries,
    };
    const downQuery = {
      type: 'bulk',
      args: downQueries,
    };
    const migrationBody = {
      name: migrationName,
      up: upQuery.args,
      down: downQuery.args,
    };
    const currMigrationMode = getState().main.migrationMode;
    const migrateUrl = returnMigrateUrl(currMigrationMode);
    // Server mode sends just the up-queries; CLI mode sends the whole named
    // migration so the CLI can write the migration files.
    let finalReqBody;
    if (globals.consoleMode === SERVER_CONSOLE_MODE) {
      finalReqBody = upQuery;
    } else if (globals.consoleMode === 'cli') {
      finalReqBody = migrationBody;
    }
    const url = migrateUrl;
    const options = {
      method: 'POST',
      credentials: globalCookiePolicy,
      headers: dataHeaders(getState),
      body: JSON.stringify(finalReqBody),
    };
    const onSuccess = data => {
      if (globals.consoleMode === 'cli') {
        dispatch(loadMigrationStatus()); // don't call for server mode
      }
      if (successMsg) {
        dispatch(showSuccessNotification(successMsg));
      }
      customOnSuccess(data);
    };
    const onError = err => {
      dispatch(handleMigrationErrors(errorMsg, err));
      customOnError(err);
    };
    // Show the "in progress" notification before firing the request.
    dispatch(showSuccessNotification(requestMsg));
    return dispatch(requestAction(url, options)).then(onSuccess, onError);
  };
};
/* Action creators */

/**
 * Loads metadata for a single tracked function: the tracked-function row
 * from hdb_catalog.hdb_function and the aggregated details (SQL definition,
 * return type) from hdb_catalog.hdb_function_agg.
 * Dispatches CUSTOM_FUNCTION_FETCH_SUCCESS with both result sets on
 * success; redirects to '/' when the function is not found in either table.
 */
const fetchCustomFunction = (functionName, schema) => {
  return (dispatch, getState) => {
    const url = Endpoints.getSchema;
    // Tracked-function row (its presence implies the function is tracked).
    const fetchCustomFunctionQuery = {
      type: 'select',
      args: {
        table: {
          name: 'hdb_function',
          schema: 'hdb_catalog',
        },
        columns: ['*'],
        where: {
          function_schema: schema,
          function_name: functionName,
        },
      },
    };
    // Aggregated view: full SQL definition and return-type information.
    const fetchCustomFunctionDefinition = {
      type: 'select',
      args: {
        table: {
          name: 'hdb_function_agg',
          schema: 'hdb_catalog',
        },
        columns: ['*'],
        where: {
          function_schema: schema,
          function_name: functionName,
        },
      },
    };
    const bulkQuery = {
      type: 'bulk',
      args: [fetchCustomFunctionQuery, fetchCustomFunctionDefinition],
    };
    const options = {
      credentials: globalCookiePolicy,
      method: 'POST',
      headers: dataHeaders(getState),
      body: JSON.stringify(bulkQuery),
    };
    dispatch({ type: FETCHING_INDIV_CUSTOM_FUNCTION });
    return dispatch(requestAction(url, options)).then(
      data => {
        if (data[0].length > 0 && data[1].length > 0) {
          dispatch({
            type: CUSTOM_FUNCTION_FETCH_SUCCESS,
            data: [[...data[0]], [...data[1]]],
          });
          return Promise.resolve();
        }
        // Unknown function: fall back to the data landing page.
        return dispatch(_push('/'));
      },
      error => {
        // Fixed copy-pasted log message ("resolver") and missing separator.
        console.error(
          'Failed to fetch custom function: ' + JSON.stringify(error)
        );
        return dispatch({ type: CUSTOM_FUNCTION_FETCH_FAIL, data: error });
      }
    );
  };
};
/**
 * Drops the current function from Postgres via run_sql and redirects to
 * the data landing page on success. The down migration recreates the
 * function from its saved definition when one is available.
 */
const deleteFunctionSql = () => {
  return (dispatch, getState) => {
    const currentSchema = getState().tables.currentSchema;
    const functionName = getState().functions.functionName;
    const functionDefinition = getState().functions.functionDefinition;
    // NOTE(review): DROP FUNCTION without an argument list assumes the name
    // is unambiguous; it fails on overloaded functions — confirm acceptable.
    const sqlDropFunction =
      'DROP FUNCTION ' +
      '"' +
      currentSchema +
      '"' +
      '.' +
      '"' +
      functionName +
      '"';

    const sqlUpQueries = [
      {
        type: 'run_sql',
        args: { sql: sqlDropFunction },
      },
    ];

    // Down migration: restore the function only if its definition is known.
    const sqlDownQueries = [];
    if (functionDefinition && functionDefinition.length > 0) {
      sqlDownQueries.push({
        type: 'run_sql',
        args: { sql: functionDefinition },
      });
    }

    // Apply migrations
    const migrationName = 'drop_function_' + currentSchema + '_' + functionName;

    const requestMsg = 'Deleting function...';
    const successMsg = 'Function deleted';
    const errorMsg = 'Deleting function failed';

    const customOnSuccess = () => {
      dispatch(_push('/'));
    };
    const customOnError = err => {
      // Previously a silent no-op, which left `isDeleting` stuck at true
      // after a failed drop; surface the error to the reducer instead.
      dispatch({ type: DELETE_CUSTOM_FUNCTION_FAIL, data: err });
    };

    dispatch({ type: DELETING_CUSTOM_FUNCTION });
    return dispatch(
      makeRequest(
        sqlUpQueries,
        sqlDownQueries,
        migrationName,
        customOnSuccess,
        customOnError,
        requestMsg,
        successMsg,
        errorMsg
      )
    );
  };
};
/**
 * Untracks the current function (removes it from Hasura metadata without
 * dropping it from Postgres), then resets local state and redirects home.
 * The down migration re-tracks the function for rollback.
 */
const unTrackCustomFunction = () => {
  return (dispatch, getState) => {
    const currentSchema = getState().tables.currentSchema;
    const functionName = getState().functions.functionName;

    const migrationName = 'remove_custom_function_' + functionName;

    const functionArgs = {
      name: functionName,
      schema: currentSchema,
    };
    const upQuery = {
      type: 'bulk',
      args: [{ type: 'untrack_function', args: functionArgs }],
    };
    const downQuery = {
      type: 'bulk',
      args: [{ type: 'track_function', args: functionArgs }],
    };

    const requestMsg = 'Deleting custom function...';
    const successMsg = 'Custom function deleted successfully';
    const errorMsg = 'Delete custom function failed';

    const customOnSuccess = () => {
      Promise.all([
        dispatch({ type: RESET }),
        dispatch(_push('/')),
        dispatch(fetchTrackedFunctions()),
      ]);
    };
    const customOnError = error => {
      Promise.all([
        dispatch({ type: UNTRACK_CUSTOM_FUNCTION_FAIL, data: error }),
      ]);
    };

    dispatch({ type: UNTRACKING_CUSTOM_FUNCTION });
    return dispatch(
      makeRequest(
        upQuery.args,
        downQuery.args,
        migrationName,
        customOnSuccess,
        customOnError,
        requestMsg,
        successMsg,
        errorMsg
      )
    );
  };
};
/* */
/* Reducer */

/**
 * Reducer for the currently-viewed custom function.
 * State shape is `functionData` (see ./customFunctionState): identity
 * fields (name, schema, definition, return table) plus async flags.
 */
const customFunctionReducer = (state = functionData, action) => {
  switch (action.type) {
    case RESET:
      // Fresh copy so the shared initial-state object is never mutated.
      return {
        ...functionData,
      };
    case FETCHING_INDIV_CUSTOM_FUNCTION:
      return {
        ...state,
        isFetching: true,
        isFetchError: null,
      };
    case CUSTOM_FUNCTION_FETCH_SUCCESS:
      // action.data = [hdb_function rows, hdb_function_agg rows]; the
      // dispatcher guarantees both arrays are non-empty.
      return {
        ...state,
        functionName: action.data[0][0].function_name,
        functionSchema: action.data[0][0].function_schema || null,
        functionDefinition: action.data[1][0].function_definition || null,
        setOffTable: action.data[1][0].return_type_name || null,
        isFetching: false,
        isFetchError: null,
      };
    case CUSTOM_FUNCTION_FETCH_FAIL:
      return {
        ...state,
        isFetching: false,
        isFetchError: action.data,
      };
    case DELETE_CUSTOM_FUNCTION_FAIL:
      return {
        ...state,
        isDeleting: false,
        isError: action.data,
      };
    case DELETING_CUSTOM_FUNCTION:
      return {
        ...state,
        isDeleting: true,
        isError: null,
      };
    case UNTRACK_CUSTOM_FUNCTION_FAIL:
      return {
        ...state,
        isUntracking: false,
        isError: action.data,
      };
    case UNTRACKING_CUSTOM_FUNCTION:
      return {
        ...state,
        isUntracking: true,
        isError: null,
      };
    default:
      // Return the existing state object unchanged so connected components
      // relying on reference equality don't re-render on unrelated actions.
      // (Previously returned a fresh shallow copy on every default hit.)
      return state;
  }
};
/* End of it */
export { RESET, fetchCustomFunction, deleteFunctionSql, unTrackCustomFunction };
export default customFunctionReducer;

View File

@ -0,0 +1,18 @@
// Transient request/lifecycle flags shared by the custom-function views.
const asyncState = {
  isRequesting: false,
  isUntracking: false,
  isDeleting: false,
  isError: false,
  isFetching: false,
  isFetchError: null,
};

// Initial state for the custom-function reducer: identity fields for the
// function being viewed, merged with the async flags above.
const functionData = Object.assign(
  {
    functionName: '',
    functionSchema: '',
    functionDefinition: '',
    setOffTable: '',
  },
  asyncState
);

export { functionData };

View File

@ -6,27 +6,38 @@ import { Link } from 'react-router';
import globals from '../../../../Globals';
import Button from '../../Layout/Button/Button';
import { LISTING_SCHEMA } from '../DataActions';
import { LISTING_SCHEMA, UPDATE_TRACKED_FUNCTIONS } from '../DataActions';
import semverCheck from '../../../../helpers/semver';
const appPrefix = '/data';
const PageContainer = ({
schema,
listingSchema,
functionsList,
listedFunctions,
currentTable,
schemaName,
migrationMode,
children,
dispatch,
location,
currentFunction,
serverVersion,
}) => {
const styles = require('./PageContainer.scss');
const functionSymbol = require('./function.svg');
const functionSymbolActive = require('./function_high.svg');
const handleFunc = semverCheck('customFunctionSection', serverVersion)
? true
: false;
// Now schema might be null or an empty array
let tableLinks = (
<li className={styles.noTables}>
<i>No tables available</i>
</li>
);
let tableLinks = [
<li className={styles.noTables} key="no-tables-1">
<i>No tables/views available</i>
</li>,
];
const tables = {};
listingSchema.map(t => {
tables[t.table_name] = t;
@ -90,6 +101,58 @@ const PageContainer = ({
});
}
const dividerHr = [
<li key={'fn-divider-1'}>
<hr className={styles.tableFunctionDivider} />
</li>,
];
// If the listedFunctions is non empty
if (listedFunctions.length > 0) {
const functionHtml = listedFunctions.map((f, i) => (
<li
className={
f.function_name === currentFunction ? styles.activeTable : ''
}
key={'fn ' + i}
>
<Link
to={
appPrefix +
'/schema/' +
schemaName +
'/functions/' +
f.function_name
}
data-test={f.function_name}
>
<div className={styles.display_inline + ' ' + styles.functionIcon}>
<img
src={
f.function_name === currentFunction
? functionSymbolActive
: functionSymbol
}
/>
</div>
{f.function_name}
</Link>
</li>
));
tableLinks = [...tableLinks, ...dividerHr, ...functionHtml];
} else if (
functionsList.length !== listedFunctions.length &&
listedFunctions.length === 0
) {
const noFunctionResult = [
<li className={styles.noTables}>
<i>No matching functions available</i>
</li>,
];
tableLinks = [...tableLinks, ...dividerHr, ...noFunctionResult];
}
function tableSearch(e) {
const searchTerm = e.target.value;
// form new schema
@ -99,8 +162,14 @@ const PageContainer = ({
matchedTables.push(table);
}
});
const matchedFuncs = functionsList.filter(
f => f.function_name.indexOf(searchTerm) !== -1
);
// update schema with matchedTables
dispatch({ type: LISTING_SCHEMA, updatedSchemas: matchedTables });
dispatch({ type: UPDATE_TRACKED_FUNCTIONS, data: matchedFuncs });
}
return (
@ -116,7 +185,7 @@ const PageContainer = ({
type="text"
onChange={tableSearch.bind(this)}
className="form-control"
placeholder="search table/view"
placeholder={`search table/view${handleFunc ? '/function' : ''}`}
data-test="search-tables"
/>
</div>
@ -173,6 +242,10 @@ const mapStateToProps = state => {
listingSchema: state.tables.listingSchemas,
currentTable: state.tables.currentTable,
migrationMode: state.main.migrationMode,
functionsList: state.tables.trackedFunctions,
listedFunctions: state.tables.listedFunctions,
currentFunction: state.functions.functionName,
serverVersion: state.main.serverVersion ? state.main.serverVersion : '',
};
};

View File

@ -125,15 +125,26 @@
li {
border-bottom: 0px !important;
padding: 0 0 !important;
.tableFunctionDivider {
margin-top: 5px;
margin-bottom: 5px;
width: 95%;
}
a {
background: transparent !important;
padding: 5px 0px !important;
font-weight: 400 !important;
padding-left: 5px !important;
.tableIcon {
.tableIcon, .functionIcon {
margin-right: 5px;
font-size: 12px;
}
.functionIcon {
width: 12px;
img {
width: 100%;
}
}
}
}
.noTables {

View File

@ -0,0 +1,54 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="142.514px" height="142.514px" viewBox="0 0 142.514 142.514" style="enable-background:new 0 0 142.514 142.514;"
xml:space="preserve">
<g>
<g>
<path d="M34.367,142.514c11.645,0,17.827-10.4,19.645-16.544c0.029-0.097,0.056-0.196,0.081-0.297
c4.236-17.545,10.984-45.353,15.983-65.58h17.886c3.363,0,6.09-2.726,6.09-6.09c0-3.364-2.727-6.09-6.09-6.09H73.103
c1.6-6.373,2.771-10.912,3.232-12.461l0.512-1.734c1.888-6.443,6.309-21.535,13.146-21.535c6.34,0,7.285,9.764,7.328,10.236
c0.27,3.343,3.186,5.868,6.537,5.579c3.354-0.256,5.864-3.187,5.605-6.539C108.894,14.036,104.087,0,89.991,0
C74.03,0,68.038,20.458,65.159,30.292l-0.49,1.659c-0.585,1.946-2.12,7.942-4.122,15.962H39.239c-3.364,0-6.09,2.726-6.09,6.09
c0,3.364,2.726,6.09,6.09,6.09H57.53c-6.253,25.362-14.334,58.815-15.223,62.498c-0.332,0.965-2.829,7.742-7.937,7.742
c-7.8,0-11.177-10.948-11.204-11.03c-0.936-3.229-4.305-5.098-7.544-4.156c-3.23,0.937-5.092,4.314-4.156,7.545
C13.597,130.053,20.816,142.514,34.367,142.514z"/>
<path d="M124.685,126.809c3.589,0,6.605-2.549,6.605-6.607c0-1.885-0.754-3.586-2.359-5.474l-12.646-14.534l12.271-14.346
c1.132-1.416,1.98-2.926,1.98-4.908c0-3.59-2.927-6.231-6.703-6.231c-2.547,0-4.527,1.604-6.229,3.684l-9.531,12.454L98.73,78.391
c-1.89-2.357-3.869-3.682-6.7-3.682c-3.59,0-6.607,2.551-6.607,6.609c0,1.885,0.756,3.586,2.357,5.471l11.799,13.592
L86.647,115.67c-1.227,1.416-1.98,2.926-1.98,4.908c0,3.589,2.926,6.229,6.699,6.229c2.549,0,4.53-1.604,6.229-3.682l10.19-13.4
l10.193,13.4C119.872,125.488,121.854,126.809,124.685,126.809z"/>
</g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 2.1 KiB

View File

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" id="Capa_1" x="0px" y="0px" width="512px" height="512px" viewBox="0 0 142.514 142.514" style="enable-background:new 0 0 142.514 142.514;" xml:space="preserve">
<g>
<g>
<path d="M34.367,142.514c11.645,0,17.827-10.4,19.645-16.544c0.029-0.097,0.056-0.196,0.081-0.297 c4.236-17.545,10.984-45.353,15.983-65.58h17.886c3.363,0,6.09-2.726,6.09-6.09c0-3.364-2.727-6.09-6.09-6.09H73.103 c1.6-6.373,2.771-10.912,3.232-12.461l0.512-1.734c1.888-6.443,6.309-21.535,13.146-21.535c6.34,0,7.285,9.764,7.328,10.236 c0.27,3.343,3.186,5.868,6.537,5.579c3.354-0.256,5.864-3.187,5.605-6.539C108.894,14.036,104.087,0,89.991,0 C74.03,0,68.038,20.458,65.159,30.292l-0.49,1.659c-0.585,1.946-2.12,7.942-4.122,15.962H39.239c-3.364,0-6.09,2.726-6.09,6.09 c0,3.364,2.726,6.09,6.09,6.09H57.53c-6.253,25.362-14.334,58.815-15.223,62.498c-0.332,0.965-2.829,7.742-7.937,7.742 c-7.8,0-11.177-10.948-11.204-11.03c-0.936-3.229-4.305-5.098-7.544-4.156c-3.23,0.937-5.092,4.314-4.156,7.545 C13.597,130.053,20.816,142.514,34.367,142.514z" fill="#fd9540"/>
<path d="M124.685,126.809c3.589,0,6.605-2.549,6.605-6.607c0-1.885-0.754-3.586-2.359-5.474l-12.646-14.534l12.271-14.346 c1.132-1.416,1.98-2.926,1.98-4.908c0-3.59-2.927-6.231-6.703-6.231c-2.547,0-4.527,1.604-6.229,3.684l-9.531,12.454L98.73,78.391 c-1.89-2.357-3.869-3.682-6.7-3.682c-3.59,0-6.607,2.551-6.607,6.609c0,1.885,0.756,3.586,2.357,5.471l11.799,13.592 L86.647,115.67c-1.227,1.416-1.98,2.926-1.98,4.908c0,3.589,2.926,6.229,6.699,6.229c2.549,0,4.53-1.604,6.229-3.682l10.19-13.4 l10.193,13.4C119.872,125.488,121.854,126.809,124.685,126.809z" fill="#fd9540"/>
</g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 2.1 KiB

View File

@ -1,6 +1,11 @@
import defaultState from './State';
import Endpoints, { globalCookiePolicy } from '../../../../Endpoints';
import { handleMigrationErrors, fetchDataInit } from '../DataActions';
import {
// loadSchema,
handleMigrationErrors,
fetchTrackedFunctions,
fetchDataInit,
} from '../DataActions';
import {
showErrorNotification,
showSuccessNotification,
@ -12,6 +17,8 @@ import {
import dataHeaders from '../Common/Headers';
import returnMigrateUrl from '../Common/getMigrateUrl';
import semverCheck from '../../../../helpers/semver';
const MAKING_REQUEST = 'RawSQL/MAKING_REQUEST';
const SET_SQL = 'RawSQL/SET_SQL';
const SET_CASCADE_CHECKED = 'RawSQL/SET_CASCADE_CHECKED';
@ -31,8 +38,13 @@ const executeSQL = (isMigration, migrationName) => (dispatch, getState) => {
dispatch(showSuccessNotification('Executing the Query...'));
const sql = getState().rawSQL.sql;
const serverVersion = getState().main.serverVersion;
const currMigrationMode = getState().main.migrationMode;
const handleFunc = semverCheck('customFunctionSection', serverVersion)
? true
: false;
const migrateUrl = returnMigrateUrl(currMigrationMode);
const currentSchema = 'public';
const isCascadeChecked = getState().rawSQL.isCascadeChecked;
@ -45,17 +57,31 @@ const executeSQL = (isMigration, migrationName) => (dispatch, getState) => {
},
];
// check if track view enabled
if (getState().rawSQL.isTableTrackChecked) {
const regExp = /create\s*(?:|or\s*replace)\s*(view|table)\s*((\"?\w+\"?)\.(\"?\w+\"?)|(\"?\w+\"?))/; // eslint-disable-line
let regExp;
if (handleFunc) {
regExp = /create\s*(?:|or\s*replace)\s*(view|table|function)\s*((\"?\w+\"?)\.(\"?\w+\"?)|(\"?\w+\"?))/; // eslint-disable-line
} else {
regExp = /create\s*(?:|or\s*replace)\s*(view|table)\s*((\"?\w+\"?)\.(\"?\w+\"?)|(\"?\w+\"?))/; // eslint-disable-line
}
const matches = sql.match(new RegExp(regExp, 'gmi'));
if (matches) {
matches.forEach(element => {
const itemMatch = element.match(new RegExp(regExp, 'i'));
if (itemMatch && itemMatch.length === 6) {
const trackQuery = {
type: 'add_existing_table_or_view',
args: {},
};
let trackQuery = {};
if (itemMatch[1].toLowerCase() === 'function') {
trackQuery = {
type: 'track_function',
args: {},
};
} else {
trackQuery = {
type: 'add_existing_table_or_view',
args: {},
};
}
// If group 5 is undefined, use group 3 and 4 for schema and table respectively
// If group 5 is present, use group 5 for table name using public schema.
if (itemMatch[5]) {
@ -108,6 +134,7 @@ const executeSQL = (isMigration, migrationName) => (dispatch, getState) => {
dispatch(fetchDataInit()).then(() => {
dispatch({ type: REQUEST_SUCCESS, data });
});
dispatch(fetchTrackedFunctions());
},
err => {
const parsedErrorMsg = err;

View File

@ -18,6 +18,7 @@ import {
} from './Actions';
import { modalOpen, modalClose } from './Actions';
import globals from '../../../../Globals';
import semverCheck from '../../../../helpers/semver';
const cascadeTip = (
<Tooltip id="tooltip-cascade">
@ -37,10 +38,10 @@ const migrationNameTip = (
'run_sql_migration'
</Tooltip>
);
const trackTableTip = (
const trackTableTip = (hasFunctionSupport) => (
<Tooltip id="tooltip-tracktable">
If you are creating a table/view, you can track them to query them with
GraphQL
{ `If you are creating a table/view${hasFunctionSupport ? '/function' : ''}, you can track them to query them
with GraphQL`}
</Tooltip>
);
@ -58,6 +59,7 @@ const RawSQL = ({
isMigrationChecked,
isTableTrackChecked,
migrationMode,
serverVersion,
}) => {
const styles = require('../TableCommon/Table.scss');
@ -170,6 +172,10 @@ const RawSQL = ({
</div>
);
})();
const functionText = semverCheck('customFunctionSection', serverVersion)
? 'Function'
: '';
const placeholderText = functionText ? 'this' : 'table';
return (
<div
className={`${styles.main_wrapper} ${styles.padd_left} ${
@ -194,9 +200,10 @@ const RawSQL = ({
communicate with the database.
</li>
<li>
If you plan to create a Table/View using Raw SQL, remember to
link it to Hasura DB by checking the <code>Track table</code>{' '}
checkbox below.
If you plan to create a Table/View
{functionText ? '/' + functionText : ''} using Raw SQL, remember
to link it to Hasura DB by checking the{' '}
<code>Track {placeholderText}</code> checkbox below.
</li>
<li>
Please note that if the migrations are enabled,{' '}
@ -240,7 +247,13 @@ const RawSQL = ({
dispatch({ type: SET_MIGRATION_CHECKED, data: false });
}
// set track table checkbox true
const regExp = /create\s*(?:|or\s*replace)\s*(?:view|table)/; // eslint-disable-line
let regExp;
if (functionText) {
regExp = /create\s*(?:|or\s*replace)\s*(?:view|table|function)/; // eslint-disable-line
} else {
regExp = /create\s*(?:|or\s*replace)\s*(?:view|table)/; // eslint-disable-line
}
// const regExp = /create\s*(?:|or\s*replace)\s*(?:view|table|function)/; // eslint-disable-line
const matches = formattedSql.match(new RegExp(regExp, 'gmi'));
if (matches) {
dispatch({ type: SET_TRACK_TABLE_CHECKED, data: true });
@ -285,8 +298,8 @@ const RawSQL = ({
}}
data-test="raw-sql-track-check"
/>
Track table
<OverlayTrigger placement="right" overlay={trackTableTip}>
Track {placeholderText}
<OverlayTrigger placement="right" overlay={trackTableTip(!!functionText)}>
<i
className={`${styles.padd_small_left} fa fa-info-circle`}
aria-hidden="true"
@ -406,6 +419,7 @@ const mapStateToProps = state => ({
...state.rawSQL,
migrationMode: state.main.migrationMode,
currentSchema: state.tables.currentSchema,
serverVersion: state.main.serverVersion ? state.main.serverVersion : '',
});
const rawSQLConnector = connect => connect(mapStateToProps)(RawSQL);

View File

@ -8,14 +8,26 @@ import Helmet from 'react-helmet';
import { push } from 'react-router-redux';
import OverlayTrigger from 'react-bootstrap/lib/OverlayTrigger';
import { untrackedTip, untrackedRelTip } from './Tooltips';
import {
untrackedTip,
untrackedRelTip,
trackableFunctions,
// nonTrackableFunctions,
} from './Tooltips';
import Button from '../../Layout/Button/Button';
import {
setTableName,
addExistingTableSql,
addAllUntrackedTablesSql,
addExistingFunction,
} from '../Add/AddExistingTableViewActions';
import { fetchDataInit, LOAD_UNTRACKED_RELATIONS } from '../DataActions';
import {
loadUntrackedRelations,
fetchDataInit,
fetchFunctionInit,
LOAD_UNTRACKED_RELATIONS,
UPDATE_CURRENT_SCHEMA,
} from '../DataActions';
import { getAllUnTrackedRelations } from '../TableRelationships/Actions';
import AutoAddRelationsConnector from './AutoAddRelations';
import globals from '../../../../Globals';
@ -31,6 +43,7 @@ class Schema extends Component {
// Initialize this table
const dispatch = this.props.dispatch;
dispatch(fetchDataInit());
dispatch(fetchFunctionInit());
const untrackedRelations = getAllUnTrackedRelations(
this.props.schema,
this.props.currentSchema
@ -55,12 +68,39 @@ class Schema extends Component {
render() {
const {
schema,
schemaList,
untracked,
migrationMode,
untrackedRelations,
currentSchema,
dispatch,
functionsList,
// nonTrackableFunctionsList, // Not used right now, will be used in future
trackedFunctions,
} = this.props;
/* Filter */
const trackedFuncs = trackedFunctions.map(t => t.function_name);
// Assuming schema for both function and tables are same
const trackableFuncs = functionsList.filter(f => {
// return function which are tracked && function name whose setof tables are tracked
return (
trackedFuncs.indexOf(f.function_name) === -1 && !!f.return_table_info
); // && add condition which will check whether the setoff table is tracked or not
});
/* */
const handleSchemaChange = e => {
const updatedSchema = e.target.value;
dispatch(push(`${appPrefix}/schema/${updatedSchema}`));
Promise.all([
dispatch({ type: UPDATE_CURRENT_SCHEMA, currentSchema: updatedSchema }),
dispatch(fetchDataInit()),
dispatch(fetchFunctionInit()),
dispatch(loadUntrackedRelations()),
]);
};
const styles = require('../PageContainer/PageContainer.scss');
let relationships = 0;
schema.map(t => (relationships += t.relationships.length));
@ -150,6 +190,24 @@ class Schema extends Component {
) : null}
</div>
<hr />
<div>
<div className={styles.display_inline}>Current postgres schema</div>
<div className={styles.display_inline}>
<select
onChange={handleSchemaChange}
className={styles.changeSchema + ' form-control'}
value={currentSchema}
>
{schemaList.map(s => {
if (s.schema_name === currentSchema) {
return <option key={s.schema_name}>{s.schema_name}</option>;
}
return <option key={s.schema_name}>{s.schema_name}</option>;
})}
</select>
</div>
</div>
<hr />
<div className={styles.add_pad_bottom}>
<div>
<h4
@ -181,29 +239,129 @@ class Schema extends Component {
</div>
</div>
<hr />
<div>
<div>
<h4
className={`${styles.subheading_text} ${
styles.heading_tooltip
}`}
>
Untracked foreign-key relations
</h4>
<OverlayTrigger placement="right" overlay={untrackedRelTip}>
<i className="fa fa-info-circle" aria-hidden="true" />
</OverlayTrigger>
<div className={`${styles.padd_left_remove} col-xs-12`}>
<div>
<AutoAddRelationsConnector
untrackedRelations={untrackedRelations}
schema={schema}
dispatch={dispatch}
/>
</div>
<div className={styles.wd100 + ' ' + styles.clear_fix}>
<h4
className={`${styles.subheading_text} ${styles.heading_tooltip}`}
>
Untracked foreign-key relations
</h4>
<OverlayTrigger placement="right" overlay={untrackedRelTip}>
<i className="fa fa-info-circle" aria-hidden="true" />
</OverlayTrigger>
<div className={`${styles.padd_left_remove} col-xs-12`}>
<div>
<AutoAddRelationsConnector
untrackedRelations={untrackedRelations}
schema={schema}
dispatch={dispatch}
/>
</div>
</div>
</div>
{trackableFuncs.length > 0
? [
<hr
className={styles.wd100 + ' ' + styles.clear_fix}
key={'custom-functions-hr'}
/>,
<div
className={styles.wd100 + ' ' + styles.clear_fix}
key={'custom-functions-content'}
>
<h4
className={`${styles.subheading_text} ${
styles.heading_tooltip
}`}
>
Untracked custom functions
</h4>
<OverlayTrigger
placement="right"
overlay={trackableFunctions}
>
<i className="fa fa-info-circle" aria-hidden="true" />
</OverlayTrigger>
<div className={`${styles.padd_left_remove} col-xs-12`}>
{trackableFuncs.map((p, i) => (
<div
className={styles.padd_bottom}
key={`${i}untracked-function`}
>
<div
className={`${styles.display_inline} ${
styles.padd_right
}`}
>
<button
data-test={`add-track-function-${p.function_name}`}
className={`${
styles.display_inline
} btn btn-xs btn-default`}
onClick={e => {
e.preventDefault();
dispatch(addExistingFunction(p.function_name));
}}
>
Add
</button>
</div>
<div
className={`${styles.padd_right} ${
styles.inline_block
}`}
>
{p.function_name}
</div>
</div>
))}
</div>
</div>,
]
: null}
{/* nonTrackableFunctionsList.length > 0
? [
<hr
className={styles.wd100 + ' ' + styles.clear_fix}
key={'non-trackable-custom-functions-id'}
/>,
<div
className={styles.wd100 + ' ' + styles.clear_fix}
key={'non-trackable-custom-functions-content'}
>
<h4
className={`${styles.subheading_text} ${
styles.heading_tooltip
}`}
>
Non trackable custom functions
</h4>
<OverlayTrigger
placement="right"
overlay={nonTrackableFunctions}
>
<i className="fa fa-info-circle" aria-hidden="true" />
</OverlayTrigger>
<div className={`${styles.padd_left_remove} col-xs-12`}>
{nonTrackableFunctionsList.map((p, i) => (
<div
className={styles.padd_bottom}
key={`${i}untracked-function`}
>
<div
className={`${styles.padd_right} ${
styles.inline_block
}`}
>
{p.function_name}
</div>
</div>
))}
</div>
</div>,
]
: null */}
</div>
</div>
);
@ -226,6 +384,10 @@ const mapStateToProps = state => ({
migrationMode: state.main.migrationMode,
untrackedRelations: state.tables.untrackedRelations,
currentSchema: state.tables.currentSchema,
functionsList: [...state.tables.postgresFunctions],
nonTrackableFunctionsList: [...state.tables.nonTrackablePostgresFunctions],
trackedFunctions: [...state.tables.trackedFunctions],
serverVersion: state.main.serverVersion ? state.main.serverVersion : '',
});
const schemaConnector = connect => connect(mapStateToProps)(Schema);

View File

@ -32,3 +32,13 @@ export const quickDefaultReadOnly = (
The selected role can perform select on all rows of the table.
</Tooltip>
);
export const trackableFunctions = (
<Tooltip id="tooltip-permission-read">
Track supported functions to query them with GraphQL
</Tooltip>
);
export const nonTrackableFunctions = (
<Tooltip id="tooltip-permission-read">WIP</Tooltip>
);

View File

@ -50,14 +50,12 @@
background-color: #ebf7de;
}
.ReactTable .rt-table .rt-tbody
{
.ReactTable .rt-table .rt-tbody {
margin-bottom: 10px;
}
.ReactTable .rt-tbody .rt-tr-group:last-child
{
border-bottom: solid 1px rgba(0,0,0,0.05);
.ReactTable .rt-tbody .rt-tr-group:last-child {
border-bottom: solid 1px rgba(0, 0, 0, 0.05);
}
.ReactTable .rt-table .rt-tbody .rt-tr-group .rt-tr.-even:hover {
background-color: #ebf7de;

View File

@ -28,6 +28,12 @@ export dataReducer from './DataReducer';
export metadataConnector from './Metadata/Metadata.js';
/* Function component */
export functionWrapperConnector from './Function/FunctionWrapper';
export ModifyCustomFunction from './Function/Modify/ModifyCustomFunction';
export PermissionCustomFunction from './Function/Permission/Permission';
/*
export Logs from './Logs/Logs';
export BrowseTemplates from './QueryTemplates/BrowseTemplates';

View File

@ -14,6 +14,7 @@ class CommonTabLayout extends React.Component {
tabsInfo,
baseUrl,
showLoader,
testPrefix,
} = this.props;
return (
@ -28,6 +29,7 @@ class CommonTabLayout extends React.Component {
tabsInfo={tabsInfo}
baseUrl={baseUrl}
showLoader={showLoader}
testPrefix={testPrefix}
/>
</div>
);

View File

@ -1,7 +1,15 @@
import React from 'react';
import { Link } from 'react-router';
const Tabs = ({ appPrefix, tabsInfo, tabName, count, baseUrl, showLoader }) => {
const Tabs = ({
appPrefix,
tabsInfo,
tabName,
count,
baseUrl,
showLoader,
testPrefix,
}) => {
let showCount = '';
if (!(count === null || count === undefined)) {
showCount = '(' + count + ')';
@ -25,7 +33,9 @@ const Tabs = ({ appPrefix, tabsInfo, tabName, count, baseUrl, showLoader }) => {
>
<Link
to={`${baseUrl}/${t}`}
data-test={`${appPrefix.slice(1)}-${t}`}
data-test={`${
testPrefix ? testPrefix + '-' : ''
}${appPrefix.slice(1)}-${t}`}
>
{tabsInfo[t].display_text} {tabName === t ? showCount : null}
{tabName === t && showLoader ? dataLoader() : null}

View File

@ -0,0 +1,93 @@
import React from 'react';
import PropTypes from 'prop-types';
/**
 * Read-only code viewer with a copy-to-clipboard affordance.
 *
 * Renders `copyText` in a <pre>/<code> block; when the globally loaded
 * sql-formatter and highlight.js bundles are present on `window`, the text
 * is pretty-printed and syntax-highlighted first. The copied text is
 * formatted the same way, so the clipboard matches what the user sees.
 */
class ReusableTextAreaWithCopy extends React.Component {
  // Copies the (formatted, when sqlFormatter is available) text to the
  // clipboard via a temporary <textarea> and document.execCommand('copy').
  // `type` is currently unused; `id` is the DOM id of the tooltip element
  // whose label is flipped to "Copied" on success.
  copyToClip(type, id) {
    let text = '';
    if (this.props.copyText.length > 0) {
      text = window.sqlFormatter
        ? window.sqlFormatter.format(this.props.copyText, {
            language: this.props.textLanguage,
          })
        : this.props.copyText;
    }
    // execCommand('copy') only works on a selected element, so stage the
    // text in a throwaway textarea appended to the document body.
    const textArea = document.createElement('textarea');
    textArea.value = text;
    document.body.appendChild(textArea);
    textArea.focus();
    textArea.select();
    try {
      const successful = document.execCommand('copy');
      // const msg = successful ? 'successful' : 'unsuccessful';
      const tooltip = document.getElementById(id);
      tooltip.innerHTML = 'Copied';
      if (!successful) {
        throw new Error('Copy was unsuccessful');
      }
    } catch (err) {
      alert('Oops, unable to copy - ' + err);
    }
    // Always remove the staging textarea, even if the copy failed.
    document.body.removeChild(textArea);
  }

  // Restores the tooltip label ("Copy") after the pointer leaves the icon.
  resetCopy(id) {
    const tooltip = document.getElementById(id);
    tooltip.innerHTML = 'Copy';
  }

  render() {
    const style = require('./style.scss');
    const { copyText } = this.props;
    // Highlighted rendering requires both window.sqlFormatter and
    // window.hljs (loaded as global scripts); otherwise fall back to
    // plain, unformatted text.
    return (
      <div className={`${style.codeBlockCustom}`}>
        <div className={`${style.copyGenerated}`}>
          <div className={`${style.copyTooltip}`}>
            <span className={style.tooltiptext} id="copyCustomFunctionSQL">
              Copy
            </span>
            <i
              className={'fa fa-copy'}
              onClick={this.copyToClip.bind(
                this,
                'plan',
                'copyCustomFunctionSQL'
              )}
              onMouseLeave={this.resetCopy.bind(this, 'copyCustomFunctionSQL')}
            />
            {/*
              onClick={this.copyToClip.bind(this, 'plan', 'copyPlan')}
              onMouseLeave={this.resetCopy.bind(this, 'copyPlan')}
            */}
          </div>
        </div>
        {window && window.sqlFormatter && window.hljs ? (
          <pre>
            <code
              className={style.formattedCode}
              dangerouslySetInnerHTML={{
                __html: window.hljs.highlight(
                  'sql',
                  window.sqlFormatter.format(copyText, {
                    language: this.props.textLanguage,
                  })
                ).value,
              }}
            />
          </pre>
        ) : (
          <pre>
            <code className={style.formattedCode}>{copyText}</code>
          </pre>
        )}
      </div>
    );
  }
}

ReusableTextAreaWithCopy.propTypes = {
  copyText: PropTypes.string.isRequired,
  textLanguage: PropTypes.string,
};

export default ReusableTextAreaWithCopy;

View File

@ -0,0 +1,104 @@
.sqlBlock {
position: relative;
width: 80%;
}
.codeBlockCustom
{
/* position: relative;
padding: 10px 20px; */
background-color: white;
/* margin: 20px;
width: 100%; */
width: auto;
border-radius: 5px;
// max-height: calc(100% - 60px);
overflow: auto;
margin-top: 0px;
// min-height: calc(100% - 60px);
}
.codeBlockCustom pre {
display: block;
padding: 10px 20px;
margin: 0px;
font-size: 13px;
line-height: unset;
word-break: unset;
word-wrap: unset;
color: #000;
background: none;
border: none;
border-radius: 0;
overflow: unset;
padding-bottom: 10px;
}
.codeBlockCustom code {
color: #000;
background: none;
}
.codeBlockCustom .formattedCode {
padding: 0px 0px !important;
}
.copyGenerated
{
position: absolute;
bottom: 15px;
right: 30px;
cursor: pointer;
}
.copyGenerated img, .copyExecution img
{
width: 20px;
opacity: .6;
}
.copyGenerated img:hover, .copyExecution img:hover
{
opacity: 1;
}
.copyGenerated:focus, .copyExecution:focus
{
outline: none;
}
.copyTooltip {
position: relative;
display: inline-block;
}
.copyTooltip .tooltiptext {
background-color: #555;
color: #fff;
text-align: center;
border-radius: 6px;
padding: 4px 0px;
font-size: 14px;
position: absolute;
z-index: 1000000000;
right: -21px;
bottom: 30px;
opacity: 0;
-webkit-transition: opacity 0.3s;
transition: opacity 0.3s;
display: none;
width: 57px;
}
.copyTooltip .tooltiptext::after {
content: "";
position: absolute;
top: 24px;
right: 22px;
margin-left: -5px;
border-width: 5px;
border-style: solid;
border-color: #555 transparent transparent transparent;
}
.copyTooltip:hover .tooltiptext {
visibility: visible;
opacity: 1;
display: block;
}

View File

@ -13,6 +13,7 @@ const componentsSemver = {
webhookEnvSupport: '1.0.0-alpha29',
insertPermRestrictColumns: '1.0.0-alpha28',
permHideUpsertSection: '1.0.0-alpha32',
customFunctionSection: '1.0.0-alpha36',
};
const getPreRelease = version => {

View File

@ -0,0 +1,78 @@
Schema/Metadata API Reference: Custom Functions
===============================================
.. contents:: Table of contents
:backlinks: none
:depth: 1
:local:
Add or remove a custom SQL function in the Hasura GraphQL engine's metadata using the following APIs.
.. Note::
Only custom functions added to metadata are available for ``querying/subscribing`` data over the **GraphQL** API.
.. _track_function:
track_function
--------------
``track_function`` is used to add a custom SQL function.
Refer to :ref:`this section <supported_sql_functions>` for constraints on supported functions.
Add a SQL function ``search_articles``:
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type": "track_function",
"args": {
"schema": "public",
"name": "search_articles"
}
}
.. _untrack_function:
untrack_function
----------------
``untrack_function`` is used to remove a SQL function from metadata.
Remove a SQL function ``search_articles``:
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type": "untrack_function",
"args": {
"schema": "public",
"name": "search_articles"
}
}
.. _args_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - function
- true
- :ref:`FunctionName <FunctionName>`
- Name of the SQL function

View File

@ -74,6 +74,14 @@ The various types of queries are listed in the following table:
- :ref:`untrack_table_args <untrack_table_syntax>`
- Remove a table/view
* - :ref:`track_function`
- :ref:`FunctionName <FunctionName>`
- Add a SQL function
* - :ref:`untrack_function`
- :ref:`FunctionName <FunctionName>`
- Remove a SQL function
* - :ref:`create_object_relationship`
- :ref:`create_object_relationship_args <create_object_relationship_syntax>`
- Define a new object relationship
@ -142,6 +150,7 @@ The various types of queries are listed in the following table:
- :doc:`Run SQL <run-sql>`
- :doc:`Tables/Views <table-view>`
- :doc:`Custom SQL Functions <custom-functions>`
- :doc:`Relationships <relationship>`
- :doc:`Permissions <permission>`
- :doc:`Event Triggers <event-triggers>`
@ -203,6 +212,7 @@ Error codes
Run SQL <run-sql>
Tables/Views <table-view>
Custom Functions <custom-functions>
Relationships <relationship>
Permissions <permission>
Event Triggers <event-triggers>

View File

@ -20,6 +20,27 @@ TableName
QualifiedTable
^^^^^^^^^^^^^^
.. parsed-literal::
:class: haskell-pre
{
"name": String,
"schema": String
}
.. _FunctionName:
FunctionName
^^^^^^^^^^^^
.. parsed-literal::
:class: haskell-pre
String | QualifiedFunction_
QualifiedFunction
^^^^^^^^^^^^^^^^^
.. parsed-literal::
:class: haskell-pre

View File

@ -0,0 +1,267 @@
Query custom SQL Functions
==========================
.. contents:: Table of contents
:backlinks: none
:depth: 2
:local:
What are custom SQL functions?
------------------------------
Custom SQL functions are user-defined SQL functions that can be used to either encapsulate some custom business
logic or extend the built-in SQL functions and operators.
Hasura GraphQL engine lets you expose certain types of custom functions over the GraphQL API to allow querying them
using both ``queries`` and ``subscriptions``.
.. _supported_sql_functions:
Supported SQL functions
-----------------------
Currently, only functions which satisfy the following constraints can be exposed over the GraphQL API
(*terminology from* `Postgres docs <https://www.postgresql.org/docs/current/sql-createfunction.html>`__):
- **Function behaviour**: ONLY ``STABLE`` or ``IMMUTABLE``
- **Return type**: MUST be ``SETOF <table-name>``
- **Argument modes**: ONLY ``IN``
Creating & exposing SQL functions
---------------------------------
Custom SQL functions can be created using SQL which can be run in the Hasura console:
- Head to the ``Data -> SQL`` section of the Hasura console
- Enter your `create function SQL statement <https://www.postgresql.org/docs/current/sql-createfunction.html>`__
- Select the ``Track this`` checkbox to expose the new function over the GraphQL API
- Hit the ``Run`` button
.. note::
If the ``SETOF`` table doesn't already exist or your function needs to return a custom type i.e. row set,
create and track an empty table with the required schema to support the function before executing the above
steps
Querying custom functions using GraphQL queries
-----------------------------------------------
Let's see how we can query custom functions using a GraphQL query by using the below examples:
Example: Text-search functions
******************************
Let's take a look at an example where the ``SETOF`` table is already part of the existing schema.
In our article/author schema, let's say we've created and tracked a custom function, ``search_articles``,
with the following definition:
.. code-block:: plpgsql
CREATE FUNCTION search_articles(search text)
RETURNS SETOF article AS $$
SELECT *
FROM article
WHERE
title ilike ('%' || search || '%')
OR content ilike ('%' || search || '%')
$$ LANGUAGE sql STABLE;
This function filters rows from the ``article`` table based on the input text argument, ``search`` i.e. it
returns ``SETOF article``. Assuming the ``article`` table is being tracked, you can use the custom function
as follows:
.. graphiql::
:view_only:
:query:
query {
search_articles(
args: {search: "hasura"}
){
id
title
content
}
}
:response:
{
"data": {
"search_articles": [
{
"id": 1,
"title": "first post by hasura",
"content": "some content for post"
},
{
"id": 2,
"title": "second post by hasura",
"content": "some other content for post"
}
]
}
}
Example: PostGIS functions
**************************
Let's take a look at an example where the ``SETOF`` table is not part of the existing schema.
Say you have 2 tables, for user and landmark location data, with the following definitions (*this example uses the
popular spatial database extension,* `PostGIS <https://postgis.net/>`__):
.. code-block:: sql
-- User location data
CREATE TABLE user_location (
user_id INTEGER PRIMARY KEY,
location GEOGRAPHY(Point)
);
-- Landmark location data
CREATE TABLE landmark (
id SERIAL PRIMARY KEY,
name TEXT,
type TEXT,
location GEOGRAPHY(Point)
);
In this example, we want to fetch a list of landmarks that are near a given user, along with the user's details in
the same query. PostGIS' built-in function ``ST_Distance`` can be used to implement this use case.
Since our use case requires an output that isn't a "subset" of any of the existing tables i.e. the ``SETOF`` table
doesn't exist, let's first create this table and then create our location search function.
- create and track the following table:
.. code-block:: sql
-- SETOF table
CREATE TABLE user_landmarks (
user_id INTEGER,
location GEOGRAPHY(Point),
nearby_landmarks JSON
);
- create and track the following function:
.. code-block:: plpgsql
-- function returns a list of landmarks near a user based on the
-- input arguments distance_kms and userid
CREATE FUNCTION search_landmarks_near_user(userid integer, distance_kms integer)
RETURNS SETOF user_landmarks AS $$
SELECT A.user_id, A.location,
(SELECT json_agg(row_to_json(B)) FROM landmark B
WHERE (
ST_Distance(
ST_Transform(B.location::Geometry, 3857),
ST_Transform(A.location::Geometry, 3857)
) /1000) < distance_kms
) AS nearby_landmarks
FROM user_location A where A.user_id = userid
$$ LANGUAGE sql STABLE;
This function fetches user information (*for the given input* ``userid``) and a list of landmarks which are
less than ``distance_kms`` kilometers away from the user's location as a JSON field. We can now refer to this
function in our GraphQL API as follows:
.. graphiql::
:view_only:
:query:
query {
search_landmarks_near_user(
args: {userid: 3, distance_kms: 20}
){
user_id
location
nearby_landmarks
}
}
:response:
{
"data": {
"search_landmarks_near_user": [
{
"user_id": 3,
"location": {
"type": "Point",
"crs": {
"type": "name",
"properties": {
"name": "urn:ogc:def:crs:EPSG::4326"
}
},
"coordinates": [
12.9406589,
77.6185572
]
},
"nearby_landmarks": [
{
"id": 3,
"name": "blue tokai",
"type": "coffee shop",
"location": "0101000020E61000004E74A785DCF22940BE44060399665340"
},
{
"id": 4,
"name": "Bangalore",
"type": "city",
"location": "0101000020E61000005396218E75F12940E78C28ED0D665340"
}
]
}
]
}
}
Aggregations on custom functions
********************************
You can query aggregations on a function result using ``<function-name>_aggregate`` field.
**For example**, count the number of articles returned by the function defined in the text-search example above:
.. code-block:: graphql
query {
search_articles_aggregate(
args: {search: "hasura"}
){
aggregate {
count
}
}
}
Using arguments with custom functions
*************************************
As with tables, arguments like ``where``, ``limit``, ``order_by``, ``offset``, etc. are also available for use with
function-based queries.
**For example**, limit the number of articles returned by the function defined in the text-search example above:
.. code-block:: graphql
query {
search_articles(
args: {search: "hasura"},
limit: 5
){
id
title
content
}
}
Permissions for custom function queries
---------------------------------------
Access control permissions configured for the ``SETOF`` table of a function are also applicable to the function itself.
**For example**, in our text-search example above, if the role ``user`` doesn't have the requisite permissions to view
the table ``article``, a validation error will be thrown if the ``search_articles`` query is run using the ``user``
role.

View File

@ -1,13 +1,13 @@
Distinct queries
================
Distinct query results
======================
.. contents:: Table of contents
:backlinks: none
:depth: 1
:local:
You can fetch distinct columns using ``distinct_on`` argument. Initial ``order_by`` columns must
match ``distinct_on`` columns. Learn more about ``order_by`` :doc:`here <sorting>`.
You can fetch rows with only distinct values of a column using the ``distinct_on`` argument. The first ``order_by``
columns must match the ``distinct_on`` column. See :doc:`sort queries <sorting>` for more info on ``order_by``.
.. code-block:: graphql
@ -16,7 +16,7 @@ match ``distinct_on`` columns. Learn more about ``order_by`` :doc:`here <sorting
order_by: [employee_order_by]
): [employee]!
#select column enum type for "employee" table
# select column enum type for "employee" table
enum employee_select_column {
id
name
@ -24,7 +24,10 @@ match ``distinct_on`` columns. Learn more about ``order_by`` :doc:`here <sorting
salary
}
For example, fetch highest salaried employee from each department:
Fetch results with distinct values of a particular field
--------------------------------------------------------
**For example**, fetch highest salaried employee from each department:
.. graphiql::
:view_only:

View File

@ -51,11 +51,12 @@ based on a typical author/article schema for reference.
simple-object-queries
nested-object-queries
aggregation-queries
distinct-queries
query-filters
sorting
distinct-queries
pagination
Using multiple arguments <multiple-arguments>
multiple-queries
custom-functions
derived-data
control-access

View File

@ -28,7 +28,7 @@ Views can be created using SQL which can be run in the Hasura console:
- Head to the ``Data -> SQL`` section of the Hasura console
- Enter your `create view SQL statement <https://www.postgresql.org/docs/9.6/static/sql-createview.html>`__
- Select the ``Track table`` checkbox to expose the new view over the GraphQL API
- Select the ``Track this`` checkbox to expose the new view over the GraphQL API
- Hit the ``Run`` button

View File

@ -166,6 +166,7 @@ library
, Hasura.RQL.DDL.Relationship
, Hasura.RQL.DDL.QueryTemplate
, Hasura.RQL.DDL.Schema.Table
, Hasura.RQL.DDL.Schema.Function
, Hasura.RQL.DDL.Schema.Diff
, Hasura.RQL.DDL.Metadata
, Hasura.RQL.DDL.Utils

View File

@ -24,7 +24,7 @@ import qualified Database.PG.Query as Q
import qualified Database.PG.Query.Connection as Q
curCatalogVer :: T.Text
curCatalogVer = "7"
curCatalogVer = "8"
initCatalogSafe
:: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m)
@ -116,6 +116,15 @@ initCatalogStrict createSchema initTime = do
|] (Identity sn) False
-- | Rebuild the schema cache and then execute the given RQL query.
-- Used by the catalog migration steps below to apply metadata changes
-- (e.g. tracking newly added hdb_catalog tables) after a SQL migration.
migrateMetadata
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
=> RQLQuery -> m ()
migrateMetadata rqlQuery = do
-- build schema cache
buildSchemaCache
-- run the RQL query to migrate metadata
void $ runQueryM rqlQuery
setAllAsSystemDefined :: (MonadTx m) => m ()
setAllAsSystemDefined = liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ "UPDATE hdb_catalog.hdb_table SET is_system_defined = 'true'" () False
@ -123,22 +132,51 @@ setAllAsSystemDefined = liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ "UPDATE hdb_catalog.hdb_permission SET is_system_defined = 'true'" () False
Q.unitQ "UPDATE hdb_catalog.hdb_query_template SET is_system_defined = 'true'" () False
setAsSystemDefined :: (MonadTx m) => m ()
setAsSystemDefined =
setAsSystemDefinedFor2 :: (MonadTx m) => m ()
setAsSystemDefinedFor2 =
liftTx $ Q.catchE defaultTxErrorHandler $
Q.multiQ [Q.sql|
UPDATE hdb_catalog.hdb_table
SET is_system_defined = 'true'
WHERE table_schema = 'hdb_catalog';
UPDATE hdb_catalog.hdb_permission
SET is_system_defined = 'true'
WHERE table_schema = 'hdb_catalog';
WHERE table_schema = 'hdb_catalog'
AND ( table_name = 'event_triggers'
OR table_name = 'event_log'
OR table_name = 'event_invocation_logs'
);
UPDATE hdb_catalog.hdb_relationship
SET is_system_defined = 'true'
WHERE table_schema = 'hdb_catalog';
|]
WHERE table_schema = 'hdb_catalog'
AND ( table_name = 'event_triggers'
OR table_name = 'event_log'
OR table_name = 'event_invocation_logs'
);
|]
-- | Mark the metadata row introduced by the version-5 catalog migration
-- (the 'hdb_catalog.remote_schemas' table) as system defined, so it is
-- treated as internal rather than user-tracked metadata.
setAsSystemDefinedFor5 :: (MonadTx m) => m ()
setAsSystemDefinedFor5 =
liftTx $ Q.catchE defaultTxErrorHandler $
Q.multiQ [Q.sql|
UPDATE hdb_catalog.hdb_table
SET is_system_defined = 'true'
WHERE table_schema = 'hdb_catalog'
AND table_name = 'remote_schemas';
|]
-- | Mark the metadata introduced by the version-8 catalog migration
-- (the 'hdb_function' / 'hdb_function_agg' tables and the relationship
-- on 'hdb_function_agg') as system defined.
setAsSystemDefinedFor8 :: (MonadTx m) => m ()
setAsSystemDefinedFor8 =
liftTx $ Q.catchE defaultTxErrorHandler $
Q.multiQ [Q.sql|
UPDATE hdb_catalog.hdb_table
SET is_system_defined = 'true'
WHERE table_schema = 'hdb_catalog'
AND ( table_name = 'hdb_function_agg'
OR table_name = 'hdb_function'
);
UPDATE hdb_catalog.hdb_relationship
SET is_system_defined = 'true'
WHERE table_schema = 'hdb_catalog'
AND table_name = 'hdb_function_agg';
|]
cleanCatalog :: (MonadTx m) => m ()
cleanCatalog = liftTx $ Q.catchE defaultTxErrorHandler $ do
@ -173,9 +211,9 @@ from1To2 = do
-- migrate database
Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/migrate_from_1.sql")
void $ runQueryM migrateMetadataFrom1
migrateMetadata migrateMetadataFrom1
-- set as system defined
setAsSystemDefined
setAsSystemDefinedFor2
where
migrateMetadataFrom1 =
$(unTypeQ (Y.decodeFile "src-rsr/migrate_metadata_from_1.yaml" :: Q (TExp RQLQuery)))
@ -194,9 +232,9 @@ from4To5
from4To5 = do
Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/migrate_from_4_to_5.sql")
void $ runQueryM migrateMetadataFrom4
migrateMetadata migrateMetadataFrom4
-- set as system defined
setAsSystemDefined
setAsSystemDefinedFor5
where
migrateMetadataFrom4 =
$(unTypeQ (Y.decodeFile "src-rsr/migrate_metadata_from_4_to_5.yaml" :: Q (TExp RQLQuery)))
@ -241,6 +279,20 @@ from6To7 = liftTx $ do
$(Q.sqlFromFile "src-rsr/migrate_from_6_to_7.sql")
return ()
-- | Migrate the catalog from version 7 to 8: run the SQL migration file,
-- then apply the accompanying metadata migration (YAML embedded at
-- compile time via Template Haskell). The caller is expected to invoke
-- 'setAsSystemDefinedFor8' afterwards to flag the new tables.
from7To8
:: (MonadTx m, HasHttpManager m, CacheRWM m, UserInfoM m, MonadIO m)
=> m ()
from7To8 = do
-- migrate database
Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/migrate_from_7_to_8.sql")
-- migrate metadata
migrateMetadata migrateMetadataFrom7
setAsSystemDefinedFor8
where
migrateMetadataFrom7 =
$(unTypeQ (Y.decodeFile "src-rsr/migrate_metadata_from_7_to_8.yaml" :: Q (TExp RQLQuery)))
migrateCatalog
:: (MonadTx m, CacheRWM m, MonadIO m, UserInfoM m, HasHttpManager m)
=> UTCTime -> m String
@ -255,12 +307,17 @@ migrateCatalog migrationTime = do
| preVer == "4" -> from4ToCurrent
| preVer == "5" -> from5ToCurrent
| preVer == "6" -> from6ToCurrent
| preVer == "7" -> from7ToCurrent
| otherwise -> throw400 NotSupported $
"unsupported version : " <> preVer
where
from7ToCurrent = do
from7To8
postMigrate
from6ToCurrent = do
from6To7
postMigrate
from7ToCurrent
from5ToCurrent = do
from5To6
@ -292,7 +349,7 @@ migrateCatalog migrationTime = do
-- clean hdb_views
liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
-- try building the schema cache
void buildSchemaCache
buildSchemaCache
return $ "successfully migrated to " ++ show curCatalogVer
updateVersion =

View File

@ -76,7 +76,7 @@ data Event
} deriving (Show, Eq)
instance ToJSON Event where
toJSON (Event eid (QualifiedTable sn tn) trigger event _ created)=
toJSON (Event eid (QualifiedObject sn tn) trigger event _ created)=
object [ "id" .= eid
, "table" .= object [ "schema" .= sn
, "name" .= tn
@ -377,7 +377,7 @@ fetchEvents =
LIMIT 100 )
RETURNING id, schema_name, table_name, trigger_id, trigger_name, payload::json, tries, created_at
|] () True
where uncurryEvent (id', sn, tn, trid, trn, Q.AltJ payload, tries, created) = Event id' (QualifiedTable sn tn) (TriggerMeta trid trn) payload tries created
where uncurryEvent (id', sn, tn, trid, trn, Q.AltJ payload, tries, created) = Event id' (QualifiedObject sn tn) (TriggerMeta trid trn) payload tries created
insertInvocation :: Invocation -> Q.TxE QErr ()
insertInvocation invo = do

View File

@ -28,6 +28,10 @@ data OpCtx
| OCSelectPkey QualifiedTable AnnBoolExpSQL [T.Text]
-- tn, filter exp, limit, req hdrs
| OCSelectAgg QualifiedTable AnnBoolExpSQL (Maybe Int) [T.Text]
-- tn, fn, filter, limit, req hdrs
| OCFuncQuery QualifiedTable QualifiedFunction AnnBoolExpSQL (Maybe Int) [T.Text]
-- tn, fn, filter, limit, req hdrs
| OCFuncAggQuery QualifiedTable QualifiedFunction AnnBoolExpSQL (Maybe Int) [T.Text]
-- tn, filter exp, req hdrs
| OCUpdate QualifiedTable AnnBoolExpSQL [T.Text]
-- tn, filter exp, req hdrs
@ -36,14 +40,15 @@ data OpCtx
data GCtx
= GCtx
{ _gTypes :: !TypeMap
, _gFields :: !FieldMap
, _gOrdByCtx :: !OrdByCtx
, _gQueryRoot :: !ObjTyInfo
, _gMutRoot :: !(Maybe ObjTyInfo)
, _gSubRoot :: !(Maybe ObjTyInfo)
, _gOpCtxMap :: !OpCtxMap
, _gInsCtxMap :: !InsCtxMap
{ _gTypes :: !TypeMap
, _gFields :: !FieldMap
, _gOrdByCtx :: !OrdByCtx
, _gFuncArgCtx :: !FuncArgCtx
, _gQueryRoot :: !ObjTyInfo
, _gMutRoot :: !(Maybe ObjTyInfo)
, _gSubRoot :: !(Maybe ObjTyInfo)
, _gOpCtxMap :: !OpCtxMap
, _gInsCtxMap :: !InsCtxMap
} deriving (Show, Eq)
instance Has TypeMap GCtx where
@ -88,17 +93,19 @@ type GCtxMap = Map.HashMap RoleName GCtx
data TyAgg
= TyAgg
{ _taTypes :: !TypeMap
, _taFields :: !FieldMap
, _taOrdBy :: !OrdByCtx
{ _taTypes :: !TypeMap
, _taFields :: !FieldMap
, _taOrdBy :: !OrdByCtx
, _taFuncArg :: !FuncArgCtx
} deriving (Show, Eq)
instance Semigroup TyAgg where
(TyAgg t1 f1 o1) <> (TyAgg t2 f2 o2) =
(TyAgg t1 f1 o1 fa1) <> (TyAgg t2 f2 o2 fa2) =
TyAgg (Map.union t1 t2) (Map.union f1 f2) (Map.union o1 o2)
(Map.union fa1 fa2)
instance Monoid TyAgg where
mempty = TyAgg Map.empty Map.empty Map.empty
mempty = TyAgg Map.empty Map.empty Map.empty Map.empty
mappend = (<>)
newtype RootFlds
@ -309,7 +316,7 @@ defaultTypes = $(fromSchemaDocQ defaultSchema HasuraType)
mkGCtx :: TyAgg -> RootFlds -> InsCtxMap -> GCtx
mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) insCtxMap =
mkGCtx (TyAgg tyInfos fldInfos ordByEnums funcArgCtx) (RootFlds flds) insCtxMap =
let queryRoot = mkHsraObjTyInfo (Just "query root")
(G.NamedType "query_root") $
mapFromL _fiName (schemaFld:typeFld:qFlds)
@ -325,7 +332,7 @@ mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) insCtxMap =
] <>
scalarTys <> compTys <> defaultTypes
-- for now subscription root is query root
in GCtx allTys fldInfos ordByEnums queryRoot mutRootM subRootM
in GCtx allTys fldInfos ordByEnums funcArgCtx queryRoot mutRootM subRootM
(Map.map fst flds) insCtxMap
where
colTys = Set.toList $ Set.fromList $ map pgiType $

View File

@ -48,11 +48,11 @@ data FieldPlan
$(J.deriveJSON (J.aesonDrop 3 J.camelCase) ''FieldPlan)
type Explain =
(ReaderT (FieldMap, OrdByCtx) (Except QErr))
(ReaderT (FieldMap, OrdByCtx, FuncArgCtx) (Except QErr))
runExplain
:: (MonadError QErr m)
=> (FieldMap, OrdByCtx) -> Explain a -> m a
=> (FieldMap, OrdByCtx, FuncArgCtx) -> Explain a -> m a
runExplain ctx m =
either throwError return $ runExcept $ runReaderT m ctx
@ -66,26 +66,31 @@ explainField userInfo gCtx fld =
"__typename" -> return $ FieldPlan fName Nothing Nothing
_ -> do
opCxt <- getOpCtx fName
sel <- runExplain (fldMap, orderByCtx) $ case opCxt of
OCSelect tn permFilter permLimit hdrs -> do
validateHdrs hdrs
RS.mkSQLSelect False <$>
RS.fromField txtConverter tn permFilter permLimit fld
OCSelectPkey tn permFilter hdrs -> do
validateHdrs hdrs
RS.mkSQLSelect True <$>
RS.fromFieldByPKey txtConverter tn permFilter fld
OCSelectAgg tn permFilter permLimit hdrs -> do
validateHdrs hdrs
RS.mkAggSelect <$>
RS.fromAggField txtConverter tn permFilter permLimit fld
_ -> throw500 "unexpected mut field info for explain"
builderSQL <- runExplain (fldMap, orderByCtx, funcArgCtx) $
case opCxt of
OCSelect tn permFilter permLimit hdrs -> do
validateHdrs hdrs
toSQL . RS.mkSQLSelect False <$>
RS.fromField txtConverter tn permFilter permLimit fld
OCSelectPkey tn permFilter hdrs -> do
validateHdrs hdrs
toSQL . RS.mkSQLSelect True <$>
RS.fromFieldByPKey txtConverter tn permFilter fld
OCSelectAgg tn permFilter permLimit hdrs -> do
validateHdrs hdrs
toSQL . RS.mkAggSelect <$>
RS.fromAggField txtConverter tn permFilter permLimit fld
OCFuncQuery tn fn permFilter permLimit hdrs ->
procFuncQuery tn fn permFilter permLimit hdrs False
OCFuncAggQuery tn fn permFilter permLimit hdrs ->
procFuncQuery tn fn permFilter permLimit hdrs True
_ -> throw500 "unexpected mut field info for explain"
let selectSQL = TB.run $ toSQL sel
withExplain = "EXPLAIN (FORMAT TEXT) " <> selectSQL
let txtSQL = TB.run builderSQL
withExplain = "EXPLAIN (FORMAT TEXT) " <> txtSQL
planLines <- liftTx $ map runIdentity <$>
Q.listQE dmlTxErrorHandler (Q.fromText withExplain) () True
return $ FieldPlan fName (Just selectSQL) $ Just planLines
return $ FieldPlan fName (Just txtSQL) $ Just planLines
where
fName = _fName fld
txtConverter = return . uncurry toTxtValue
@ -93,11 +98,20 @@ explainField userInfo gCtx fld =
opCtxMap = _gOpCtxMap gCtx
fldMap = _gFields gCtx
orderByCtx = _gOrdByCtx gCtx
funcArgCtx = _gFuncArgCtx gCtx
getOpCtx f =
onNothing (Map.lookup f opCtxMap) $ throw500 $
"lookup failed: opctx: " <> showName f
procFuncQuery tn fn permFilter permLimit hdrs isAgg = do
validateHdrs hdrs
(tabArgs, eSel, frmItem) <-
RS.fromFuncQueryField txtConverter fn isAgg fld
return $ toSQL $
RS.mkFuncSelectWith fn tn
(RS.TablePerm permFilter permLimit) tabArgs eSel frmItem
validateHdrs hdrs = do
let receivedHdrs = userVars userInfo
forM_ hdrs $ \hdr ->

View File

@ -27,31 +27,38 @@ import qualified Hasura.GraphQL.Resolve.Select as RS
buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString
buildTx userInfo gCtx fld = do
opCxt <- getOpCtx $ _fName fld
join $ fmap fst $ runConvert (fldMap, orderByCtx, insCtxMap) $ case opCxt of
join $ fmap fst $ runConvert (fldMap, orderByCtx, insCtxMap, funcArgCtx) $ case opCxt of
OCSelect tn permFilter permLimit hdrs ->
validateHdrs hdrs >> RS.convertSelect tn permFilter permLimit fld
OCSelectPkey tn permFilter hdrs ->
validateHdrs hdrs >> RS.convertSelectByPKey tn permFilter fld
-- RS.convertSelect tn permFilter fld
OCSelectAgg tn permFilter permLimit hdrs ->
validateHdrs hdrs >> RS.convertAggSelect tn permFilter permLimit fld
OCFuncQuery tn fn permFilter permLimit hdrs ->
validateHdrs hdrs >> RS.convertFuncQuery tn fn permFilter permLimit False fld
OCFuncAggQuery tn fn permFilter permLimit hdrs ->
validateHdrs hdrs >> RS.convertFuncQuery tn fn permFilter permLimit True fld
OCInsert tn hdrs ->
validateHdrs hdrs >> RI.convertInsert roleName tn fld
-- RM.convertInsert (tn, vn) cols fld
OCUpdate tn permFilter hdrs ->
validateHdrs hdrs >> RM.convertUpdate tn permFilter fld
-- RM.convertUpdate tn permFilter fld
OCDelete tn permFilter hdrs ->
validateHdrs hdrs >> RM.convertDelete tn permFilter fld
-- RM.convertDelete tn permFilter fld
where
roleName = userRole userInfo
opCtxMap = _gOpCtxMap gCtx
fldMap = _gFields gCtx
orderByCtx = _gOrdByCtx gCtx
insCtxMap = _gInsCtxMap gCtx
funcArgCtx = _gFuncArgCtx gCtx
getOpCtx f =
onNothing (Map.lookup f opCtxMap) $ throw500 $

View File

@ -1,6 +1,8 @@
module Hasura.GraphQL.Resolve.Context
( FieldMap
, RelationInfoMap
, FuncArgItem(..)
, FuncArgCtx
, OrdByCtx
, OrdByItemMap
, OrdByItem(..)
@ -122,7 +124,7 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $
type PrepArgs = Seq.Seq Q.PrepArg
type Convert =
StateT PrepArgs (ReaderT (FieldMap, OrdByCtx, InsCtxMap) (Except QErr))
StateT PrepArgs (ReaderT (FieldMap, OrdByCtx, InsCtxMap, FuncArgCtx) (Except QErr))
prepare
:: (MonadState PrepArgs m) => PrepFn m
@ -133,7 +135,7 @@ prepare (colTy, colVal) = do
runConvert
:: (MonadError QErr m)
=> (FieldMap, OrdByCtx, InsCtxMap) -> Convert a -> m (a, PrepArgs)
=> (FieldMap, OrdByCtx, InsCtxMap, FuncArgCtx) -> Convert a -> m (a, PrepArgs)
runConvert ctx m =
either throwError return $
runExcept $ runReaderT (runStateT m Seq.empty) ctx

View File

@ -3,6 +3,7 @@ module Hasura.GraphQL.Resolve.ContextTypes where
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.RQL.Types.BoolExp
@ -26,6 +27,12 @@ type OrdByItemMap = Map.HashMap G.Name OrdByItem
type OrdByCtx = Map.HashMap G.NamedType OrdByItemMap
-- | GraphQL name of a single input argument of a tracked SQL function.
newtype FuncArgItem
= FuncArgItem {getArgName :: G.Name}
deriving (Show, Eq)
-- Maps a function's "_args" input-object type to the ordered sequence of
-- its argument names (order matters when building the SQL call).
type FuncArgCtx = Map.HashMap G.NamedType (Seq.Seq FuncArgItem)
-- insert context
type RelationInfoMap = Map.HashMap RelName RelInfo

View File

@ -2,6 +2,7 @@ module Hasura.GraphQL.Resolve.Select
( convertSelect
, convertSelectByPKey
, convertAggSelect
, convertFuncQuery
, parseColumns
, withSelSet
, fromSelSet
@ -9,6 +10,7 @@ module Hasura.GraphQL.Resolve.Select
, fromField
, fromFieldByPKey
, fromAggField
, fromFuncQueryField
) where
import Control.Arrow (first)
@ -43,7 +45,7 @@ withSelSet selSet f =
fromSelSet
:: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r)
=> PrepFn m -> G.NamedType -> SelSet -> m [(FieldName, RS.AnnFld)]
=> PrepFn m -> G.NamedType -> SelSet -> m RS.AnnFlds
fromSelSet f fldTy flds =
forM (toList flds) $ \fld -> do
let fldName = _fName fld
@ -68,6 +70,19 @@ fromSelSet f fldTy flds =
ObjRel -> RS.FObj annRel
ArrRel -> RS.FArr $ RS.ASSimple annRel
fromAggSelSet
:: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r)
=> PrepFn m -> G.NamedType -> SelSet -> m RS.TableAggFlds
fromAggSelSet fn fldTy selSet = fmap toFields $
withSelSet selSet $ \f -> do
let fTy = _fType f
fSelSet = _fSelSet f
case _fName f of
"__typename" -> return $ RS.TAFExp $ G.unName $ G.unNamedType fldTy
"aggregate" -> RS.TAFAgg <$> convertAggFld fTy fSelSet
"nodes" -> RS.TAFNodes <$> fromSelSet fn fTy fSelSet
G.Name t -> throw500 $ "unexpected field in _agg node: " <> t
fieldAsPath :: (MonadError QErr m) => Field -> m a -> m a
fieldAsPath = nameAsPath . _fName
@ -103,8 +118,7 @@ fromField
:: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r)
=> PrepFn m -> QualifiedTable -> AnnBoolExpSQL
-> Maybe Int -> Field -> m RS.AnnSel
fromField f tn permFilter permLimitM fld =
fieldAsPath fld $ do
fromField f tn permFilter permLimitM fld = fieldAsPath fld $ do
tableArgs <- parseTableArgs f args
annFlds <- fromSelSet f (_fType fld) $ _fSelSet fld
let tabFrom = RS.TableFrom tn Nothing
@ -266,7 +280,7 @@ convertCount args = do
mkCType isDistinct cols = return $
bool (S.CTSimple cols) (S.CTDistinct cols) isDistinct
toFields :: [(T.Text, a)] -> [(FieldName, a)]
toFields :: [(T.Text, a)] -> RS.Fields a
toFields = map (first FieldName)
convertColFlds
@ -299,24 +313,14 @@ fromAggField
:: (MonadError QErr m, MonadReader r m, Has FieldMap r, Has OrdByCtx r)
=> PrepFn m -> QualifiedTable -> AnnBoolExpSQL
-> Maybe Int -> Field -> m RS.AnnAggSel
fromAggField fn tn permFilter permLimitM fld = fieldAsPath fld $ do
tableArgs <- parseTableArgs fn args
aggSelFlds <- toFields <$>
fromAggSel (_fType fld) (_fSelSet fld)
fromAggField f tn permFilter permLimit fld = fieldAsPath fld $ do
tableArgs <- parseTableArgs f args
aggSelFlds <- fromAggSelSet f (_fType fld) (_fSelSet fld)
let tabFrom = RS.TableFrom tn Nothing
tabPerm = RS.TablePerm permFilter permLimitM
tabPerm = RS.TablePerm permFilter permLimit
return $ RS.AnnSelG aggSelFlds tabFrom tabPerm tableArgs
where
args = _fArguments fld
fromAggSel ty selSet =
withSelSet selSet $ \f -> do
let fTy = _fType f
fSelSet = _fSelSet f
case _fName f of
"__typename" -> return $ RS.TAFExp $ G.unName $ G.unNamedType ty
"aggregate" -> RS.TAFAgg <$> convertAggFld fTy fSelSet
"nodes" -> RS.TAFNodes <$> fromSelSet fn fTy fSelSet
G.Name t -> throw500 $ "unexpected field in _agg node: " <> t
convertAggSelect
:: QualifiedTable -> AnnBoolExpSQL -> Maybe Int -> Field -> Convert RespTx
@ -325,3 +329,48 @@ convertAggSelect qt permFilter permLimit fld = do
fromAggField prepare qt permFilter permLimit fld
prepArgs <- get
return $ RS.selectAggP2 (selData, prepArgs)
fromFuncQueryField
::( MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has FuncArgCtx r
)
=> PrepFn m -> QualifiedFunction -> Bool -> Field
-> m (RS.TableArgs, Either RS.TableAggFlds RS.AnnFlds, S.FromItem)
fromFuncQueryField f qf isAgg fld = fieldAsPath fld $ do
funcArgsM <- withArgM args "args" $ parseFunctionArgs f
let funcArgs = fromMaybe [] funcArgsM
funcFrmItem = S.mkFuncFromItem qf funcArgs
tableArgs <- parseTableArgs f args
eSelFlds <- bool nonAggSel aggSel isAgg
return (tableArgs, eSelFlds, funcFrmItem)
where
args = _fArguments fld
nonAggSel = Right <$>
fromSelSet f (_fType fld) (_fSelSet fld)
aggSel = Left <$>
fromAggSelSet f (_fType fld) (_fSelSet fld)
parseFunctionArgs
::(MonadError QErr m, MonadReader r m, Has FuncArgCtx r)
=> PrepFn m -> AnnGValue -> m [S.SQLExp]
parseFunctionArgs fn val =
flip withObject val $ \nTy obj -> do
funcArgCtx :: FuncArgCtx <- asks getter
argSeq <- onNothing (Map.lookup nTy funcArgCtx) $ throw500 $
"namedType " <> showNamedTy nTy <> " not found in args context"
fmap toList $ forM argSeq $ \(FuncArgItem argName) -> do
argVal <- onNothing (OMap.lookup argName obj) $ throw500 $
"argument " <> showName argName <> " required in input type "
<> showNamedTy nTy
fn =<< asPGColVal argVal
convertFuncQuery
:: QualifiedTable -> QualifiedFunction -> AnnBoolExpSQL
-> Maybe Int -> Bool -> Field -> Convert RespTx
convertFuncQuery qt qf permFilter permLimit isAgg fld = do
(tableArgs, sel, frmItem) <- withPathK "selectionSet" $
fromFuncQueryField prepare qf isAgg fld
let tabPerm = RS.TablePerm permFilter permLimit
prepArgs <- get
return $ RS.funcQueryTx frmItem qf qt tabPerm tableArgs (sel, prepArgs)

View File

@ -8,6 +8,7 @@ module Hasura.GraphQL.Schema
, InsCtxMap
, RelationInfoMap
, isAggFld
, qualObjectToName
-- Schema stitching related
, RemoteGCtx (..)
, checkSchemaConflicts
@ -23,6 +24,8 @@ import Data.Maybe (maybeToList)
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
@ -152,13 +155,11 @@ type SelField = Either PGColInfo (RelInfo, Bool, AnnBoolExpSQL, Maybe Int, Bool)
-- mkHsraScalarTyInfo :: PGColType -> ScalarTyInfo
-- mkHsraScalarTyInfo ty = ScalarTyInfo Nothing ty HasuraType
qualTableToName :: QualifiedTable -> G.Name
qualTableToName = G.Name <$> \case
QualifiedTable (SchemaName "public") tn -> getTableTxt tn
QualifiedTable sn tn -> getSchemaTxt sn <> "_" <> getTableTxt tn
-- | GraphQL name of a qualified database object, generated by
-- 'snakeCaseQualObject' (the replaced table-specific version dropped
-- the "public" schema prefix; presumably this does the same — confirm).
qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name
qualObjectToName = G.Name . snakeCaseQualObject

-- | An object may appear in the GraphQL schema only when its
-- generated name is a valid GraphQL name.
isValidObjectName :: (ToTxt a) => QualifiedObject a -> Bool
isValidObjectName = isValidName . qualObjectToName
isValidField :: FieldInfo -> Bool
isValidField = \case
@ -167,7 +168,7 @@ isValidField = \case
where
isColEligible = isValidName . G.Name . getPGColTxt
isRelEligible rn rt = isValidName (G.Name $ getRelTxt rn)
&& isValidTableName rt
&& isValidObjectName rt
upsertable :: [ConstraintName] -> Bool -> Bool -> Bool
upsertable uniqueOrPrimaryCons isUpsertAllowed view =
@ -227,30 +228,38 @@ mkAggRelName (RelName r) = G.Name $ r <> "_aggregate"
mkBoolExpName :: QualifiedTable -> G.Name
mkBoolExpName tn =
qualTableToName tn <> "_bool_exp"
qualObjectToName tn <> "_bool_exp"
mkBoolExpTy :: QualifiedTable -> G.NamedType
mkBoolExpTy =
G.NamedType . mkBoolExpName
-- | Name of the "<function>_args" input object type.
mkFuncArgsName :: QualifiedFunction -> G.Name
mkFuncArgsName fn =
  qualObjectToName fn <> "_args"

-- | Named type wrapper over 'mkFuncArgsName'.
mkFuncArgsTy :: QualifiedFunction -> G.NamedType
mkFuncArgsTy =
  G.NamedType . mkFuncArgsName
mkTableTy :: QualifiedTable -> G.NamedType
mkTableTy =
G.NamedType . qualTableToName
G.NamedType . qualObjectToName
mkTableAggTy :: QualifiedTable -> G.NamedType
mkTableAggTy tn =
G.NamedType $ qualTableToName tn <> "_aggregate"
G.NamedType $ qualObjectToName tn <> "_aggregate"
mkTableAggFldsTy :: QualifiedTable -> G.NamedType
mkTableAggFldsTy tn =
G.NamedType $ qualTableToName tn <> "_aggregate_fields"
G.NamedType $ qualObjectToName tn <> "_aggregate_fields"
mkTableColAggFldsTy :: G.Name -> QualifiedTable -> G.NamedType
mkTableColAggFldsTy op tn =
G.NamedType $ qualTableToName tn <> "_" <> op <> "_fields"
G.NamedType $ qualObjectToName tn <> "_" <> op <> "_fields"
mkTableByPKeyTy :: QualifiedTable -> G.Name
mkTableByPKeyTy tn = qualTableToName tn <> "_by_pk"
mkTableByPKeyTy tn = qualObjectToName tn <> "_by_pk"
-- --- | make compare expression input type
-- mkCompExpInp :: PGColType -> InpObjTyInfo
@ -511,7 +520,7 @@ mkSelFld tn =
mkHsraObjFldInfo (Just desc) fldName args ty
where
desc = G.Description $ "fetch data from the table: " <>> tn
fldName = qualTableToName tn
fldName = qualObjectToName tn
args = fromInpValL $ mkSelArgs tn
ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy tn
@ -554,14 +563,80 @@ mkAggSelFld tn =
where
desc = G.Description $ "fetch aggregated fields from the table: "
<>> tn
fldName = qualTableToName tn <> "_aggregate"
fldName = qualObjectToName tn <> "_aggregate"
args = fromInpValL $ mkSelArgs tn
ty = G.toGT $ G.toNT $ mkTableAggTy tn
{-
function(
args: function_args
where: table_bool_exp
limit: Int
offset: Int
): [table!]!
-}
-- | Parameter map of a function query field: the "args" input object
-- (present only when the function has input arguments) followed by
-- the usual select arguments (where/order_by/limit/offset) of the
-- returning table.
mkFuncArgs :: FunctionInfo -> ParamMap
mkFuncArgs fi =
  fromInpValL $ argsInpVals <> mkSelArgs (fiReturnType fi)
  where
    fn = fiName fi

    -- zero-argument functions get no "args" parameter at all
    argsInpVals
      | null (fiInputArgs fi) = []
      | otherwise             = [argsInpVal]

    argsDesc = G.Description $ "input parameters for function " <>> fn
    argsInpVal = InpValInfo (Just argsDesc) "args" $
                 G.toGT $ G.toNT $ mkFuncArgsTy fn
-- | Root query field for a tracked function, returning a non-null
-- list of non-null rows of the function's SETOF table.
mkFuncQueryFld
  :: FunctionInfo -> ObjFldInfo
mkFuncQueryFld fi =
  mkHsraObjFldInfo (Just description) name (mkFuncArgs fi) fldTy
  where
    fn = fiName fi
    tab = fiReturnType fi

    name = qualObjectToName fn
    -- [table!]!
    fldTy = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy tab
    description = G.Description $ "execute function " <> fn
                  <<> " which returns " <>> tab
{-
function_aggregate(
args: function_args
where: table_bool_exp
limit: Int
offset: Int
): table_aggregate!
-}
-- | Root query field for aggregations over a tracked function's
-- result set; returns the non-null "<table>_aggregate" type.
mkFuncAggQueryFld
  :: FunctionInfo -> ObjFldInfo
mkFuncAggQueryFld funInfo =
  mkHsraObjFldInfo (Just desc) fldName (mkFuncArgs funInfo) ty
  where
    funcName = fiName funInfo
    retTable = fiReturnType funInfo

    desc = G.Description $ "execute function " <> funcName
           <<> " and query aggregates on result of table type "
           <>> retTable
    fldName = qualObjectToName funcName <> "_aggregate"
    ty = G.toGT $ G.toNT $ mkTableAggTy retTable
-- table_mutation_response
mkMutRespTy :: QualifiedTable -> G.NamedType
mkMutRespTy tn =
G.NamedType $ qualTableToName tn <> "_mutation_response"
G.NamedType $ qualObjectToName tn <> "_mutation_response"
{-
type table_mutation_response {
@ -590,6 +665,7 @@ mkMutRespObj tn sel =
where
desc = "data of the affected rows by the mutation"
-- table_bool_exp
mkBoolExpInp
:: QualifiedTable
-- the fields that are allowed
@ -630,10 +706,47 @@ mkPGColInp (PGColInfo colName colTy _) =
InpValInfo Nothing (G.Name $ getPGColTxt colName) $
G.toGT $ mkScalarTy colTy
{-
input function_args {
arg1: arg-type1!
. .
. .
argn: arg-typen!
}
-}
-- | Build the "<function>_args" input object type together with the
-- context entry that records the argument order for later resolution.
-- Returns Nothing for zero-argument functions (no input object is
-- generated).  Unnamed arguments are exposed as arg_1, arg_2, ...,
-- where the counter advances only over the unnamed ones.
mkFuncArgsInp :: FunctionInfo -> Maybe (InpObjTyInfo, FuncArgCtx)
mkFuncArgsInp funcInfo =
  bool (Just (inpObj, funcArgCtx)) Nothing $ null funcArgs
  where
    funcName = fiName funcInfo
    funcArgs = fiInputArgs funcInfo
    funcArgsTy = mkFuncArgsTy funcName

    inpObj = mkHsraInpTyInfo Nothing funcArgsTy $
             fromInpValL argInps

    -- left fold threads the generated-name counter; 'fst' drops the
    -- final counter, keeping only the accumulated (inpVal, ctx) pairs
    (argInps, ctxItems) = unzip $ fst $ foldl mkArgInps ([], 1::Int) funcArgs
    funcArgCtx = Map.singleton funcArgsTy $ Seq.fromList ctxItems

    mkArgInps (items, argNo) (FunctionArg nameM ty) =
      case nameM of
        Just argName ->
          -- named argument: expose under its own name, counter unchanged
          let argGName = G.Name $ getFuncArgNameTxt argName
              inpVal = InpValInfo Nothing argGName $
                       G.toGT $ G.toNT $ mkScalarTy ty
              argCtxItem = FuncArgItem argGName
          in (items <> pure (inpVal, argCtxItem), argNo)
        Nothing ->
          -- unnamed argument: synthesize "arg_<n>" and bump the counter
          let argGName = G.Name $ "arg_" <> T.pack (show argNo)
              inpVal = InpValInfo Nothing argGName $
                       G.toGT $ G.toNT $ mkScalarTy ty
              argCtxItem = FuncArgItem argGName
          in (items <> pure (inpVal, argCtxItem), argNo + 1)
-- table_set_input
mkUpdSetTy :: QualifiedTable -> G.NamedType
mkUpdSetTy tn =
G.NamedType $ qualTableToName tn <> "_set_input"
G.NamedType $ qualObjectToName tn <> "_set_input"
{-
input table_set_input {
@ -655,7 +768,7 @@ mkUpdSetInp tn cols =
-- table_inc_input
mkUpdIncTy :: QualifiedTable -> G.NamedType
mkUpdIncTy tn =
G.NamedType $ qualTableToName tn <> "_inc_input"
G.NamedType $ qualObjectToName tn <> "_inc_input"
{-
input table_inc_input {
@ -681,7 +794,7 @@ mkUpdIncInp tn = maybe Nothing mkType
-- table_<json-op>_input
mkJSONOpTy :: QualifiedTable -> G.Name -> G.NamedType
mkJSONOpTy tn op =
G.NamedType $ qualTableToName tn <> op <> "_input"
G.NamedType $ qualObjectToName tn <> op <> "_input"
-- json ops are _concat, _delete_key, _delete_elem, _delete_at_path
{-
@ -847,7 +960,7 @@ mkUpdMutFld tn cols =
<> mkJSONOpInpVals tn cols
desc = G.Description $ "update data of the table: " <>> tn
fldName = "update_" <> qualTableToName tn
fldName = "update_" <> qualObjectToName tn
filterArgDesc = "filter the rows which have to be updated"
filterArg =
@ -876,7 +989,7 @@ mkDelMutFld tn =
where
desc = G.Description $ "delete data from the table: " <>> tn
fldName = "delete_" <> qualTableToName tn
fldName = "delete_" <> qualObjectToName tn
filterArgDesc = "filter the rows which have to be deleted"
filterArg =
@ -886,28 +999,28 @@ mkDelMutFld tn =
-- table_insert_input
mkInsInpTy :: QualifiedTable -> G.NamedType
mkInsInpTy tn =
G.NamedType $ qualTableToName tn <> "_insert_input"
G.NamedType $ qualObjectToName tn <> "_insert_input"
-- table_obj_rel_insert_input
mkObjInsInpTy :: QualifiedTable -> G.NamedType
mkObjInsInpTy tn =
G.NamedType $ qualTableToName tn <> "_obj_rel_insert_input"
G.NamedType $ qualObjectToName tn <> "_obj_rel_insert_input"
-- table_arr_rel_insert_input
mkArrInsInpTy :: QualifiedTable -> G.NamedType
mkArrInsInpTy tn =
G.NamedType $ qualTableToName tn <> "_arr_rel_insert_input"
G.NamedType $ qualObjectToName tn <> "_arr_rel_insert_input"
-- table_on_conflict
mkOnConflictInpTy :: QualifiedTable -> G.NamedType
mkOnConflictInpTy tn =
G.NamedType $ qualTableToName tn <> "_on_conflict"
G.NamedType $ qualObjectToName tn <> "_on_conflict"
-- table_constraint
mkConstraintInpTy :: QualifiedTable -> G.NamedType
mkConstraintInpTy tn =
G.NamedType $ qualTableToName tn <> "_constraint"
G.NamedType $ qualObjectToName tn <> "_constraint"
-- conflict_action
conflictActionTy :: G.NamedType
@ -916,12 +1029,12 @@ conflictActionTy = G.NamedType "conflict_action"
-- table_update_column
mkUpdColumnInpTy :: QualifiedTable -> G.NamedType
mkUpdColumnInpTy tn =
G.NamedType $ qualTableToName tn <> "_update_column"
G.NamedType $ qualObjectToName tn <> "_update_column"
--table_select_column
mkSelColumnInpTy :: QualifiedTable -> G.NamedType
mkSelColumnInpTy tn =
G.NamedType $ qualTableToName tn <> "_select_column"
G.NamedType $ qualObjectToName tn <> "_select_column"
{-
input table_obj_rel_insert_input {
@ -1039,7 +1152,7 @@ mkInsMutFld tn isUpsertable =
desc = G.Description $
"insert data into the table: " <>> tn
fldName = "insert_" <> qualTableToName tn
fldName = "insert_" <> qualObjectToName tn
objsArgDesc = "the rows to be inserted"
objectsArg =
@ -1126,7 +1239,7 @@ mkConflictActionTy updAllowed =
mkTabAggOpOrdByTy :: QualifiedTable -> G.Name -> G.NamedType
mkTabAggOpOrdByTy tn op =
G.NamedType $ qualTableToName tn <> "_" <> op <> "_order_by"
G.NamedType $ qualObjectToName tn <> "_" <> op <> "_order_by"
{-
input table_<op>_order_by {
@ -1154,7 +1267,7 @@ mkTabAggOpOrdByInpObjs tn numCols compCols =
mkTabAggOrdByTy :: QualifiedTable -> G.NamedType
mkTabAggOrdByTy tn =
G.NamedType $ qualTableToName tn <> "_aggregate_order_by"
G.NamedType $ qualObjectToName tn <> "_aggregate_order_by"
{-
input table_aggregate_order_by {
@ -1181,7 +1294,7 @@ mkTabAggOrdByInpObj tn numCols compCols =
mkOrdByTy :: QualifiedTable -> G.NamedType
mkOrdByTy tn =
G.NamedType $ qualTableToName tn <> "_order_by"
G.NamedType $ qualObjectToName tn <> "_order_by"
{-
input table_order_by {
@ -1280,9 +1393,11 @@ mkGCtxRole'
-- constraints
-> [ConstraintName]
-> Maybe ViewInfo
-- all functions
-> [FunctionInfo]
-> TyAgg
mkGCtxRole' tn insPermM selPermM updColsM delPermM pkeyCols constraints viM =
TyAgg (mkTyInfoMap allTypes) fieldMap ordByCtx
mkGCtxRole' tn insPermM selPermM updColsM delPermM pkeyCols constraints viM funcs =
TyAgg (mkTyInfoMap allTypes) fieldMap ordByCtx funcArgCtx
where
@ -1295,8 +1410,11 @@ mkGCtxRole' tn insPermM selPermM updColsM delPermM pkeyCols constraints viM =
relInsInpObjTys = maybe [] (map TIInpObj) $
mutHelper viIsInsertable relInsInpObjsM
funcInpArgTys = bool [] (map TIInpObj funcArgInpObjs) $ isJust selFldsM
allTypes = relInsInpObjTys <> onConflictTypes <> jsonOpTys
<> queryTypes <> aggQueryTypes <> mutationTypes
<> funcInpArgTys
queryTypes = catMaybes
[ TIInpObj <$> boolExpInpObjM
@ -1356,6 +1474,10 @@ mkGCtxRole' tn insPermM selPermM updColsM delPermM pkeyCols constraints viM =
then Just $ mkBoolExpInp tn []
else Nothing
-- funcargs input type
(funcArgInpObjs, funcArgCtxs) = unzip $ mapMaybe mkFuncArgsInp funcs
funcArgCtx = Map.unions funcArgCtxs
-- helper
mkFldMap ty = Map.fromList . concatMap (mkFld ty)
mkFld ty = \case
@ -1430,16 +1552,20 @@ getRootFldsRole'
-> [PGCol]
-> [ConstraintName]
-> FieldInfoMap
-> [FunctionInfo]
-> Maybe ([T.Text], Bool) -- insert perm
-> Maybe (AnnBoolExpSQL, Maybe Int, [T.Text], Bool) -- select filter
-> Maybe ([PGCol], AnnBoolExpSQL, [T.Text]) -- update filter
-> Maybe (AnnBoolExpSQL, [T.Text]) -- delete filter
-> Maybe ViewInfo
-> RootFlds
getRootFldsRole' tn primCols constraints fields insM selM updM delM viM =
getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM =
RootFlds mFlds
where
mFlds = mapFromL (either _fiName _fiName . snd) $ catMaybes
mFlds = mapFromL (either _fiName _fiName . snd) $
funcQueries <>
funcAggQueries <>
catMaybes
[ mutHelper viIsInsertable getInsDet insM
, mutHelper viIsUpdatable getUpdDet updM
, mutHelper viIsDeletable getDelDet delM
@ -1447,6 +1573,9 @@ getRootFldsRole' tn primCols constraints fields insM selM updM delM viM =
, getPKeySelDet selM $ getColInfos primCols colInfos
]
funcQueries = maybe [] getFuncQueryFlds selM
funcAggQueries = maybe [] getFuncAggQueryFlds selM
mutHelper :: (ViewInfo -> Bool) -> (a -> b) -> Maybe a -> Maybe b
mutHelper f getDet mutM =
bool Nothing (getDet <$> mutM) $ isMutable f viM
@ -1476,6 +1605,16 @@ getRootFldsRole' tn primCols constraints fields insM selM updM delM viM =
getPKeySelDet (Just (selFltr, _, hdrs, _)) pCols = Just
(OCSelectPkey tn selFltr hdrs, Left $ mkSelFldPKey tn pCols)
getFuncQueryFlds (selFltr, pLimit, hdrs, _) =
flip map funcs $ \fi ->
(OCFuncQuery tn (fiName fi) selFltr pLimit hdrs, Left $ mkFuncQueryFld fi)
getFuncAggQueryFlds (selFltr, pLimit, hdrs, True) =
flip map funcs $ \fi ->
(OCFuncAggQuery tn (fiName fi) selFltr pLimit hdrs, Left $ mkFuncAggQueryFld fi)
getFuncAggQueryFlds _ = []
-- getRootFlds
-- :: TableCache
-- -> Map.HashMap RoleName RootFlds
@ -1572,19 +1711,20 @@ mkGCtxRole
-> FieldInfoMap
-> [PGCol]
-> [ConstraintName]
-> [FunctionInfo]
-> Maybe ViewInfo
-> RoleName
-> RolePermInfo
-> m (TyAgg, RootFlds, InsCtxMap)
mkGCtxRole tableCache tn fields pCols constraints viM role permInfo = do
mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do
selPermM <- mapM (getSelPerm tableCache fields role) $ _permSel permInfo
tabInsCtxM <- forM (_permIns permInfo) $ \ipi -> do
tic <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo
return (tic, isJust $ _permUpd permInfo)
let updColsM = filterColInfos . upiCols <$> _permUpd permInfo
tyAgg = mkGCtxRole' tn tabInsCtxM selPermM updColsM
(void $ _permDel permInfo) pColInfos constraints viM
rootFlds = getRootFldsRole tn pCols constraints fields viM permInfo
(void $ _permDel permInfo) pColInfos constraints viM funcs
rootFlds = getRootFldsRole tn pCols constraints fields funcs viM permInfo
insCtxMap = maybe Map.empty (Map.singleton tn) $ fmap fst tabInsCtxM
return (tyAgg, rootFlds, insCtxMap)
where
@ -1598,11 +1738,12 @@ getRootFldsRole
-> [PGCol]
-> [ConstraintName]
-> FieldInfoMap
-> [FunctionInfo]
-> Maybe ViewInfo
-> RolePermInfo
-> RootFlds
getRootFldsRole tn pCols constraints fields viM (RolePermInfo insM selM updM delM) =
getRootFldsRole' tn pCols constraints fields
getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM updM delM) =
getRootFldsRole' tn pCols constraints fields funcs
(mkIns <$> insM) (mkSel <$> selM)
(mkUpd <$> updM) (mkDel <$> delM)
viM
@ -1620,15 +1761,16 @@ getRootFldsRole tn pCols constraints fields viM (RolePermInfo insM selM updM del
mkGCtxMapTable
:: (MonadError QErr m)
=> TableCache
-> FunctionCache
-> TableInfo
-> m (Map.HashMap RoleName (TyAgg, RootFlds, InsCtxMap))
mkGCtxMapTable tableCache (TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _) = do
mkGCtxMapTable tableCache funcCache (TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _) = do
m <- Map.traverseWithKey
(mkGCtxRole tableCache tn fields pkeyCols validConstraints viewInfo) rolePerms
(mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo) rolePerms
adminInsCtx <- mkAdminInsCtx tn tableCache fields
let adminCtx = mkGCtxRole' tn (Just (adminInsCtx, True))
(Just (True, selFlds)) (Just colInfos) (Just ())
pkeyColInfos validConstraints viewInfo
pkeyColInfos validConstraints viewInfo tabFuncs
adminInsCtxMap = Map.singleton tn adminInsCtx
return $ Map.insert adminRole (adminCtx, adminRootFlds, adminInsCtxMap) m
where
@ -1636,11 +1778,12 @@ mkGCtxMapTable tableCache (TableInfo tn _ fields rolePerms constraints pkeyCols
colInfos = getValidCols fields
allCols = map pgiName colInfos
pkeyColInfos = getColInfos pkeyCols colInfos
tabFuncs = getFuncsOfTable tn funcCache
selFlds = flip map (toValidFieldInfos fields) $ \case
FIColumn pgColInfo -> Left pgColInfo
FIRelationship relInfo -> Right (relInfo, True, noFilter, Nothing, isRelNullable fields relInfo)
adminRootFlds =
getRootFldsRole' tn pkeyCols validConstraints fields
getRootFldsRole' tn pkeyCols validConstraints fields tabFuncs
(Just ([], True)) (Just (noFilter, Nothing, [], True))
(Just (allCols, noFilter, [])) (Just (noFilter, []))
viewInfo
@ -1751,16 +1894,16 @@ checkConflictingNode gCtx node = do
mkGCtxMap
:: (MonadError QErr m)
=> TableCache -> m GCtxMap
mkGCtxMap tableCache = do
typesMapL <- mapM (mkGCtxMapTable tableCache) $
=> TableCache -> FunctionCache -> m GCtxMap
mkGCtxMap tableCache functionCache = do
typesMapL <- mapM (mkGCtxMapTable tableCache functionCache) $
filter tableFltr $ Map.elems tableCache
let typesMap = foldr (Map.unionWith mappend) Map.empty typesMapL
return $ flip Map.map typesMap $ \(ty, flds, insCtxMap) ->
mkGCtx ty flds insCtxMap
where
tableFltr ti = not (tiSystemDefined ti)
&& isValidTableName (tiName ti)
&& isValidObjectName (tiName ti)
-- mkGCtx :: TyAgg -> RootFlds -> InsCtxMap -> GCtx

View File

@ -9,15 +9,15 @@ module Hasura.RQL.DDL.Deps
import Hasura.Prelude
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import Hasura.RQL.Types
import Hasura.SQL.Types
purgeRel :: QualifiedTable -> RelName -> Q.Tx ()
purgeRel (QualifiedTable sn tn) rn =
purgeRel (QualifiedObject sn tn) rn =
Q.unitQ [Q.sql|
DELETE FROM hdb_catalog.hdb_relationship
WHERE table_schema = $1
@ -45,8 +45,8 @@ parseDropNotice t = do
where
dottedTxtToQualTable dt =
case T.split (=='.') dt of
[tn] -> return $ QualifiedTable publicSchema $ TableName tn
[sn, tn] -> return $ QualifiedTable (SchemaName sn) $ TableName tn
[tn] -> return $ QualifiedObject publicSchema $ TableName tn
[sn, tn] -> return $ QualifiedObject (SchemaName sn) $ TableName tn
_ -> throw400 ParseFailed $ "parsing dotted table failed : " <> dt
getCascadeLines = do

View File

@ -1,3 +1,4 @@
{-# LANGUAGE QuasiQuotes #-}
module Hasura.RQL.DDL.Metadata
( TableMeta
@ -22,12 +23,12 @@ import Control.Lens
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.List as L
import qualified Data.Text as T
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.List as L
import qualified Data.Text as T
import Hasura.GraphQL.Utils
import Hasura.Prelude
@ -35,15 +36,16 @@ import Hasura.RQL.DDL.Utils
import Hasura.RQL.Types
import Hasura.SQL.Types
import qualified Database.PG.Query as Q
import qualified Hasura.RQL.DDL.Permission as DP
import qualified Hasura.RQL.DDL.QueryTemplate as DQ
import qualified Hasura.RQL.DDL.Relationship as DR
import qualified Hasura.RQL.DDL.RemoteSchema as DRS
import qualified Hasura.RQL.DDL.Schema.Table as DT
import qualified Hasura.RQL.DDL.Subscribe as DS
import qualified Hasura.RQL.Types.RemoteSchema as TRS
import qualified Hasura.RQL.Types.Subscribe as DTS
import qualified Database.PG.Query as Q
import qualified Hasura.RQL.DDL.Permission as DP
import qualified Hasura.RQL.DDL.QueryTemplate as DQ
import qualified Hasura.RQL.DDL.Relationship as DR
import qualified Hasura.RQL.DDL.RemoteSchema as DRS
import qualified Hasura.RQL.DDL.Schema.Function as DF
import qualified Hasura.RQL.DDL.Schema.Table as DT
import qualified Hasura.RQL.DDL.Subscribe as DS
import qualified Hasura.RQL.Types.RemoteSchema as TRS
import qualified Hasura.RQL.Types.Subscribe as DTS
data TableMeta
= TableMeta
@ -113,6 +115,7 @@ instance FromJSON ClearMetadata where
clearMetadata :: Q.TxE QErr ()
clearMetadata = Q.catchE defaultTxErrorHandler $ do
Q.unitQ "DELETE FROM hdb_catalog.hdb_query_template WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_function WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_permission WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_relationship WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_table WHERE is_system_defined <> 'true'" () False
@ -134,6 +137,7 @@ data ReplaceMetadata
= ReplaceMetadata
{ aqTables :: ![TableMeta]
, aqQueryTemplates :: ![DQ.CreateQueryTemplate]
, aqFunctions :: !(Maybe [QualifiedFunction])
, aqRemoteSchemas :: !(Maybe [TRS.AddRemoteSchemaQuery])
} deriving (Show, Eq, Lift)
@ -142,7 +146,7 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ReplaceMetadata)
applyQP1
:: (QErrM m, UserInfoM m)
=> ReplaceMetadata -> m ()
applyQP1 (ReplaceMetadata tables templates mSchemas) = do
applyQP1 (ReplaceMetadata tables templates mFunctions mSchemas) = do
adminOnly
@ -171,12 +175,16 @@ applyQP1 (ReplaceMetadata tables templates mSchemas) = do
withPathK "queryTemplates" $
checkMultipleDecls "query templates" $ map DQ.cqtName templates
withPathK "functions" $
checkMultipleDecls "functions" functions
onJust mSchemas $ \schemas ->
withPathK "remote_schemas" $
checkMultipleDecls "remote schemas" $ map TRS._arsqName schemas
where
withTableName qt = withPathK (qualTableToTxt qt)
withTableName qt = withPathK (qualObjectToText qt)
functions = fromMaybe [] mFunctions
checkMultipleDecls t l = do
let dups = getDups l
@ -196,7 +204,7 @@ applyQP2
)
=> ReplaceMetadata
-> m RespBody
applyQP2 (ReplaceMetadata tables templates mSchemas) = do
applyQP2 (ReplaceMetadata tables templates mFunctions mSchemas) = do
liftTx clearMetadata
DT.buildSchemaCache
@ -240,6 +248,10 @@ applyQP2 (ReplaceMetadata tables templates mSchemas) = do
qti <- DQ.createQueryTemplateP1 template
void $ DQ.createQueryTemplateP2 template qti
-- sql functions
withPathK "functions" $
indexedMapM_ (void . DF.trackFunctionP2) functions
-- remote schemas
onJust mSchemas $ \schemas ->
withPathK "remote_schemas" $
@ -249,6 +261,7 @@ applyQP2 (ReplaceMetadata tables templates mSchemas) = do
return successMsg
where
functions = fromMaybe [] mFunctions
processPerms tabInfo perms =
indexedForM_ perms $ \permDef -> do
permInfo <- DP.addPermP1 tabInfo permDef
@ -273,8 +286,7 @@ $(deriveToJSON defaultOptions ''ExportMetadata)
fetchMetadata :: Q.TxE QErr ReplaceMetadata
fetchMetadata = do
tables <- Q.catchE defaultTxErrorHandler fetchTables
let qts = map (uncurry QualifiedTable) tables
let qts = map (uncurry QualifiedObject) tables
tableMetaMap = M.fromList $ zip qts $ map mkTableMeta qts
-- Fetch all the relationships
@ -312,10 +324,15 @@ fetchMetadata = do
modMetaMap tmDeletePermissions delPermDefs
modMetaMap tmEventTriggers triggerMetaDefs
-- fetch all functions
functions <- map (uncurry QualifiedObject) <$>
Q.catchE defaultTxErrorHandler fetchFunctions
-- fetch all custom resolvers
schemas <- DRS.fetchRemoteSchemas
return $ ReplaceMetadata (M.elems postRelMap) qTmpltDefs (Just schemas)
return $ ReplaceMetadata (M.elems postRelMap) qTmpltDefs
(Just functions) (Just schemas)
where
@ -327,19 +344,19 @@ fetchMetadata = do
permRowToDef (sn, tn, rn, _, Q.AltJ pDef, mComment) = do
perm <- decodeValue pDef
return (QualifiedTable sn tn, DP.PermDef rn perm mComment)
return (QualifiedObject sn tn, DP.PermDef rn perm mComment)
mkRelDefs rt = mapM relRowToDef . filter (\rr -> rr ^. _4 == rt)
relRowToDef (sn, tn, rn, _, Q.AltJ rDef, mComment) = do
using <- decodeValue rDef
return (QualifiedTable sn tn, DR.RelDef rn using mComment)
return (QualifiedObject sn tn, DR.RelDef rn using mComment)
mkTriggerMetaDefs = mapM trigRowToDef
trigRowToDef (sn, tn, Q.AltJ configuration) = do
conf <- decodeValue configuration
return (QualifiedTable sn tn, conf::EventTriggerConf)
return (QualifiedObject sn tn, conf::EventTriggerConf)
fetchTables =
Q.listQ [Q.sql|
@ -372,6 +389,12 @@ fetchMetadata = do
SELECT e.schema_name, e.table_name, e.configuration::json
FROM hdb_catalog.event_triggers e
|] () False
fetchFunctions =
Q.listQ [Q.sql|
SELECT function_schema, function_name
FROM hdb_catalog.hdb_function
WHERE is_system_defined = 'false'
|] () False
runExportMetadata
:: (QErrM m, UserInfoM m, MonadTx m)

View File

@ -79,8 +79,8 @@ type InsPermDef = PermDef InsPerm
type CreateInsPerm = CreatePerm InsPerm
buildViewName :: QualifiedTable -> RoleName -> PermType -> QualifiedTable
buildViewName (QualifiedTable sn tn) (RoleName rTxt) pt =
QualifiedTable hdbViewsSchema $ TableName
buildViewName (QualifiedObject sn tn) (RoleName rTxt) pt =
QualifiedObject hdbViewsSchema $ TableName
(rTxt <> "__" <> T.pack (show pt) <> "__" <> snTxt <> "__" <> tnTxt)
where
hdbViewsSchema = SchemaName "hdb_views"
@ -400,7 +400,7 @@ runSetPermComment defn = do
setPermCommentTx
:: SetPermComment
-> Q.TxE QErr ()
setPermCommentTx (SetPermComment (QualifiedTable sn tn) rn pt comment) =
setPermCommentTx (SetPermComment (QualifiedObject sn tn) rn pt comment) =
Q.unitQE defaultTxErrorHandler [Q.sql|
UPDATE hdb_catalog.hdb_permission
SET comment = $1

View File

@ -104,7 +104,7 @@ savePermToCatalog
-> QualifiedTable
-> PermDef a
-> Q.TxE QErr ()
savePermToCatalog pt (QualifiedTable sn tn) (PermDef rn qdef mComment) =
savePermToCatalog pt (QualifiedObject sn tn) (PermDef rn qdef mComment) =
Q.unitQE defaultTxErrorHandler [Q.sql|
INSERT INTO
hdb_catalog.hdb_permission
@ -117,7 +117,7 @@ dropPermFromCatalog
-> RoleName
-> PermType
-> Q.TxE QErr ()
dropPermFromCatalog (QualifiedTable sn tn) rn pt =
dropPermFromCatalog (QualifiedObject sn tn) rn pt =
Q.unitQE defaultTxErrorHandler [Q.sql|
DELETE FROM
hdb_catalog.hdb_permission
@ -217,8 +217,8 @@ injectDefaults qv qt =
]
where
QualifiedTable (SchemaName vsn) (TableName vn) = qv
QualifiedTable (SchemaName tsn) (TableName tn) = qt
QualifiedObject (SchemaName vsn) (TableName vn) = qv
QualifiedObject (SchemaName tsn) (TableName tn) = qt
data DropPerm a
= DropPerm

View File

@ -5,15 +5,15 @@ import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.SQL.Types
import qualified Database.PG.Query as Q
import qualified Hasura.SQL.DML as S
import qualified Database.PG.Query as Q
import qualified Hasura.SQL.DML as S
import qualified Data.Aeson as J
import qualified Data.FileEmbed as FE
import qualified Data.Text as T
import qualified Data.Aeson as J
import qualified Data.FileEmbed as FE
import qualified Data.Text as T
buildInsTrig :: QualifiedTable -> Q.Query
buildInsTrig qt@(QualifiedTable _ tn) =
buildInsTrig qt@(QualifiedObject _ tn) =
Q.fromBuilder $ mconcat
[ "CREATE TRIGGER " <> toSQL tn
, " INSTEAD OF INSERT ON " <> toSQL qt

View File

@ -112,7 +112,7 @@ persistRel :: QualifiedTable
-> Value
-> Maybe T.Text
-> Q.TxE QErr ()
persistRel (QualifiedTable sn tn) rn relType relDef comment =
persistRel (QualifiedObject sn tn) rn relType relDef comment =
Q.unitQE defaultTxErrorHandler [Q.sql|
INSERT INTO
hdb_catalog.hdb_relationship
@ -184,14 +184,14 @@ objRelP2Setup qt (RelDef rn ru _) = do
-- the constraint name will help.
, SchemaDependency (SOTable refqt) "remote_table"
]
refqt = QualifiedTable refsn reftn
refqt = QualifiedObject refsn reftn
void $ askTabInfo refqt
return (RelInfo rn ObjRel colMapping refqt False, deps)
_ -> throw400 ConstraintError
"more than one foreign key constraint exists on the given column"
addRelToCache rn relInfo deps qt
where
QualifiedTable sn tn = qt
QualifiedObject sn tn = qt
fetchFKeyDetail cn =
Q.listQ [Q.sql|
SELECT constraint_name, ref_table_table_schema, ref_table, column_mapping
@ -279,7 +279,7 @@ arrRelP2Setup qt (RelDef rn ru _) = do
<> map (\c -> SchemaDependency (SOTableObj refqt $ TOCol c) "rcol") rCols
return (RelInfo rn ArrRel (zip lCols rCols) refqt True, deps)
RUFKeyOn (ArrRelUsingFKeyOn refqt refCol) -> do
let QualifiedTable refSn refTn = refqt
let QualifiedObject refSn refTn = refqt
res <- liftTx $ Q.catchE defaultTxErrorHandler $
fetchFKeyDetail refSn refTn refCol
case mapMaybe processRes res of
@ -298,7 +298,7 @@ arrRelP2Setup qt (RelDef rn ru _) = do
"more than one foreign key constraint exists on the given column"
addRelToCache rn relInfo deps qt
where
QualifiedTable sn tn = qt
QualifiedObject sn tn = qt
fetchFKeyDetail refsn reftn refcn = Q.listQ [Q.sql|
SELECT constraint_name, column_mapping
FROM hdb_catalog.hdb_foreign_key_constraint
@ -381,7 +381,7 @@ delRelFromCatalog
:: QualifiedTable
-> RelName
-> Q.TxE QErr ()
delRelFromCatalog (QualifiedTable sn tn) rn =
delRelFromCatalog (QualifiedObject sn tn) rn =
Q.unitQE defaultTxErrorHandler [Q.sql|
DELETE FROM
hdb_catalog.hdb_relationship
@ -421,7 +421,7 @@ runSetRelComment defn = do
setRelComment :: SetRelComment
-> Q.TxE QErr ()
setRelComment (SetRelComment (QualifiedTable sn tn) rn comment) =
setRelComment (SetRelComment (QualifiedObject sn tn) rn comment) =
Q.unitQE defaultTxErrorHandler [Q.sql|
UPDATE hdb_catalog.hdb_relationship
SET comment = $1

View File

@ -45,7 +45,7 @@ addRemoteSchemaP2 q@(AddRemoteSchemaQuery name def _) = do
let defRemoteGCtx = scDefaultRemoteGCtx sc
remoteGCtx <- fetchRemoteSchema manager name rsi
newDefGCtx <- mergeGCtx defRemoteGCtx $ convRemoteGCtx remoteGCtx
newHsraGCtxMap <- GS.mkGCtxMap (scTables sc)
newHsraGCtxMap <- GS.mkGCtxMap (scTables sc) (scFunctions sc)
newGCtxMap <- mergeRemoteSchema newHsraGCtxMap newDefGCtx
liftTx $ addRemoteSchemaToCatalog q
addRemoteSchemaToCache newGCtxMap newDefGCtx name rsi
@ -80,7 +80,7 @@ refreshGCtxMapInSchema
=> m ()
refreshGCtxMapInSchema = do
sc <- askSchemaCache
gCtxMap <- GS.mkGCtxMap (scTables sc)
gCtxMap <- GS.mkGCtxMap (scTables sc) (scFunctions sc)
httpMgr <- askHttpManager
(mergedGCtxMap, defGCtx) <-
mergeSchemas (scRemoteResolvers sc) gCtxMap httpMgr
@ -117,7 +117,7 @@ removeRemoteSchemaP2 (RemoveRemoteSchemaQuery name) = do
let resolvers = scRemoteResolvers sc
newResolvers = Map.filterWithKey (\n _ -> n /= name) resolvers
newGCtxMap <- GS.mkGCtxMap (scTables sc)
newGCtxMap <- GS.mkGCtxMap (scTables sc) (scFunctions sc)
(mergedGCtxMap, defGCtx) <- mergeSchemas newResolvers newGCtxMap hMgr
removeRemoteSchemaFromCache newResolvers mergedGCtxMap defGCtx
liftTx $ removeRemoteSchemaFromCatalog name

View File

@ -11,6 +11,10 @@ module Hasura.RQL.DDL.Schema.Diff
, SchemaDiff(..)
, getSchemaDiff
, getSchemaChangeDeps
, FunctionMeta(..)
, fetchFunctionMeta
, getDroppedFuncs
) where
import Hasura.Prelude
@ -107,7 +111,7 @@ fetchTableMeta = do
AND t.table_schema <> 'hdb_catalog'
|] () False
forM res $ \(ts, tn, toid, cols, constrnts) ->
return $ TableMeta toid (QualifiedTable ts tn) (Q.getAltJ cols) (Q.getAltJ constrnts)
return $ TableMeta toid (QualifiedObject ts tn) (Q.getAltJ cols) (Q.getAltJ constrnts)
getOverlap :: (Eq k, Hashable k) => (v -> k) -> [v] -> [v] -> [(v, v)]
getOverlap getKey left right =
@ -219,3 +223,28 @@ getSchemaChangeDeps schemaDiff = do
isDirectDep (SOTableObj tn _) = tn `HS.member` (HS.fromList droppedTables)
isDirectDep _ = False
-- | Minimal function metadata used to diff the schema before and
-- after running raw SQL; the pg_proc oid identifies a function across
-- the run.
data FunctionMeta
  = FunctionMeta
  { fmOid      :: !Int               -- ^ pg_proc.oid
  , fmFunction :: !QualifiedFunction -- ^ schema-qualified name
  } deriving (Show, Eq)
-- | Fetch oid and qualified name of every function known to
-- hdb_function_agg outside the hdb_catalog schema.
-- NOTE(review): the join matches on proname alone, without a
-- pg_namespace check; if two schemas define same-named functions this
-- could return the wrong oid — confirm hdb_function_agg guarantees
-- uniqueness here.
fetchFunctionMeta :: Q.Tx [FunctionMeta]
fetchFunctionMeta = do
  res <- Q.listQ [Q.sql|
           SELECT
             f.function_schema,
             f.function_name,
             p.oid
           FROM hdb_catalog.hdb_function_agg f
           JOIN pg_catalog.pg_proc p ON (p.proname = f.function_name)
           WHERE
             f.function_schema <> 'hdb_catalog'
          |] () False
  forM res $ \(sn, fn, foid) ->
    return $ FunctionMeta foid $ QualifiedObject sn fn
-- | Functions present (by pg_proc oid) in the old metadata but absent
-- from the new, i.e. dropped by the executed SQL.
getDroppedFuncs :: [FunctionMeta] -> [FunctionMeta] -> [QualifiedFunction]
getDroppedFuncs oldMeta newMeta =
  let droppedMeta = getDifference fmOid oldMeta newMeta
  in fmFunction <$> droppedMeta

View File

@ -0,0 +1,181 @@
module Hasura.RQL.DDL.Schema.Function where
import Hasura.GraphQL.Utils (isValidName, showNames)
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import qualified Hasura.GraphQL.Schema as GS
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.HashMap.Strict as M
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Database.PG.Query as Q
-- | Classification of a Postgres type, as reported in the
-- @return_type_type@ column of @hdb_catalog.hdb_function_agg@. The JSON
-- tags (constructor name minus the @PT@ prefix) must match the strings
-- that view emits — including the historical misspelling \"PSUEDO\", so
-- do not "fix" the spelling here without changing the view too.
data PGTypType
  = PTBASE
  | PTCOMPOSITE
  | PTDOMAIN
  | PTENUM
  | PTRANGE
  | PTPSUEDO
  deriving (Show, Eq)

$(deriveJSON defaultOptions{constructorTagModifier = drop 2} ''PGTypType)
-- | Raw function metadata decoded from the JSON object produced by
-- @src-rsr/function_info.sql@ — one field per column of that query.
-- Field names map to the snake_case JSON keys via @aesonDrop 3 snakeCase@.
data RawFuncInfo
  = RawFuncInfo
  { rfiHasVariadic      :: !Bool
  , rfiFunctionType     :: !FunctionType
  , rfiReturnTypeSchema :: !SchemaName
  , rfiReturnTypeName   :: !T.Text
  , rfiReturnTypeType   :: !PGTypType
  , rfiReturnsSet       :: !Bool
  , rfiInputArgTypes    :: ![PGColType]
  , rfiInputArgNames    :: ![T.Text]
    -- whether the return type is an actual table (per information_schema)
  , rfiReturnsTable     :: !Bool
  } deriving (Show, Eq)

$(deriveFromJSON (aesonDrop 3 snakeCase) ''RawFuncInfo)
-- | Pair up argument types with their names. Postgres reports either no
-- names at all (all-positional) or one name slot per argument, where a
-- positional argument shows up as the empty string.
mkFunctionArgs :: [PGColType] -> [T.Text] -> [FunctionArg]
mkFunctionArgs tys argNames
  | null argNames = map (FunctionArg Nothing) tys
  | otherwise     = zipWith namedArg argNames tys
  where
    -- an empty name slot means the argument is positional
    namedArg nameTxt ty
      | T.null nameTxt = FunctionArg Nothing ty
      | otherwise      = FunctionArg (Just $ FunctionArgName nameTxt) ty
-- | Reject the function if any of its /named/ arguments is not a valid
-- GraphQL name (unnamed arguments have no GraphQL-visible name to check).
validateFuncArgs :: MonadError QErr m => [FunctionArg] -> m ()
validateFuncArgs args = do
  let namedArgTexts = mapMaybe (fmap getFuncArgNameTxt . faName) args
      offending = [n | n <- map G.Name namedArgTexts, not (isValidName n)]
  unless (null offending) $ throw400 NotSupported $
    "arguments: " <> showNames offending
    <> " are not in compliance with GraphQL spec"
-- | Validate a function's raw metadata and build its 'FunctionInfo'.
-- A function is only trackable when it is non-variadic, non-VOLATILE,
-- returns SETOF of a composite type that is an actual table, and all its
-- named arguments are valid GraphQL names.
mkFunctionInfo :: QualifiedFunction -> RawFuncInfo -> Q.TxE QErr FunctionInfo
mkFunctionInfo qf rawFuncInfo = do
  -- throw error if the function has VARIADIC parameters
  when hasVariadic $ throw400 NotSupported "function with \"VARIADIC\" parameters are not supported"
  -- throw error if the return type is not a composite type
  when (retTyTyp /= PTCOMPOSITE) $ throw400 NotSupported "function does not return a \"COMPOSITE\" type"
  -- throw error if the function does not return a SETOF
  unless retSet $ throw400 NotSupported "function does not return a SETOF"
  -- throw error if the returned composite type is not an actual table
  unless returnsTab $ throw400 NotSupported "function does not return a SETOF table"
  -- throw error if the function is VOLATILE (only STABLE/IMMUTABLE allowed)
  when (funTy == FTVOLATILE) $ throw400 NotSupported "function of type \"VOLATILE\" is not supported now"

  let funcArgs = mkFunctionArgs inpArgTyps inpArgNames
  validateFuncArgs funcArgs

  let funcArgsSeq = Seq.fromList funcArgs
      -- depending on the return table keeps this function from outliving
      -- it in the schema cache
      dep = SchemaDependency (SOTable retTable) "table"
      retTable = QualifiedObject retSn (TableName retN)
  -- 'False': this is a user-tracked function, not system defined
  return $ FunctionInfo qf False funTy funcArgsSeq retTable [dep]
  where
    RawFuncInfo hasVariadic funTy retSn retN retTyTyp
                retSet inpArgTyps inpArgNames returnsTab
                = rawFuncInfo
-- | Build function info: fetch the function's metadata via
-- @src-rsr/function_info.sql@ and validate it into a 'FunctionInfo'.
-- Fails when the function does not exist, or when the query returns more
-- than one row (one row per matching pg_proc entry ⇒ overloaded).
getFunctionInfo :: QualifiedFunction -> Q.TxE QErr FunctionInfo
getFunctionInfo qf@(QualifiedObject sn fn) = do
  -- fetch function details as a single JSON object per matching row
  funcData <- Q.catchE defaultTxErrorHandler $
    Q.listQ $(Q.sqlFromFile "src-rsr/function_info.sql") (sn, fn) True

  case funcData of
    [] ->
      throw400 NotExists $ "no such function exists in postgres : " <>> qf
    [Identity (Q.AltJ rawFuncInfo)] -> mkFunctionInfo qf rawFuncInfo
    _ ->
      throw400 NotSupported $
      "function " <> qf <<> " is overloaded. Overloaded functions are not supported"
-- | Persist a tracked function in the @hdb_catalog.hdb_function@ registry.
-- The 'Bool' marks it as system defined.
saveFunctionToCatalog :: QualifiedFunction -> Bool -> Q.TxE QErr ()
saveFunctionToCatalog (QualifiedObject sn fn) isSystemDefined =
  Q.unitQE defaultTxErrorHandler [Q.sql|
         INSERT INTO "hdb_catalog"."hdb_function" VALUES ($1, $2, $3)
                 |] (sn, fn, isSystemDefined) False

-- | Remove a function from the @hdb_catalog.hdb_function@ registry,
-- i.e. untrack it in the catalog (the cache is updated separately).
delFunctionFromCatalog :: QualifiedFunction -> Q.TxE QErr ()
delFunctionFromCatalog (QualifiedObject sn fn) =
  Q.unitQE defaultTxErrorHandler [Q.sql|
         DELETE FROM hdb_catalog.hdb_function
         WHERE function_schema = $1
           AND function_name = $2
         |] (sn, fn) False
-- | @track_function@ API payload: the function to expose over GraphQL.
newtype TrackFunction
  = TrackFunction
  { tfName :: QualifiedFunction}
  deriving (Show, Eq, FromJSON, ToJSON, Lift)

-- | Phase 1 of @track_function@: admin-only, and the function must not
-- already be present in the schema cache.
trackFunctionP1
  :: (CacheRM m, UserInfoM m, QErrM m) => TrackFunction -> m ()
trackFunctionP1 (TrackFunction qf) = do
  adminOnly
  rawSchemaCache <- askSchemaCache
  when (M.member qf $ scFunctions rawSchemaCache) $
    throw400 AlreadyTracked $ "function already tracked : " <>> qf
-- | Cache-only part of tracking a function: build its 'FunctionInfo' and
-- add it to the schema cache, requiring the table of its SETOF return
-- type to already be tracked. Also reused when rebuilding the schema
-- cache from the catalog.
trackFunctionP2Setup :: (QErrM m, CacheRWM m, MonadTx m)
                     => QualifiedFunction -> m ()
trackFunctionP2Setup qf = do
  fi <- withPathK "name" $ liftTx $ getFunctionInfo qf
  let retTable = fiReturnType fi
      err = err400 NotExists $ "table " <> retTable <<> " is not tracked"
  sc <- askSchemaCache
  -- the return table must be tracked for the function to be exposable
  void $ liftMaybe err $ M.lookup retTable $ scTables sc
  addFunctionToCache fi

-- | Phase 2 of @track_function@: ensure the GraphQL field name derived
-- from the function is spec-valid and does not clash with the default
-- (remote-merged) GraphQL context, then update the cache and persist the
-- function to the catalog.
trackFunctionP2 :: (QErrM m, CacheRWM m, MonadTx m)
                => QualifiedFunction -> m RespBody
trackFunctionP2 qf = do
  sc <- askSchemaCache
  let defGCtx = scDefaultRemoteGCtx sc
      funcNameGQL = GS.qualObjectToName qf
  -- check function name is in compliance with GraphQL spec
  unless (isValidName funcNameGQL) $ throw400 NotSupported $
    "function name " <> qf <<> " is not in compliance with GraphQL spec"
  -- check for conflicts in remote schema
  GS.checkConflictingNode defGCtx funcNameGQL
  trackFunctionP2Setup qf
  liftTx $ saveFunctionToCatalog qf False
  return successMsg
-- | Handler for the @track_function@ API: validate ('trackFunctionP1'),
-- then set up cache and catalog ('trackFunctionP2').
runTrackFunc
  :: ( QErrM m, CacheRWM m, MonadTx m
     , UserInfoM m
     )
  => TrackFunction -> m RespBody
runTrackFunc q = do
  trackFunctionP1 q
  trackFunctionP2 $ tfName q

-- | @untrack_function@ API payload.
newtype UnTrackFunction
  = UnTrackFunction
  { utfName :: QualifiedFunction }
  deriving (Show, Eq, FromJSON, ToJSON, Lift)

-- | Handler for the @untrack_function@ API: admin-only; the function must
-- currently be tracked ('askFunctionInfo' throws otherwise). Removes it
-- from both the catalog and the schema cache.
runUntrackFunc
  :: ( QErrM m, CacheRWM m, MonadTx m
     , UserInfoM m
     )
  => UnTrackFunction -> m RespBody
runUntrackFunc (UnTrackFunction qf) = do
  adminOnly
  void $ askFunctionInfo qf
  liftTx $ delFunctionFromCatalog qf
  delFunctionFromCache qf
  return successMsg

View File

@ -9,6 +9,7 @@ import Hasura.RQL.DDL.QueryTemplate
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.Schema.Diff
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Subscribe
import Hasura.RQL.DDL.Utils
import Hasura.RQL.Types
@ -32,21 +33,21 @@ import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Language.GraphQL.Draft.Syntax as G
delTableFromCatalog :: QualifiedTable -> Q.Tx ()
delTableFromCatalog (QualifiedTable sn tn) =
delTableFromCatalog (QualifiedObject sn tn) =
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_table"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
saveTableToCatalog :: QualifiedTable -> Q.Tx ()
saveTableToCatalog (QualifiedTable sn tn) =
saveTableToCatalog (QualifiedObject sn tn) =
Q.unitQ [Q.sql|
INSERT INTO "hdb_catalog"."hdb_table" VALUES ($1, $2)
|] (sn, tn) False
-- Build the TableInfo with all its columns
getTableInfo :: QualifiedTable -> Bool -> Q.TxE QErr TableInfo
getTableInfo qt@(QualifiedTable sn tn) isSystemDefined = do
getTableInfo qt@(QualifiedObject sn tn) isSystemDefined = do
tableData <- Q.catchE defaultTxErrorHandler $
Q.listQ $(Q.sqlFromFile "src-rsr/table_info.sql")(sn, tn) True
case tableData of
@ -92,8 +93,8 @@ trackExistingTableOrViewP2 vn isSystemDefined = do
return successMsg
where
getSchemaN = getSchemaTxt . qtSchema
getTableN = getTableTxt . qtTable
getSchemaN = getSchemaTxt . qSchema
getTableN = getTableTxt . qName
tn = case getSchemaN vn of
"public" -> getTableN vn
_ -> getSchemaN vn <> "_" <> getTableN vn
@ -122,6 +123,10 @@ purgeDep schemaObjId = case schemaObjId of
liftTx $ delQTemplateFromCatalog qtn
delQTemplateFromCache qtn
(SOFunction qf) -> do
liftTx $ delFunctionFromCatalog qf
delFunctionFromCache qf
(SOTableObj qt (TOTrigger trn)) -> do
liftTx $ delEventTriggerFromCatalog trn
delEventTriggerFromCache qt trn
@ -179,7 +184,7 @@ processTableChanges ti tableDiff = do
delTableAndDirectDeps
:: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
delTableAndDirectDeps qtn@(QualifiedTable sn tn) = do
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_relationship"
@ -237,7 +242,7 @@ unTrackExistingTableOrViewP2
unTrackExistingTableOrViewP2 (UntrackTable qtn cascade) = do
sc <- askSchemaCache
-- Get relational and query template dependants
-- Get relational, query template and function dependants
let allDeps = getDependentObjs sc (SOTable qtn)
indirectDeps = filter (not . isDirectDep) allDeps
@ -278,7 +283,7 @@ buildSchemaCache = do
tables <- liftTx $ Q.catchE defaultTxErrorHandler fetchTables
forM_ tables $ \(sn, tn, isSystemDefined) ->
modifyErr (\e -> "table " <> tn <<> "; " <> e) $
trackExistingTableOrViewP2Setup (QualifiedTable sn tn) isSystemDefined
trackExistingTableOrViewP2Setup (QualifiedObject sn tn) isSystemDefined
-- Fetch all the relationships
relationships <- liftTx $ Q.catchE defaultTxErrorHandler fetchRelationships
@ -287,10 +292,10 @@ buildSchemaCache = do
modifyErr (\e -> "table " <> tn <<> "; rel " <> rn <<> "; " <> e) $ case rt of
ObjRel -> do
using <- decodeValue rDef
objRelP2Setup (QualifiedTable sn tn) $ RelDef rn using Nothing
objRelP2Setup (QualifiedObject sn tn) $ RelDef rn using Nothing
ArrRel -> do
using <- decodeValue rDef
arrRelP2Setup (QualifiedTable sn tn) $ RelDef rn using Nothing
arrRelP2Setup (QualifiedObject sn tn) $ RelDef rn using Nothing
-- Fetch all the permissions
permissions <- liftTx $ Q.catchE defaultTxErrorHandler fetchPermissions
@ -315,15 +320,20 @@ buildSchemaCache = do
forM_ eventTriggers $ \(sn, tn, trid, trn, Q.AltJ configuration) -> do
etc <- decodeValue configuration
let qt = QualifiedTable sn tn
let qt = QualifiedObject sn tn
subTableP2Setup qt trid etc
allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
liftTx $ mkTriggerQ trid trn qt allCols (etcDefinition etc)
functions <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctions
forM_ functions $ \(sn, fn) ->
modifyErr (\e -> "function " <> fn <<> "; " <> e) $
trackFunctionP2Setup (QualifiedObject sn fn)
-- remote schemas
res <- liftTx fetchRemoteSchemas
sc <- askSchemaCache
gCtxMap <- GS.mkGCtxMap (scTables sc)
gCtxMap <- GS.mkGCtxMap (scTables sc) (scFunctions sc)
remoteScConf <- forM res $ \(AddRemoteSchemaQuery n def _) ->
(,) n <$> validateRemoteSchemaDef def
@ -337,7 +347,7 @@ buildSchemaCache = do
permHelper sn tn rn pDef pa = do
qCtx <- mkAdminQCtx <$> askSchemaCache
perm <- decodeValue pDef
let qt = QualifiedTable sn tn
let qt = QualifiedObject sn tn
permDef = PermDef rn perm Nothing
createPerm = WithTable qt permDef
(permInfo, deps) <- liftP1WithQCtx qCtx $ createPermP1 createPerm
@ -373,6 +383,11 @@ buildSchemaCache = do
SELECT e.schema_name, e.table_name, e.id, e.name, e.configuration::json
FROM hdb_catalog.event_triggers e
|] () False
fetchFunctions =
Q.listQ [Q.sql|
SELECT function_schema, function_name
FROM hdb_catalog.hdb_function
|] () False
data RunSQL
= RunSQL
@ -409,16 +424,22 @@ execWithMDCheck (RunSQL t cascade _) = do
-- Get the metadata before the sql query, everything, need to filter this
oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
oldFuncMetaU <-
liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta
-- Run the SQL
res <- execRawSQL t
-- Get the metadata after the sql query
newMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
newFuncMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta
sc <- askSchemaCache
let existingTables = M.keys $ scTables sc
oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables
schemaDiff = getSchemaDiff oldMeta newMeta
existingFuncs = M.keys $ scFunctions sc
oldFuncMeta = flip filter oldFuncMetaU $ \fm -> fmFunction fm `elem` existingFuncs
droppedFuncs = getDroppedFuncs oldFuncMeta newFuncMeta
indirectDeps <- getSchemaChangeDeps schemaDiff
@ -428,6 +449,16 @@ execWithMDCheck (RunSQL t cascade _) = do
-- Purge all the indirect dependents from state
mapM_ purgeDep indirectDeps
-- Purge all dropped functions
let purgedFuncs = flip mapMaybe indirectDeps $ \dep ->
case dep of
SOFunction qf -> Just qf
_ -> Nothing
forM_ (droppedFuncs \\ purgedFuncs) $ \qf -> do
liftTx $ delFunctionFromCatalog qf
delFunctionFromCache qf
-- update the schema cache with the changes
processSchemaChanges schemaDiff

View File

@ -154,7 +154,7 @@ addEventTriggerToCatalog qt allCols etc = do
mkTriggerQ trid name qt allCols opsdef
return trid
where
QualifiedTable sn tn = qt
QualifiedObject sn tn = qt
(EventTriggerConf name opsdef _ _ _ _) = etc
getTrid [] = throw500 "could not create event-trigger"
getTrid (x:_) = return x

View File

@ -41,9 +41,8 @@ mkDefaultMutFlds = \case
(fromPGCol $ pgiName pgColInfo, FCol pgColInfo)
qualTableToAliasIden :: QualifiedTable -> Iden
qualTableToAliasIden (QualifiedTable sn tn) =
Iden $ getSchemaTxt sn <> "_" <> getTableTxt tn
<> "__mutation_result_alias"
qualTableToAliasIden qt =
Iden $ snakeCaseTable qt <> "__mutation_result_alias"
mkMutFldExp :: QualifiedTable -> Bool -> MutFld -> S.SQLExp
mkMutFldExp qt singleObj = \case

View File

@ -1,6 +1,7 @@
module Hasura.RQL.DML.Select
( selectP2
, selectAggP2
, funcQueryTx
, convSelectQuery
, getSelectDeps
, module Hasura.RQL.DML.Select.Internal
@ -286,6 +287,18 @@ convSelectQuery prepArgBuilder (DMLQuery qt selQ) = do
validateHeaders $ spiRequiredHeaders selPermInfo
convSelectQ (tiFieldInfoMap tabInfo) selPermInfo extSelQ prepArgBuilder
-- | Execute a function-based select: the given 'S.FromItem' (the function
-- call) is wrapped by 'mkFuncSelectWith' into a CTE that is then queried
-- as if it were the function's return table, with the given permissions,
-- arguments and field selection. Returns the single JSON row the
-- generated SQL produces.
funcQueryTx
  :: S.FromItem -> QualifiedFunction -> QualifiedTable
  -> TablePerm -> TableArgs
  -> (Either TableAggFlds AnnFlds, DS.Seq Q.PrepArg)
  -> Q.TxE QErr RespBody
funcQueryTx frmItem fn tn tabPerm tabArgs (eSelFlds, p) =
  runIdentity . Q.getRow
  <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sqlBuilder) (toList p) True
  where
    sqlBuilder = toSQL $
      mkFuncSelectWith fn tn tabPerm tabArgs eSelFlds frmItem
selectAggP2 :: (AnnAggSel, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody
selectAggP2 (sel, p) =
runIdentity . Q.getRow
@ -293,7 +306,6 @@ selectAggP2 (sel, p) =
where
selectSQL = toSQL $ mkAggSelect sel
-- selectP2 :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m) => (SelectQueryP1, DS.Seq Q.PrepArg) -> m RespBody
selectP2 :: Bool -> (AnnSel, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody
selectP2 asSingleObject (sel, p) =

View File

@ -1,6 +1,7 @@
module Hasura.RQL.DML.Select.Internal
( mkSQLSelect
, mkAggSelect
, mkFuncSelectWith
, module Hasura.RQL.DML.Select.Types
)
where
@ -608,3 +609,28 @@ mkSQLSelect isSingleObject annSel =
baseNode = annSelToBaseNode (toIden rootFldName) rootFldName annSel
rootFldName = FieldName "root"
rootFldAls = S.Alias $ toIden rootFldName
-- | Wrap a set-returning function call in a CTE and select the requested
-- fields from it as if it were the function's return table:
--
-- > WITH "<schema>_<function>__result" AS (SELECT * FROM <frmItem>)
-- >   <node/aggregate select over that alias>
mkFuncSelectWith
  :: QualifiedFunction -> QualifiedTable
  -> TablePerm -> TableArgs -> Either TableAggFlds AnnFlds
  -> S.FromItem -> S.SelectWith
mkFuncSelectWith qf tn tabPerm tabArgs eSelFlds frmItem = selWith
  where
    -- SELECT * FROM function_name(args)
    funcSel = S.mkSelect { S.selFrom = Just $ S.FromExp [frmItem]
                         , S.selExtr = [S.Extractor S.SEStar Nothing]
                         }

    -- aggregate select or plain node select, depending on requested fields
    mainSel = case eSelFlds of
      Left aggFlds -> mkAggSelect $
                      AnnSelG aggFlds tabFrom tabPerm tabArgs
      Right annFlds -> mkSQLSelect False $
                       AnnSelG annFlds tabFrom tabPerm tabArgs

    -- the main select reads from the CTE alias, not the table itself
    tabFrom = TableFrom tn $ Just $ toIden funcAls

    QualifiedObject sn fn = qf
    funcAls = S.Alias $ Iden $
              getSchemaTxt sn <> "_" <> getFunctionTxt fn <> "__result"

    selWith = S.SelectWith [(funcAls, S.CTESelect funcSel)] mainSel

View File

@ -68,11 +68,15 @@ type ObjSel = AnnRelG AnnSel
type ArrRel = AnnRelG AnnSel
type ArrRelAgg = AnnRelG AnnAggSel
type Fields a = [(FieldName, a)]
data ArrSel
= ASSimple !ArrRel
| ASAgg !ArrRelAgg
deriving (Show, Eq)
type ArrSelFlds = Fields ArrSel
data AnnFld
= FCol !PGColInfo
| FObj !ObjSel
@ -97,7 +101,7 @@ data PGColFld
| PCFExp !T.Text
deriving (Show, Eq)
type ColFlds = [(FieldName, PGColFld)]
type ColFlds = Fields PGColFld
data AggOp
= AggOp
@ -111,14 +115,17 @@ data AggFld
| AFExp !T.Text
deriving (Show, Eq)
type AggFlds = [(FieldName, AggFld)]
type AggFlds = Fields AggFld
type AnnFlds = Fields AnnFld
data TableAggFld
= TAFAgg !AggFlds
| TAFNodes ![(FieldName, AnnFld)]
| TAFNodes !AnnFlds
| TAFExp !T.Text
deriving (Show, Eq)
type TableAggFlds = Fields TableAggFld
data TableFrom
= TableFrom
{ _tfTable :: !QualifiedTable
@ -139,8 +146,8 @@ data AnnSelG a
, _asnArgs :: !TableArgs
} deriving (Show, Eq)
type AnnSel = AnnSelG [(FieldName, AnnFld)]
type AnnAggSel = AnnSelG [(FieldName, TableAggFld)]
type AnnSel = AnnSelG AnnFlds
type AnnAggSel = AnnSelG TableAggFlds
data BaseNode
= BaseNode
@ -177,7 +184,7 @@ data OrderByNode
data ArrRelCtx
= ArrRelCtx
{ aacFields :: ![(FieldName, ArrSel)]
{ aacFields :: !ArrSelFlds
, aacAggOrdBys :: ![RelName]
} deriving (Show, Eq)

View File

@ -33,6 +33,7 @@ module Hasura.RQL.Types.Error
, indexedForM
, indexedMapM
, indexedForM_
, indexedMapM_
) where
import Data.Aeson
@ -276,6 +277,10 @@ indexedForM_ l f =
forM_ (zip [0..] l) $ \(i, a) ->
withPathE (Index i) (f a)
-- | 'mapM_' whose error paths record each element's position in the list
-- (argument-flipped 'indexedForM_').
indexedMapM_ :: (QErrM m)
             => (a -> m ()) -> [a] -> m ()
indexedMapM_ f xs = indexedForM_ xs f
liftIResult :: (QErrM m) => IResult a -> m a
liftIResult (IError path msg) =
throwError $ QErr path N.status400 (T.pack $ formatMsg msg) ParseFailed Nothing

View File

@ -84,6 +84,17 @@ module Hasura.RQL.Types.SchemaCache
, mkColDep
, getDependentObjs
, getDependentObjsWith
, FunctionType(..)
, FunctionArg(..)
, FunctionArgName(..)
, FunctionName(..)
, FunctionInfo(..)
, FunctionCache
, getFuncsOfTable
, addFunctionToCache
, askFunctionInfo
, delFunctionFromCache
) where
import qualified Hasura.GraphQL.Context as GC
@ -105,6 +116,7 @@ import Data.Aeson.TH
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.Sequence as Seq
import qualified Data.Text as T
reportSchemaObjs :: [SchemaObjId] -> T.Text
@ -348,7 +360,48 @@ mkTableInfo tn isSystemDefined uniqCons cols pcols mVI =
colMap = M.fromList $ map f cols
f colInfo = (fromPGCol $ pgiName colInfo, FIColumn colInfo)
-- | Postgres function volatility. Only STABLE/IMMUTABLE functions can be
-- tracked ('mkFunctionInfo' rejects VOLATILE).
data FunctionType
  = FTVOLATILE
  | FTIMMUTABLE
  | FTSTABLE
  deriving (Eq)

-- JSON tags ("VOLATILE" etc.) must match hdb_function_agg's function_type
$(deriveJSON defaultOptions{constructorTagModifier = drop 2} ''FunctionType)

funcTypToTxt :: FunctionType -> T.Text
funcTypToTxt FTVOLATILE  = "VOLATILE"
funcTypToTxt FTIMMUTABLE = "IMMUTABLE"
funcTypToTxt FTSTABLE    = "STABLE"

instance Show FunctionType where
  show = T.unpack . funcTypToTxt
-- | Name of a named function argument as reported by Postgres.
newtype FunctionArgName =
  FunctionArgName { getFuncArgNameTxt :: T.Text}
  deriving (Show, Eq, ToJSON)

-- | A single input argument of a tracked function; 'faName' is 'Nothing'
-- for positional (unnamed) arguments.
data FunctionArg
  = FunctionArg
  { faName :: !(Maybe FunctionArgName)
  , faType :: !PGColType
  } deriving(Show, Eq)

$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg)
-- | Everything cached for a tracked SQL function: its signature and the
-- table its SETOF return type refers to (built by 'mkFunctionInfo').
data FunctionInfo
  = FunctionInfo
  { fiName          :: !QualifiedFunction
  , fiSystemDefined :: !Bool
  , fiType          :: !FunctionType        -- STABLE/IMMUTABLE only
  , fiInputArgs     :: !(Seq.Seq FunctionArg)
  , fiReturnType    :: !QualifiedTable      -- table of the SETOF return type
  , fiDeps          :: ![SchemaDependency]  -- dependency on the return table
  } deriving (Show, Eq)

$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionInfo)

type TableCache = M.HashMap QualifiedTable TableInfo -- info of all tables
type FunctionCache = M.HashMap QualifiedFunction FunctionInfo -- info of all functions

type DepMap = M.HashMap SchemaObjId (HS.HashSet SchemaDependency)
@ -370,6 +423,7 @@ removeFromDepMap =
data SchemaCache
= SchemaCache
{ scTables :: !TableCache
, scFunctions :: !FunctionCache
, scQTemplates :: !QTemplateCache
, scRemoteResolvers :: !RemoteSchemaMap
, scGCtxMap :: !GC.GCtxMap
@ -379,6 +433,11 @@ data SchemaCache
$(deriveToJSON (aesonDrop 2 snakeCase) ''SchemaCache)
-- | All cached functions whose SETOF return type is the given table.
getFuncsOfTable :: QualifiedTable -> FunctionCache -> [FunctionInfo]
getFuncsOfTable qt fc = filter returnsQt $ M.elems fc
  where
    returnsQt fi = fiReturnType fi == qt
modDepMapInCache :: (CacheRWM m) => (DepMap -> DepMap) -> m ()
modDepMapInCache f = do
sc <- askSchemaCache
@ -434,7 +493,7 @@ delQTemplateFromCache qtn = do
emptySchemaCache :: SchemaCache
emptySchemaCache =
SchemaCache (M.fromList []) (M.fromList []) M.empty M.empty GC.emptyGCtx mempty
SchemaCache (M.fromList []) M.empty (M.fromList []) M.empty M.empty GC.emptyGCtx mempty
modTableCache :: (CacheRWM m) => TableCache -> m ()
modTableCache tc = do
@ -599,6 +658,48 @@ delEventTriggerFromCache qt trn = do
return $ ti { tiEventTriggerInfoMap = M.delete trn etim }
schObjId = SOTableObj qt $ TOTrigger trn
-- | Insert a function's info into the schema cache and register its
-- dependencies in the dependency map. Throws a 500 if already cached —
-- callers are expected to have checked first (e.g. 'trackFunctionP1').
addFunctionToCache
  :: (QErrM m, CacheRWM m)
  => FunctionInfo -> m ()
addFunctionToCache fi = do
  sc <- askSchemaCache
  let functionCache = scFunctions sc
  case M.lookup fn functionCache of
    Just _ -> throw500 $ "function already exists in cache " <>> fn
    Nothing -> do
      let newFunctionCache = M.insert fn fi functionCache
      writeSchemaCache $ sc {scFunctions = newFunctionCache}
  modDepMapInCache (addToDepMap objId deps)
  where
    fn = fiName fi
    objId = SOFunction $ fiName fi
    deps = fiDeps fi
-- | Look up a function in the schema cache; 400/NotExists if untracked.
askFunctionInfo
  :: (CacheRM m, QErrM m)
  => QualifiedFunction -> m FunctionInfo
askFunctionInfo qf = do
  functionCache <- scFunctions <$> askSchemaCache
  case M.lookup qf functionCache of
    Just fi -> return fi
    Nothing -> throw400 NotExists $
               "function not found in cache " <>> qf
-- | Remove a function from the schema cache and drop its entry from the
-- dependency map. Throws a 500 if it was not cached.
delFunctionFromCache
  :: (QErrM m, CacheRWM m)
  => QualifiedFunction -> m ()
delFunctionFromCache qf = do
  sc <- askSchemaCache
  let functionCache = scFunctions sc
  case M.lookup qf functionCache of
    Nothing -> throw500 $ "function does not exist in cache " <>> qf
    Just _ -> do
      let newFunctionCache = M.delete qf functionCache
      writeSchemaCache $ sc {scFunctions = newFunctionCache}
  modDepMapInCache (removeFromDepMap objId)
  where
    objId = SOFunction qf
addPermToCache
:: (QErrM m, CacheRWM m)
=> QualifiedTable

View File

@ -30,25 +30,27 @@ data SchemaObjId
= SOTable !QualifiedTable
| SOQTemplate !TQueryName
| SOTableObj !QualifiedTable !TableObjId
| SOFunction !QualifiedFunction
deriving (Eq, Generic)
instance Hashable SchemaObjId
reportSchemaObj :: SchemaObjId -> T.Text
reportSchemaObj (SOTable tn) = "table " <> qualTableToTxt tn
reportSchemaObj (SOTable tn) = "table " <> qualObjectToText tn
reportSchemaObj (SOFunction fn) = "function " <> qualObjectToText fn
reportSchemaObj (SOQTemplate qtn) =
"query-template " <> getTQueryName qtn
reportSchemaObj (SOTableObj tn (TOCol cn)) =
"column " <> qualTableToTxt tn <> "." <> getPGColTxt cn
"column " <> qualObjectToText tn <> "." <> getPGColTxt cn
reportSchemaObj (SOTableObj tn (TORel cn)) =
"relationship " <> qualTableToTxt tn <> "." <> getRelTxt cn
"relationship " <> qualObjectToText tn <> "." <> getRelTxt cn
reportSchemaObj (SOTableObj tn (TOCons cn)) =
"constraint " <> qualTableToTxt tn <> "." <> getConstraintTxt cn
"constraint " <> qualObjectToText tn <> "." <> getConstraintTxt cn
reportSchemaObj (SOTableObj tn (TOPerm rn pt)) =
"permission " <> qualTableToTxt tn <> "." <> getRoleTxt rn
"permission " <> qualObjectToText tn <> "." <> getRoleTxt rn
<> "." <> permTypeToCode pt
reportSchemaObj (SOTableObj tn (TOTrigger trn )) =
"event-trigger " <> qualTableToTxt tn <> "." <> trn
"event-trigger " <> qualObjectToText tn <> "." <> trn
instance Show SchemaObjId where

View File

@ -120,6 +120,10 @@ mkSelFromExp isLateral sel tn =
where
alias = Alias $ toIden tn
-- | FROM-item for a set-returning function call (@schema.function(args)@),
-- with no alias attached.
mkFuncFromItem :: QualifiedFunction -> [SQLExp] -> FromItem
mkFuncFromItem qf args =
  FIFunc qf args Nothing
mkRowExp :: [Extractor] -> SQLExp
mkRowExp extrs = let
innerSel = mkSelect { selExtr = extrs }
@ -385,6 +389,7 @@ instance ToSQL DistinctExpr where
data FromItem
= FISimple !QualifiedTable !(Maybe Alias)
| FIIden !Iden
| FIFunc !QualifiedFunction ![SQLExp] !(Maybe Alias)
| FISelect !Lateral !Select !Alias
| FIJoin !JoinExpr
deriving (Show, Eq)
@ -400,6 +405,8 @@ instance ToSQL FromItem where
toSQL qt <-> toSQL mal
toSQL (FIIden iden) =
toSQL iden
toSQL (FIFunc qf args mal) =
toSQL qf <> paren (", " <+> args) <-> toSQL mal
toSQL (FISelect mla sel al) =
toSQL mla <-> paren (toSQL sel) <-> toSQL al
toSQL (FIJoin je) =

View File

@ -87,6 +87,8 @@ uFromItem fromItem = case fromItem of
S.FISimple t <$> mapM addAlias alM
S.FIIden iden ->
S.FIIden <$> return iden
S.FIFunc f args alM ->
S.FIFunc f args <$> mapM addAlias alM
S.FISelect isLateral sel al -> do
-- we are kind of ignoring if we have to reset
-- idens to empty based on correlation

View File

@ -84,6 +84,9 @@ instance (ToSQL a) => ToSQL (Maybe a) where
toSQL (Just a) = toSQL a
toSQL Nothing = mempty
class ToTxt a where
toTxt :: a -> T.Text
newtype TableName
= TableName { getTableTxt :: T.Text }
deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift)
@ -97,6 +100,9 @@ instance DQuote TableName where
instance ToSQL TableName where
toSQL = toSQL . toIden
instance ToTxt TableName where
toTxt = getTableTxt
data TableType
= TTBaseTable
| TTView
@ -135,6 +141,22 @@ instance IsIden ConstraintName where
instance ToSQL ConstraintName where
toSQL = toSQL . toIden
-- | An unqualified Postgres function name; pair with a 'SchemaName' via
-- 'QualifiedObject' to get a @QualifiedFunction@.
newtype FunctionName
  = FunctionName { getFunctionTxt :: T.Text }
  deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift)

instance IsIden FunctionName where
  toIden (FunctionName t) = Iden t

instance DQuote FunctionName where
  dquoteTxt (FunctionName t) = t

instance ToSQL FunctionName where
  toSQL = toSQL . toIden

instance ToTxt FunctionName where
  toTxt = getFunctionTxt
newtype SchemaName
= SchemaName { getSchemaTxt :: T.Text }
deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift)
@ -148,51 +170,59 @@ instance IsIden SchemaName where
instance ToSQL SchemaName where
toSQL = toSQL . toIden
data QualifiedTable
= QualifiedTable
{ qtSchema :: !SchemaName
, qtTable :: !TableName
data QualifiedObject a
= QualifiedObject
{ qSchema :: !SchemaName
, qName :: !a
} deriving (Show, Eq, Generic, Lift)
instance FromJSON QualifiedTable where
instance (FromJSON a) => FromJSON (QualifiedObject a) where
parseJSON v@(String _) =
QualifiedTable publicSchema <$> parseJSON v
QualifiedObject publicSchema <$> parseJSON v
parseJSON (Object o) =
QualifiedTable <$>
QualifiedObject <$>
o .:? "schema" .!= publicSchema <*>
o .: "name"
parseJSON _ =
fail "expecting a string/object for table"
fail "expecting a string/object for QualifiedObject"
instance ToJSON QualifiedTable where
toJSON (QualifiedTable (SchemaName "public") tn) = toJSON tn
toJSON (QualifiedTable sn tn) =
instance (ToJSON a) => ToJSON (QualifiedObject a) where
toJSON (QualifiedObject (SchemaName "public") o) = toJSON o
toJSON (QualifiedObject sn o) =
object [ "schema" .= sn
, "name" .= tn
, "name" .= o
]
instance ToJSONKey QualifiedTable where
toJSONKey = ToJSONKeyText qualTableToTxt (text . qualTableToTxt)
instance (ToJSON a, ToTxt a) => ToJSONKey (QualifiedObject a) where
toJSONKey = ToJSONKeyText qualObjectToText (text . qualObjectToText)
instance DQuote QualifiedTable where
dquoteTxt = qualTableToTxt
instance (ToTxt a) => DQuote (QualifiedObject a) where
dquoteTxt = qualObjectToText
instance Hashable QualifiedTable
instance (Hashable a) => Hashable (QualifiedObject a)
instance ToSQL QualifiedTable where
toSQL (QualifiedTable sn tn) =
toSQL sn <> "." <> toSQL tn
instance (ToSQL a) => ToSQL (QualifiedObject a) where
toSQL (QualifiedObject sn o) =
toSQL sn <> "." <> toSQL o
qualTableToTxt :: QualifiedTable -> T.Text
qualTableToTxt (QualifiedTable (SchemaName "public") tn) =
getTableTxt tn
qualTableToTxt (QualifiedTable sn tn) =
getSchemaTxt sn <> "." <> getTableTxt tn
-- | Render a qualified object for messages/JSON keys, omitting the
-- schema prefix when it is "public".
qualObjectToText :: ToTxt a => QualifiedObject a -> T.Text
qualObjectToText (QualifiedObject sn o)
  | sn == publicSchema = toTxt o
  | otherwise = getSchemaTxt sn <> "." <> toTxt o
snakeCaseTable :: QualifiedTable -> T.Text
snakeCaseTable (QualifiedTable sn tn) =
-- | Flatten a qualified name with '_' instead of '.' (suitable for use
-- inside GraphQL identifiers); the "public" schema is omitted.
snakeCaseQualObject :: ToTxt a => QualifiedObject a -> T.Text
snakeCaseQualObject (QualifiedObject sn o)
  | sn == publicSchema = toTxt o
  | otherwise = getSchemaTxt sn <> "_" <> toTxt o
type QualifiedTable = QualifiedObject TableName
snakeCaseTable :: QualifiedObject TableName -> T.Text
snakeCaseTable (QualifiedObject sn tn) =
getSchemaTxt sn <> "_" <> getTableTxt tn
type QualifiedFunction = QualifiedObject FunctionName
newtype PGCol
= PGCol { getPGColTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift)

View File

@ -218,7 +218,7 @@ v1QueryHandler query = do
scRef <- scCacheRef . hcServerCtx <$> ask
httpMgr <- scManager . hcServerCtx <$> ask
--FIXME: should we be fetching the remote schema again? if not how do we get the remote schema?
newGCtxMap <- GS.mkGCtxMap (scTables newSc)
newGCtxMap <- GS.mkGCtxMap (scTables newSc) (scFunctions newSc)
(mergedGCtxMap, defGCtx) <-
mergeSchemas (scRemoteResolvers newSc) newGCtxMap httpMgr
let newSc' =
@ -273,7 +273,7 @@ legacyQueryHandler tn queryType =
Just queryParser -> getQueryParser queryParser qt >>= v1QueryHandler
Nothing -> throw404 "No such resource exists"
where
qt = QualifiedTable publicSchema tn
qt = QualifiedObject publicSchema tn
mkWaiApp

View File

@ -3,12 +3,12 @@ module Hasura.Server.Query where
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import qualified Network.HTTP.Client as HTTP
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import qualified Network.HTTP.Client as HTTP
import Hasura.Prelude
import Hasura.RQL.DDL.Metadata
@ -16,24 +16,28 @@ import Hasura.RQL.DDL.Permission
import Hasura.RQL.DDL.QueryTemplate
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Subscribe
import Hasura.RQL.DML.Count
import Hasura.RQL.DML.Delete
import Hasura.RQL.DML.Insert
import Hasura.RQL.DML.QueryTemplate
import Hasura.RQL.DML.Returning (encodeJSONVector)
import Hasura.RQL.DML.Returning (encodeJSONVector)
import Hasura.RQL.DML.Select
import Hasura.RQL.DML.Update
import Hasura.RQL.Types
import qualified Database.PG.Query as Q
import qualified Database.PG.Query as Q
data RQLQuery
= RQAddExistingTableOrView !TrackTable
| RQTrackTable !TrackTable
| RQUntrackTable !UntrackTable
| RQTrackFunction !TrackFunction
| RQUntrackFunction !UnTrackFunction
| RQCreateObjectRelationship !CreateObjRel
| RQCreateArrayRelationship !CreateArrRel
| RQDropRelationship !DropRel
@ -131,6 +135,8 @@ queryNeedsReload qi = case qi of
RQAddExistingTableOrView _ -> True
RQTrackTable _ -> True
RQUntrackTable _ -> True
RQTrackFunction _ -> True
RQUntrackFunction _ -> True
RQCreateObjectRelationship _ -> True
RQCreateArrayRelationship _ -> True
@ -188,6 +194,9 @@ runQueryM rq = withPathK "args" $ case rq of
RQTrackTable q -> runTrackTableQ q
RQUntrackTable q -> runUntrackTableQ q
RQTrackFunction q -> runTrackFunc q
RQUntrackFunction q -> runUntrackFunc q
RQCreateObjectRelationship q -> runCreateObjRel q
RQCreateArrayRelationship q -> runCreateArrRel q
RQDropRelationship q -> runDropRel q

View File

@ -0,0 +1,33 @@
-- Metadata for the function identified by schema = $1, name = $2, as one
-- JSON object per matching row of hdb_catalog.hdb_function_agg (more than
-- one row means the function is overloaded; the caller rejects that).
SELECT
row_to_json (
  (
    SELECT
      e
    FROM
      (
        SELECT
          has_variadic,
          function_type,
          return_type_schema,
          return_type_name,
          return_type_type,
          returns_set,
          input_arg_types,
          input_arg_names,
          -- whether the return type is an actual table (not merely some
          -- composite type), i.e. potentially trackable
          exists(
            SELECT
              1
            FROM
              information_schema.tables
            WHERE
              table_schema = return_type_schema
              AND table_name = return_type_name
          ) AS returns_table
      ) AS e
  )
) AS "raw_function_info"
FROM
  hdb_catalog.hdb_function_agg
WHERE
  function_schema = $1
  AND function_name = $2

View File

@ -245,7 +245,32 @@ args:
name: event_invocation_logs
column: event_id
- type: track_table
args:
name: hdb_function_agg
schema: hdb_catalog
- type: track_table
args:
name: hdb_function
schema: hdb_catalog
- type: add_existing_table_or_view
args:
schema: hdb_catalog
name: remote_schemas
- type: create_object_relationship
args:
name: return_table_info
table:
schema: hdb_catalog
name: hdb_function_agg
using:
manual_configuration:
remote_table:
schema: hdb_catalog
name: hdb_table
column_mapping:
return_type_schema: table_schema
return_type_name: table_name

View File

@ -316,6 +316,83 @@ CREATE TABLE hdb_catalog.event_invocation_logs
CREATE INDEX ON hdb_catalog.event_invocation_logs (event_id);
-- Tracked (GraphQL-exposed) Postgres functions, keyed by (schema, name);
-- is_system_defined marks rows created by the catalog itself rather than
-- by users.
CREATE TABLE hdb_catalog.hdb_function
(
  function_schema TEXT,
  function_name TEXT,
  is_system_defined boolean default false,
  PRIMARY KEY (function_schema, function_name)
);

-- Per-function metadata aggregated from pg_proc: variadic flag,
-- volatility class, definition, return-type classification and argument
-- type/name lists. Excludes pg_* / information_schema / hdb_catalog /
-- hdb_views schemas and aggregate functions.
CREATE VIEW hdb_catalog.hdb_function_agg AS
(
SELECT
  p.proname::text AS function_name,
  pn.nspname::text AS function_schema,
  CASE
    WHEN (p.provariadic = (0) :: oid) THEN false
    ELSE true
  END AS has_variadic,
  -- provolatile single-char code mapped to a readable label
  CASE
    WHEN (
      (p.provolatile) :: text = ('i' :: character(1)) :: text
    ) THEN 'IMMUTABLE' :: text
    WHEN (
      (p.provolatile) :: text = ('s' :: character(1)) :: text
    ) THEN 'STABLE' :: text
    WHEN (
      (p.provolatile) :: text = ('v' :: character(1)) :: text
    ) THEN 'VOLATILE' :: text
    ELSE NULL :: text
  END AS function_type,
  pg_get_functiondef(p.oid) AS function_definition,
  rtn.nspname::text AS return_type_schema,
  rt.typname::text AS return_type_name,
  -- typtype single-char code mapped to a readable label
  CASE
    WHEN ((rt.typtype) :: text = ('b' :: character(1)) :: text) THEN 'BASE' :: text
    WHEN ((rt.typtype) :: text = ('c' :: character(1)) :: text) THEN 'COMPOSITE' :: text
    WHEN ((rt.typtype) :: text = ('d' :: character(1)) :: text) THEN 'DOMAIN' :: text
    WHEN ((rt.typtype) :: text = ('e' :: character(1)) :: text) THEN 'ENUM' :: text
    WHEN ((rt.typtype) :: text = ('r' :: character(1)) :: text) THEN 'RANGE' :: text
    -- NOTE(review): 'PSUEDO' is a misspelling of 'PSEUDO'; consumers may
    -- match this exact string, so confirm before correcting it.
    WHEN ((rt.typtype) :: text = ('p' :: character(1)) :: text) THEN 'PSUEDO' :: text
    ELSE NULL :: text
  END AS return_type_type,
  p.proretset AS returns_set,
  -- Argument type names as a JSON array ('[]' when none); falls back to
  -- proargtypes when proallargtypes is NULL.
  -- NOTE(review): json_agg carries no ORDER BY here, so ordering by the
  -- generated ordinality is not guaranteed — confirm argument order.
  ( SELECT
      COALESCE(json_agg(pt.typname), '[]')
    FROM
      (
        unnest(
          COALESCE(p.proallargtypes, (p.proargtypes) :: oid [])
        ) WITH ORDINALITY pat(oid, ordinality)
        LEFT JOIN pg_type pt ON ((pt.oid = pat.oid))
      )
  ) AS input_arg_types,
  to_json(COALESCE(p.proargnames, ARRAY [] :: text [])) AS input_arg_names
FROM
  pg_proc p
  JOIN pg_namespace pn ON (pn.oid = p.pronamespace)
  JOIN pg_type rt ON (rt.oid = p.prorettype)
  JOIN pg_namespace rtn ON (rtn.oid = rt.typnamespace)
WHERE
  pn.nspname :: text NOT LIKE 'pg_%'
  AND pn.nspname :: text NOT IN ('information_schema', 'hdb_catalog', 'hdb_views')
  -- skip aggregate functions
  AND (NOT EXISTS (
    SELECT
      1
    FROM
      pg_aggregate
    WHERE
      ((pg_aggregate.aggfnoid) :: oid = p.oid)
    )
  )
);
CREATE TABLE hdb_catalog.remote_schemas (
id BIGSERIAL PRIMARY KEY,

View File

@ -0,0 +1,77 @@
-- Migration: add the function-tracking catalog objects.
-- Tracked (GraphQL-exposed) Postgres functions, keyed by (schema, name);
-- is_system_defined marks rows created by the catalog itself.
CREATE TABLE hdb_catalog.hdb_function
(
  function_schema TEXT,
  function_name TEXT,
  is_system_defined boolean default false,
  PRIMARY KEY (function_schema, function_name)
);

-- Per-function metadata aggregated from pg_proc: variadic flag,
-- volatility class, definition, return-type classification and argument
-- type/name lists. Excludes pg_* / information_schema / hdb_catalog /
-- hdb_views schemas and aggregate functions.
CREATE VIEW hdb_catalog.hdb_function_agg AS
(
SELECT
  p.proname::text AS function_name,
  pn.nspname::text AS function_schema,
  CASE
    WHEN (p.provariadic = (0) :: oid) THEN false
    ELSE true
  END AS has_variadic,
  -- provolatile single-char code mapped to a readable label
  CASE
    WHEN (
      (p.provolatile) :: text = ('i' :: character(1)) :: text
    ) THEN 'IMMUTABLE' :: text
    WHEN (
      (p.provolatile) :: text = ('s' :: character(1)) :: text
    ) THEN 'STABLE' :: text
    WHEN (
      (p.provolatile) :: text = ('v' :: character(1)) :: text
    ) THEN 'VOLATILE' :: text
    ELSE NULL :: text
  END AS function_type,
  pg_get_functiondef(p.oid) AS function_definition,
  rtn.nspname::text AS return_type_schema,
  rt.typname::text AS return_type_name,
  -- typtype single-char code mapped to a readable label
  CASE
    WHEN ((rt.typtype) :: text = ('b' :: character(1)) :: text) THEN 'BASE' :: text
    WHEN ((rt.typtype) :: text = ('c' :: character(1)) :: text) THEN 'COMPOSITE' :: text
    WHEN ((rt.typtype) :: text = ('d' :: character(1)) :: text) THEN 'DOMAIN' :: text
    WHEN ((rt.typtype) :: text = ('e' :: character(1)) :: text) THEN 'ENUM' :: text
    WHEN ((rt.typtype) :: text = ('r' :: character(1)) :: text) THEN 'RANGE' :: text
    -- NOTE(review): 'PSUEDO' is a misspelling of 'PSEUDO'; consumers may
    -- match this exact string, so confirm before correcting it.
    WHEN ((rt.typtype) :: text = ('p' :: character(1)) :: text) THEN 'PSUEDO' :: text
    ELSE NULL :: text
  END AS return_type_type,
  p.proretset AS returns_set,
  -- Argument type names as a JSON array ('[]' when none); falls back to
  -- proargtypes when proallargtypes is NULL.
  -- NOTE(review): json_agg carries no ORDER BY here, so ordering by the
  -- generated ordinality is not guaranteed — confirm argument order.
  ( SELECT
      COALESCE(json_agg(pt.typname), '[]')
    FROM
      (
        unnest(
          COALESCE(p.proallargtypes, (p.proargtypes) :: oid [])
        ) WITH ORDINALITY pat(oid, ordinality)
        LEFT JOIN pg_type pt ON ((pt.oid = pat.oid))
      )
  ) AS input_arg_types,
  to_json(COALESCE(p.proargnames, ARRAY [] :: text [])) AS input_arg_names
FROM
  pg_proc p
  JOIN pg_namespace pn ON (pn.oid = p.pronamespace)
  JOIN pg_type rt ON (rt.oid = p.prorettype)
  JOIN pg_namespace rtn ON (rtn.oid = rt.typnamespace)
WHERE
  pn.nspname :: text NOT LIKE 'pg_%'
  AND pn.nspname :: text NOT IN ('information_schema', 'hdb_catalog', 'hdb_views')
  -- skip aggregate functions
  AND (NOT EXISTS (
    SELECT
      1
    FROM
      pg_aggregate
    WHERE
      ((pg_aggregate.aggfnoid) :: oid = p.oid)
    )
  )
);

View File

@ -0,0 +1,24 @@
# Track the function-metadata catalog objects so they can be queried
# through the metadata API / console.
type: bulk
args:
- type: track_table
  args:
    schema: hdb_catalog
    name: hdb_function_agg
- type: track_table
  args:
    schema: hdb_catalog
    name: hdb_function
# Relate a function's return type to the tracked-table entry it returns,
# joining on the (schema, name) pair.
- type: create_object_relationship
  args:
    name: return_table_info
    table:
      schema: hdb_catalog
      name: hdb_function_agg
    using:
      manual_configuration:
        remote_table:
          schema: hdb_catalog
          name: hdb_table
        column_mapping:
          return_type_schema: table_schema
          return_type_name: table_name

View File

@ -0,0 +1,18 @@
# Verify that the tracked SQL function search_posts is exposed as a
# top-level GraphQL query field, with its SQL argument passed via `args`.
description: Custom GraphQL query using search_posts function
url: /v1alpha1/graphql
status: 200
response:
  data:
    search_posts:
    # only the seeded row matching 'hasura' should be returned
    - title: post by hasura
      content: content for post
query:
  query: |
    query {
      search_posts(
        args: {search: "hasura"}
      ) {
        title
        content
      }
    }

View File

@ -0,0 +1,19 @@
# Verify the auto-generated <function>_aggregate field for a tracked SQL
# function: both seeded posts match the search term 'post'.
description: Custom GraphQL aggregate query using search_posts function
url: /v1alpha1/graphql
status: 200
response:
  data:
    search_posts_aggregate:
      aggregate:
        count: 2
query:
  query: |
    query {
      search_posts_aggregate(
        args: {search: "post"}
      ) {
        aggregate{
          count
        }
      }
    }

View File

@ -0,0 +1,44 @@
# Test fixture: create the post table and a searching SQL function over
# it, track both, then seed two rows.
type: bulk
args:
#Post table (searched by the function below)
- type: run_sql
  args:
    sql: |
      create table post (
        id serial PRIMARY KEY,
        title TEXT,
        content TEXT
      )
- type: track_table
  args:
    schema: public
    name: post
#Search post function
- type: run_sql
  args:
    sql: |
      create function search_posts(search text)
      returns setof post as $$
          select *
          from post
          where
          title ilike ('%' || search || '%') or
          content ilike ('%' || search || '%')
      $$ language sql stable;
- type: track_function
  args:
    name: search_posts
    schema: public
#Insert values
- type: run_sql
  args:
    sql: |
      insert into post (title, content)
      values
      ('post by hasura', 'content for post'),
      ('post by another', 'content for another post')

View File

@ -0,0 +1,20 @@
# Test teardown: untrack the function and table from metadata before
# dropping them from Postgres.
type: bulk
args:
#Untrack function first (its metadata depends on the post table)
- type: untrack_function
  args:
    name: search_posts
    schema: public
- type: untrack_table
  args:
    table:
      schema: public
      name: post
#Drop table (and, via cascade, its dependents) from postgres
- type: run_sql
  args:
    sql: |
      drop table post cascade;
    cascade: true

View File

@ -0,0 +1,19 @@
# Query the tracked search_tracks function as the Artist role
# (X-Hasura-Artist-Id: 1); only the matching track of artist 1 is
# expected in the response.
description: Search tracks of an artist
url: /v1alpha1/graphql
status: 200
headers:
  X-Hasura-Role: Artist
  X-Hasura-Artist-Id: '1'
response:
  data:
    search_tracks:
    - id: 1
      name: Keepup
query:
  query: |
    query {
      search_tracks(args: {search: "up"}){
        id
        name
      }
    }

View File

@ -0,0 +1,32 @@
# Aggregate query over the tracked search_tracks function, executed as
# the Artist role; expects both 'Keep*' tracks of artist 1 plus a count.
description: Search tracks of an artist (with Aggregate)
url: /v1alpha1/graphql
status: 200
headers:
  X-Hasura-Role: Artist
  X-Hasura-Artist-Id: '1'
response:
  data:
    search_tracks_aggregate:
      aggregate:
        count: 2
      nodes:
      - id: 1
        name: Keepup
        artist_id: 1
      - id: 2
        name: Keepdown
        artist_id: 1
query:
  query: |
    query {
      search_tracks_aggregate(args: {search: "keep"}){
        aggregate{
          count
        }
        nodes{
          id
          name
          artist_id
        }
      }
    }

View File

@ -241,5 +241,23 @@ args:
filter:
Artist:
id: X-Hasura-Artist-Id
allow_aggregations: true
# Create the search_tracks function
- type: run_sql
args:
sql: |
create function search_tracks(search text)
returns setof "Track" as $$
select *
from "Track"
where
name ilike ('%' || search || '%')
$$ language sql stable;
- type: track_function
args:
name: search_tracks
schema: public

View File

@ -16,7 +16,7 @@ args:
- type: run_sql
args:
sql: |
drop table "Track"
drop table "Track" cascade
cascade: true
- type: run_sql

View File

@ -11,6 +11,7 @@
url: /v1/query
status: 200
response:
functions: []
query_templates: []
tables: []
remote_schemas: []

View File

@ -2,7 +2,6 @@ description: Export schema cache (metadata)
url: /v1/query
status: 200
response:
query_templates: []
remote_schemas: []
tables:
- table: author
@ -31,6 +30,9 @@ response:
update_permissions: []
delete_permissions: []
event_triggers: []
query_templates: []
functions:
- search_articles
query:
type: export_metadata

View File

@ -40,3 +40,5 @@ query:
update_permissions: []
delete_permissions: []
event_triggers: []
functions:
- search_articles

View File

@ -7,6 +7,9 @@ query:
type: replace_metadata
args:
query_templates: []
functions:
- schema: public
name: search_articles
tables:
- table: author
object_relationships: []

View File

@ -82,3 +82,20 @@ args:
2,
true
)
#Create the search_articles function
- type: run_sql
args:
sql: |
create function search_articles(search text)
returns setof article as $$
select *
from article
where
title ilike ('%' || search || '%') or
content ilike ('%' || search || '%')
$$ language sql stable;
- type: track_function
args:
name: search_articles
schema: public

View File

@ -5,7 +5,7 @@ args:
args:
cascade: true
sql: |
drop table article
drop table article cascade
- type: run_sql
args:
cascade: true

View File

@ -206,6 +206,12 @@ class TestGraphqlQueryPermissions(DefaultTestSelectQueries):
def test_artist_select_query_Track(self, hge_ctx):
    # Run the request/expected-response fixture for Track selection
    # under the Artist role.
    check_query_f(hge_ctx, self.dir() + '/artist_select_query_Track.yaml')
def test_artist_search_tracks(self, hge_ctx):
    # Query the tracked search_tracks SQL function as the Artist role.
    check_query_f(hge_ctx, self.dir() + '/artist_search_tracks.yaml')
def test_artist_search_tracks_aggregate(self, hge_ctx):
    # Aggregate variant of the search_tracks function query.
    check_query_f(hge_ctx, self.dir() + '/artist_search_tracks_aggregate.yaml')
@classmethod
def dir(cls):
    # Directory holding this class's YAML query fixtures.
    return 'queries/graphql_query/permissions'
@ -312,3 +318,15 @@ class TestGraphQLQueryOrderBy(DefaultTestSelectQueries):
@classmethod
def dir(cls):
return 'queries/graphql_query/order_by'
class TestGraphQLQueryFunctions(DefaultTestSelectQueries):
    """GraphQL queries backed by Postgres SQL functions tracked via
    track_function (plain and aggregate fields)."""

    @classmethod
    def dir(cls):
        # Fixture directory for this suite.
        return 'queries/graphql_query/functions'

    def test_search_posts(self, hge_ctx):
        fixture = self.dir() + "/query_search_posts.yaml"
        check_query_f(hge_ctx, fixture)

    def test_search_posts_aggregate(self, hge_ctx):
        fixture = self.dir() + "/query_search_posts_aggregate.yaml"
        check_query_f(hge_ctx, fixture)

Some files were not shown because too many files have changed in this diff Show More