Merge oss/master onto mono/main

GitOrigin-RevId: 1c8c4d60e033c8a0bc8b2beed24c5bceb7d4bcc8
Vishnu Bharathi P 2020-11-12 14:55:48 +05:30 committed by hasura-bot
parent 9faf5d90f7
commit 58c44f55dd
141 changed files with 3674 additions and 3074 deletions

View File

@ -1,6 +1,6 @@
FROM golang:1.13
FROM golang:1.14
ARG upx_version="3.94"
ARG upx_version="3.96"
# install go dependencies
RUN go get github.com/mitchellh/gox \

View File

@ -374,7 +374,7 @@ jobs:
# test and build cli
test_and_build_cli:
docker:
- image: hasura/graphql-engine-cli-builder:20191205
- image: hasura/graphql-engine-cli-builder:20201105
- image: circleci/postgres:10-alpine
environment:
POSTGRES_USER: gql_test
@ -669,4 +669,4 @@ workflows:
- build_image
- test_console
- test_and_build_cli_migrations
- all_server_tests_pass
- all_server_tests_pass

View File

@ -104,6 +104,7 @@ This release contains the [PDV refactor (#4111)](https://github.com/hasura/graph
### Bug fixes and improvements
(Add entries here in the order of: server, console, cli, docs, others)
- cli: fix cli-migrations-v2 image failing to run as a non root user (close #4651, close #5333)
- server: Fix fine-grained incremental cache invalidation (fix #3759)
@ -120,11 +121,14 @@ This release contains the [PDV refactor (#4111)](https://github.com/hasura/graph
- server: accept only non-negative integers for batch size and refetch interval (close #5653) (#5759)
- server: fix a bug that arose when renaming a table that had a manual relationship defined (close #4158)
- server: limit the length of event trigger names (close #5786)
- server: Configurable websocket keep-alive interval. Add `--websocket-keepalive` command-line flag
and handle `HASURA_GRAPHQL_WEBSOCKET_KEEPALIVE` env variable (fix #3539)
**NOTE:** If you have event triggers with names longer than 42 characters, you should rename them to avoid running into the Postgres identifier limit bug (#5786)
- server: validate remote schema queries (fixes #4143)
- server: fix issue with tracking custom functions that return `SETOF` materialized view (close #5294) (#5945)
- server: introduce an optional custom table name in the table configuration to track the table under that name. The `set_table_custom_fields` API has been deprecated; a new API `set_table_customization` has been added to set the configuration (see the sketch after this list). (#3811)
- server: allow remote relationships with union, interface and enum type fields as well (fixes #5875) (#6080)
- server: fix event trigger cleanup on deletion via replace_metadata (fix #5461) (#6137)
- console: allow user to cascade Postgres dependencies when dropping Postgres objects (close #5109) (#5248)
- console: mark inconsistent remote schemas in the UI (close #5093) (#5181)
- console: remove ONLY as default for ALTER TABLE in column alter operations (close #5512) #5706
@ -133,6 +137,7 @@ This release contains the [PDV refactor (#4111)](https://github.com/hasura/graph
- console: down migrations improvements (close #3503, #4988) (#4790)
- cli: add missing global flags for seed command (#5565)
- cli: allow seeds as alias for seed command (#5693)
- cli: fix bug in metadata apply which made the server acquire some redundant and unnecessary locks (close #6115)
- docs: add docs page on networking with docker (close #4346) (#4811)
- docs: add tabs for console / cli / api workflows (close #3593) (#4948)
- docs: add postgres concepts page to docs (close #4440) (#4471)
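For the `set_table_customization` entry above, here is a minimal sketch of how the new API could be invoked; the `/v1/query` endpoint, the admin-secret header, and the exact `configuration` field names are assumptions for illustration:

```ts
// Hypothetical sketch: track the table "user_details" under the custom
// name "users" via the new set_table_customization metadata API.
async function setTableCustomization(endpoint: string, adminSecret: string) {
  const resp = await fetch(`${endpoint}/v1/query`, {
    method: "POST",
    headers: {
      "content-type": "application/json",
      "x-hasura-admin-secret": adminSecret, // assumption: admin-secret auth
    },
    body: JSON.stringify({
      type: "set_table_customization",
      args: {
        table: { schema: "public", name: "user_details" },
        // the optional custom name described in the changelog entry
        configuration: { custom_name: "users" },
      },
    }),
  });
  if (!resp.ok) {
    throw new Error(`set_table_customization failed: ${await resp.text()}`);
  }
}
```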

View File

@ -133,7 +133,7 @@ func newScriptsUpdateConfigV2Cmd(ec *cli.ExecutionContext) *cobra.Command {
}
}
// check if up.sql file exists
if string(sqlUp.Bytes()) != "" {
if sqlUp.String() != "" {
upMigration, ok := fileCfg.Migrations.Migrations[version][source.Up]
if !ok {
// if up.sql doesn't exist, create an up.sql file and upMigration
@ -207,7 +207,7 @@ func newScriptsUpdateConfigV2Cmd(ec *cli.ExecutionContext) *cobra.Command {
}
}
// check if down.sql file exists
if string(sqlDown.Bytes()) != "" {
if sqlDown.String() != "" {
downMigration, ok := fileCfg.Migrations.Migrations[version][source.Down]
if !ok {
// if down.sql doesn't exist, create a down.sql file and downMigration

View File

@ -180,18 +180,9 @@ func (h *HasuraDB) ApplyMetadata() error {
if err != nil {
return err
}
query := HasuraInterfaceBulk{
Type: "bulk",
Args: []interface{}{
HasuraInterfaceQuery{
Type: "clear_metadata",
Args: HasuraArgs{},
},
HasuraInterfaceQuery{
Type: "replace_metadata",
Args: obj,
},
},
query := HasuraInterfaceQuery{
Type: "replace_metadata",
Args: obj,
}
resp, body, err := h.sendv1Query(query)
if err != nil {

View File

@ -19,16 +19,24 @@ import (
)
func (c *Config) findPluginManifestFiles(indexDir string) ([]string, error) {
c.Logger.Debugf("finding plugin manifest files in directory %v", indexDir)
var out []string
fs := afero.Afero{
Fs: afero.NewOsFs(),
}
fs.Walk(indexDir, func(path string, info os.FileInfo, err error) error {
if info == nil {
if err != nil {
return err
}
return nil
}
if info.Mode().IsRegular() && filepath.Ext(info.Name()) == paths.ManifestExtension {
out = append(out, path)
}
return nil
})
return out, nil
}
@ -50,6 +58,7 @@ func (c *Config) LoadPluginListFromFS(indexDir string) (Plugins, error) {
// LoadPluginByName loads a plugin's index file by its name. When the plugin
// file is not found, it returns an error that can be checked with os.IsNotExist.
func (c *Config) LoadPluginByName(pluginName string) (*PluginVersions, error) {
c.Logger.Debugf("loading plugin %s", pluginName)
if !IsSafePluginName(pluginName) {
return nil, errors.Errorf("plugin name %q not allowed", pluginName)
}
@ -68,6 +77,7 @@ func (c *Config) LoadPluginByName(pluginName string) (*PluginVersions, error) {
}
func (c *Config) LoadPlugins(files []string, pluginName ...string) Plugins {
c.Logger.Debugf("loading plugins")
ps := Plugins{}
for _, file := range files {
p, err := c.ReadPluginFromFile(file)

View File

@ -99,7 +99,7 @@ type Plugin struct {
func (p *Plugin) ParseVersion() {
v, err := semver.NewVersion(p.Version)
if err != nil {
p.ParsedVersion = nil
p.ParsedVersion = semver.MustParse("0.0.0-dev")
return
}
p.ParsedVersion = v

View File

@ -20,8 +20,9 @@ const Endpoints = {
hasuractlMetadata: `${hasuractlUrl}/apis/metadata`,
hasuractlMigrateSettings: `${hasuractlUrl}/apis/migrate/settings`,
telemetryServer: 'wss://telemetry.hasura.io/v1/ws',
consoleNotificationsStg: 'https://data.hasura-stg.hasura-app.io/v1/query',
consoleNotificationsProd: 'https://data.hasura.io/v1/query',
consoleNotificationsStg:
'https://notifications.hasura-stg.hasura-app.io/v1/graphql',
consoleNotificationsProd: 'https://notifications.hasura.io/v1/graphql',
};
const globalCookiePolicy = 'same-origin';

View File

@ -795,47 +795,34 @@ export const getConsoleNotificationQuery = (
time: Date | string | number,
userType?: Nullable<ConsoleScope>
) => {
let consoleUserScope = {
$ilike: `%${userType}%`,
};
let consoleUserScopeVar = `%${userType}%`;
if (!userType) {
consoleUserScope = {
$ilike: '%OSS%',
};
consoleUserScopeVar = '%OSS%';
}
return {
args: {
table: 'console_notification',
columns: ['*'],
where: {
$or: [
{
expiry_date: {
$gte: time,
},
},
{
expiry_date: {
$eq: null,
},
},
],
scope: consoleUserScope,
start_date: { $lte: time },
},
order_by: [
{
type: 'asc',
nulls: 'last',
column: 'priority',
},
{
type: 'desc',
column: 'start_date',
},
],
},
type: 'select',
const query = `query fetchNotifications($currentTime: timestamptz, $userScope: String) {
console_notifications(
where: {start_date: {_lte: $currentTime}, scope: {_ilike: $userScope}, _or: [{expiry_date: {_gte: $currentTime}}, {expiry_date: {_eq: null}}]},
order_by: {priority: asc_nulls_last, start_date: desc}
) {
content
created_at
external_link
expiry_date
id
is_active
priority
scope
start_date
subject
type
}
}`;
const variables = {
userScope: consoleUserScopeVar,
currentTime: time,
};
return { query, variables };
};

View File

@ -79,8 +79,12 @@ const fetchConsoleNotifications = () => (dispatch, getState) => {
const consoleId = window.__env.consoleId;
const consoleScope = getConsoleScope(serverVersion, consoleId);
let userType = 'admin';
if (headers.hasOwnProperty(HASURA_COLLABORATOR_TOKEN)) {
const collabToken = headers[HASURA_COLLABORATOR_TOKEN];
const headerHasCollabToken = Object.keys(headers).find(
header => header.toLowerCase() === HASURA_COLLABORATOR_TOKEN
);
if (headerHasCollabToken) {
const collabToken = headers[headerHasCollabToken];
userType = getUserType(collabToken);
}
@ -94,12 +98,14 @@ const fetchConsoleNotifications = () => (dispatch, getState) => {
}
const now = new Date().toISOString();
const query = getConsoleNotificationQuery(now, consoleScope);
const payload = getConsoleNotificationQuery(now, consoleScope);
const options = {
body: JSON.stringify(query),
body: JSON.stringify(payload),
method: 'POST',
headers: {
'content-type': 'application/json',
// temp. change until Auth is added
'x-hasura-role': 'user',
},
};
@ -108,94 +114,131 @@ const fetchConsoleNotifications = () => (dispatch, getState) => {
const lastSeenNotifications = JSON.parse(
window.localStorage.getItem('notifications:lastSeen')
);
if (!data.length) {
dispatch({ type: FETCH_CONSOLE_NOTIFICATIONS_SET_DEFAULT });
dispatch(
updateConsoleNotificationsState({
read: 'default',
date: now,
showBadge: false,
})
);
if (!lastSeenNotifications) {
if (data.data.console_notifications) {
const fetchedData = data.data.console_notifications;
if (!fetchedData.length) {
dispatch({ type: FETCH_CONSOLE_NOTIFICATIONS_SET_DEFAULT });
dispatch(
updateConsoleNotificationsState({
read: 'default',
date: now,
showBadge: false,
})
);
if (!lastSeenNotifications) {
window.localStorage.setItem(
'notifications:lastSeen',
JSON.stringify(0)
);
}
return;
}
// NOTE: these 2 steps may not be required if the table in the DB
// enforces the usage of `enums` and we're sure that the notification scope
// is only from the allowed permutations of scope. We aren't doing that yet
// because the GQL query can't use the `_ilike` operator during
// filtering. Hence this is kept here since this is a new feature and
// mistakes can happen while adding data into the DB.
// TODO: remove these once things are more streamlined
const uppercaseScopedData = makeUppercaseScopes(fetchedData);
let filteredData = filterScope(uppercaseScopedData, consoleScope);
if (
lastSeenNotifications &&
lastSeenNotifications > filteredData.length
) {
window.localStorage.setItem(
'notifications:lastSeen',
JSON.stringify(0)
JSON.stringify(filteredData.length)
);
}
if (previousRead) {
if (!consoleStateDB.console_notifications) {
dispatch(
updateConsoleNotificationsState({
read: [],
date: now,
showBadge: true,
})
);
} else {
let newReadValue;
if (previousRead === 'default' || previousRead === 'error') {
newReadValue = [];
toShowBadge = false;
} else if (previousRead === 'all') {
const previousList = JSON.parse(
localStorage.getItem('notifications:data')
);
if (!previousList) {
// we don't have a record of the IDs that were marked as read previously
newReadValue = [];
toShowBadge = true;
} else if (previousList.length) {
const readNotificationsDiff = filteredData.filter(
newNotif =>
!previousList.find(oldNotif => oldNotif.id === newNotif.id)
);
if (!readNotificationsDiff.length) {
// since the data hasn't changed since the last call
newReadValue = previousRead;
toShowBadge = false;
} else {
newReadValue = [...previousList.map(notif => `${notif.id}`)];
toShowBadge = true;
filteredData = [...readNotificationsDiff, ...previousList];
}
}
} else {
newReadValue = previousRead;
if (
previousRead.length &&
lastSeenNotifications >= filteredData.length
) {
toShowBadge = false;
} else if (lastSeenNotifications < filteredData.length) {
toShowBadge = true;
}
}
dispatch(
updateConsoleNotificationsState({
read: newReadValue,
date: consoleStateDB.console_notifications[userType].date,
showBadge: toShowBadge,
})
);
}
}
dispatch({
type: FETCH_CONSOLE_NOTIFICATIONS_SUCCESS,
data: filteredData,
});
// update/set the lastSeen value once the data is set
if (
!lastSeenNotifications ||
lastSeenNotifications !== filteredData.length
) {
window.localStorage.setItem(
'notifications:lastSeen',
JSON.stringify(filteredData.length)
);
}
return;
}
const uppercaseScopedData = makeUppercaseScopes(data);
let filteredData = filterScope(uppercaseScopedData, consoleScope);
if (
!lastSeenNotifications ||
lastSeenNotifications !== filteredData.length
) {
window.localStorage.setItem(
'notifications:lastSeen',
JSON.stringify(filteredData.length)
);
}
if (previousRead) {
if (!consoleStateDB.console_notifications) {
dispatch(
updateConsoleNotificationsState({
read: [],
date: now,
showBadge: true,
})
);
} else {
let newReadValue;
if (previousRead === 'default' || previousRead === 'error') {
newReadValue = [];
toShowBadge = false;
} else if (previousRead === 'all') {
const previousList = JSON.parse(
localStorage.getItem('notifications:data')
);
if (previousList.length) {
const resDiff = filteredData.filter(
newNotif =>
!previousList.find(oldNotif => oldNotif.id === newNotif.id)
);
if (!resDiff.length) {
// since the data hasn't changed since the last call
newReadValue = previousRead;
toShowBadge = false;
} else {
newReadValue = [...previousList.map(notif => `${notif.id}`)];
toShowBadge = true;
filteredData = [...resDiff, ...previousList];
}
}
} else {
newReadValue = previousRead;
if (
previousRead.length &&
lastSeenNotifications === filteredData.length
) {
toShowBadge = false;
}
}
dispatch(
updateConsoleNotificationsState({
read: newReadValue,
date: consoleStateDB.console_notifications[userType].date,
showBadge: toShowBadge,
})
);
}
}
dispatch({
type: FETCH_CONSOLE_NOTIFICATIONS_SUCCESS,
data: filteredData,
});
dispatch({ type: FETCH_CONSOLE_NOTIFICATIONS_ERROR });
dispatch(
updateConsoleNotificationsState({
read: 'error',
date: now,
showBadge: false,
})
);
})
.catch(err => {
console.error(err);

View File

@ -31,7 +31,6 @@ export type ConsoleNotification = {
scope?: NotificationScope;
};
// FIXME? : we may have to remove this
export const defaultNotification: ConsoleNotification = {
subject: 'No updates available at the moment',
created_at: Date.now(),

View File

@ -35,10 +35,40 @@ import {
setProClickState,
getLoveConsentState,
setLoveConsentState,
getUserType,
} from './utils';
import { getSchemaBaseRoute } from '../Common/utils/routesUtils';
import LoveSection from './LoveSection';
import { Help, ProPopup } from './components/';
import { HASURA_COLLABORATOR_TOKEN } from '../../constants';
import { UPDATE_CONSOLE_NOTIFICATIONS } from '../../telemetry/Actions';
const updateRequestHeaders = props => {
const { requestHeaders, dispatch } = props;
const collabTokenKey = Object.keys(requestHeaders).find(
hdr => hdr.toLowerCase() === HASURA_COLLABORATOR_TOKEN
);
if (collabTokenKey) {
const userID = getUserType(requestHeaders[collabTokenKey]);
if (props.console_opts && props.console_opts.console_notifications) {
if (!props.console_opts.console_notifications[userID]) {
dispatch({
type: UPDATE_CONSOLE_NOTIFICATIONS,
data: {
...props.console_opts.console_notifications,
[userID]: {
read: [],
date: null,
showBadge: true,
},
},
});
}
}
}
};
class Main extends React.Component {
constructor(props) {
@ -57,6 +87,7 @@ class Main extends React.Component {
componentDidMount() {
const { dispatch } = this.props;
updateRequestHeaders(this.props);
dispatch(loadServerVersion()).then(() => {
dispatch(featureCompatibilityInit());
@ -74,6 +105,18 @@ class Main extends React.Component {
dispatch(fetchServerConfig);
}
componentDidUpdate(prevProps) {
const prevHeaders = Object.keys(prevProps.requestHeaders);
const currHeaders = Object.keys(this.props.requestHeaders);
if (
prevHeaders.length !== currHeaders.length ||
prevHeaders.filter(hdr => !currHeaders.includes(hdr)).length
) {
updateRequestHeaders(this.props);
}
}
toggleProPopup = () => {
const { dispatch } = this.props;
dispatch(emitProClickedEvent({ open: !this.state.isPopUpOpen }));
@ -368,6 +411,7 @@ const mapStateToProps = (state, ownProps) => {
currentSchema: state.tables.currentSchema,
metadata: state.metadata,
console_opts: state.telemetry.console_opts,
requestHeaders: state.tables.dataHeaders,
};
};

View File

@ -1367,7 +1367,7 @@
position: absolute;
width: 17px;
top: 16px;
right: 8px;
right: 0.8rem;
border-radius: 50%;
user-select: none;
visibility: visible;
@ -1536,10 +1536,6 @@
.secureSectionText {
display: none;
}
.shareSection {
display: none;
}
}
@media (max-width: 1050px) {

View File

@ -511,15 +511,19 @@ const HasuraNotifications: React.FC<
let userType = 'admin';
if (dataHeaders?.[HASURA_COLLABORATOR_TOKEN]) {
const collabToken = dataHeaders[HASURA_COLLABORATOR_TOKEN];
const headerHasCollabToken = Object.keys(dataHeaders).find(
header => header.toLowerCase() === HASURA_COLLABORATOR_TOKEN
);
if (headerHasCollabToken) {
const collabToken = dataHeaders[headerHasCollabToken];
userType = getUserType(collabToken);
}
const previouslyReadState = React.useMemo(
() =>
console_opts?.console_notifications &&
console_opts?.console_notifications[userType].read,
console_opts?.console_notifications[userType]?.read,
[console_opts?.console_notifications, userType]
);
const showBadge = React.useMemo(
@ -639,7 +643,7 @@ const HasuraNotifications: React.FC<
useOnClickOutside([dropDownRef, wrapperRef], onClickOutside);
const onClickShareSection = () => {
const onClickNotificationButton = () => {
if (showBadge) {
if (console_opts?.console_notifications) {
let updatedState = {};
@ -718,11 +722,11 @@ const HasuraNotifications: React.FC<
return (
<>
<div
className={`${styles.shareSection} ${
className={`${styles.shareSection} ${styles.headerRightNavbarBtn} ${
isDropDownOpen ? styles.opened : ''
} dropdown-toggle`}
aria-expanded="false"
onClick={onClickShareSection}
onClick={onClickNotificationButton}
ref={wrapperRef}
>
<i className={`fa fa-bell ${styles.bellIcon}`} />
@ -750,7 +754,7 @@ const HasuraNotifications: React.FC<
<Button
title="Mark all as read"
onClick={onClickMarkAllAsRead}
disabled={!numberNotifications}
disabled={!numberNotifications || !consoleNotifications.length}
className={styles.markAllAsReadBtn}
>
mark all as read

View File

@ -149,81 +149,168 @@ const setPreReleaseNotificationOptOutInDB = () => (
return dispatch(setConsoleOptsInDB(options, successCb, errorCb));
};
// TODO: We could fetch the latest `read` state from the DB every time we
// open the notifications dropdown. That way we can achieve more consistent behavior for notifications.
// Alternatively, we could provide a refresh button that lets users refresh the state themselves.
const updateConsoleNotificationsState = (updatedState: NotificationsState) => {
return (
dispatch: ThunkDispatch<ReduxState, unknown, AnyAction>,
getState: GetReduxState
) => {
const url = Endpoints.schemaChange;
const currentNotifications = getState().main.consoleNotifications;
const restState = getState().telemetry.console_opts;
const headers = dataHeaders(getState);
let userType = 'admin';
if (headers?.[HASURA_COLLABORATOR_TOKEN]) {
const collabToken = headers[HASURA_COLLABORATOR_TOKEN];
userType = getUserType(collabToken);
}
let composedUpdatedState: ConsoleState['console_opts'] = {
...restState,
console_notifications: {
[userType]: updatedState,
},
};
if (userType !== 'admin') {
const currentState = restState?.console_notifications;
if (Object.keys(currentState ?? {}).length > 1) {
composedUpdatedState = {
...restState,
console_notifications: {
...currentState,
[userType]: updatedState,
},
};
}
}
if (currentNotifications && Array.isArray(currentNotifications)) {
if (isUpdateIDsEqual(currentNotifications, updatedState.read)) {
composedUpdatedState = {
...restState,
console_notifications: {
...restState?.console_notifications,
[userType]: {
read: 'all',
showBadge: false,
date: updatedState.date,
},
},
};
// update the localStorage var with all the notifications
// since all of them have now been marked as read
window.localStorage.setItem(
'notifications:data',
JSON.stringify(currentNotifications)
);
}
}
const updatedReadNotifications = getUpdateConsoleStateQuery(
composedUpdatedState
);
const options: RequestInit = {
credentials: globalCookiePolicy,
const getStateURL = Endpoints.query;
const getStateOptions: RequestInit = {
method: 'POST',
headers,
body: JSON.stringify(updatedReadNotifications),
body: JSON.stringify(getConsoleOptsQuery()),
headers: dataHeaders(getState),
credentials: globalCookiePolicy,
};
return dispatch(requestAction(url, options))
.then((data: any) => {
dispatch({
type: UPDATE_CONSOLE_NOTIFICATIONS,
data: data.returning[0].console_state.console_notifications,
});
// make a query to get the latest state from the DB prior to updating the read state for a user
return dispatch(requestAction(getStateURL, getStateOptions))
.then((data: Telemetry[]) => {
if (data?.length) {
const { console_state: current_console_state } = data[0];
let composedUpdatedState: ConsoleState['console_opts'] = {
...current_console_state,
console_notifications: {
...current_console_state?.console_notifications,
},
};
const url = Endpoints.query;
const currentNotifications = getState().main.consoleNotifications;
const headers = dataHeaders(getState);
let userType = 'admin';
const headerHasAdminToken = Object.keys(headers).find(
header => header.toLowerCase() === HASURA_COLLABORATOR_TOKEN
);
if (headerHasAdminToken) {
const collabToken = headers[headerHasAdminToken];
userType = getUserType(collabToken);
}
const dbReadState =
current_console_state?.console_notifications?.[userType]?.read;
let combinedReadState: NotificationsState['read'] = [];
if (
!dbReadState ||
dbReadState === 'default' ||
dbReadState === 'error'
) {
composedUpdatedState = {
...current_console_state,
console_notifications: {
...current_console_state?.console_notifications,
[userType]: updatedState,
},
};
} else if (dbReadState === 'all') {
if (updatedState.read === 'all') {
composedUpdatedState = {
...current_console_state,
console_notifications: {
...current_console_state?.console_notifications,
[userType]: {
read: 'all',
date: updatedState.date,
showBadge: false,
},
},
};
} else {
composedUpdatedState = {
...current_console_state,
console_notifications: {
...current_console_state?.console_notifications,
[userType]: updatedState,
},
};
}
} else {
if (typeof updatedState.read === 'string') {
combinedReadState = updatedState.read;
} else if (Array.isArray(updatedState.read)) {
// this is done to ensure consistency between the users' read
// state and the data present in the DB
combinedReadState = dbReadState
.concat(updatedState.read)
.reduce((acc: string[], val: string) => {
if (!acc.includes(val)) {
return [...acc, val];
}
return acc;
}, []);
}
composedUpdatedState = {
...current_console_state,
console_notifications: {
...current_console_state?.console_notifications,
[userType]: {
...updatedState,
read: combinedReadState,
},
},
};
}
if (
currentNotifications &&
Array.isArray(currentNotifications) &&
Array.isArray(combinedReadState)
) {
if (isUpdateIDsEqual(currentNotifications, combinedReadState)) {
composedUpdatedState = {
...current_console_state,
console_notifications: {
...current_console_state?.console_notifications,
[userType]: {
read: 'all',
showBadge: false,
date: updatedState.date,
},
},
};
// update the localStorage var with all the notifications
// since all of them have now been marked as read
window.localStorage.setItem(
'notifications:data',
JSON.stringify(currentNotifications)
);
}
}
const updatedReadNotifications = getUpdateConsoleStateQuery(
composedUpdatedState
);
const options: RequestInit = {
credentials: globalCookiePolicy,
method: 'POST',
headers,
body: JSON.stringify(updatedReadNotifications),
};
return dispatch(requestAction(url, options))
.then((retData: any) => {
dispatch({
type: UPDATE_CONSOLE_NOTIFICATIONS,
data: retData.returning[0].console_state.console_notifications,
});
})
.catch(error => {
console.error(
'There was an error in updating the read console notifications.',
error
);
return error;
});
}
})
.catch(error => {
.catch(err => {
console.error(
'There was an error in updating the console notifications.',
error
'There was an error in fetching the latest state from the DB.',
err
);
return error;
});
};
};
@ -242,12 +329,18 @@ const loadConsoleOpts = () => {
body: JSON.stringify(getConsoleOptsQuery()),
};
let userType = 'admin';
if (headers?.[HASURA_COLLABORATOR_TOKEN]) {
userType = headers[HASURA_COLLABORATOR_TOKEN];
const headerHasAdminToken = Object.keys(headers).find(
header => header.toLowerCase() === HASURA_COLLABORATOR_TOKEN
);
if (headerHasAdminToken) {
const collabToken = headers[headerHasAdminToken];
userType = getUserType(collabToken);
}
return dispatch(requestAction(url, options) as any).then(
return dispatch(requestAction(url, options)).then(
(data: Telemetry[]) => {
if (data.length) {
if (data?.length) {
const { hasura_uuid, console_state } = data[0];
dispatch({
@ -274,6 +367,20 @@ const loadConsoleOpts = () => {
},
},
});
} else if (
console_state.console_notifications &&
!console_state.console_notifications[userType]
) {
dispatch({
type: UPDATE_CONSOLE_NOTIFICATIONS,
data: {
[userType]: {
read: [],
date: null,
showBadge: true,
},
},
});
}
return Promise.resolve();
@ -353,7 +460,10 @@ const telemetryReducer = (
...state,
console_opts: {
...state.console_opts,
console_notifications: action.data,
console_notifications: {
...state.console_opts?.console_notifications,
...action.data,
},
},
};
default:
@ -369,4 +479,5 @@ export {
setPreReleaseNotificationOptOutInDB,
setTelemetryNotificationShownInDB,
updateConsoleNotificationsState,
UPDATE_CONSOLE_NOTIFICATIONS,
};

View File

@ -27,6 +27,40 @@ export type ConsoleState = {
hasura_uuid: string;
};
export type ApiExplorer = {
authApiExpanded: string;
currentTab: number;
headerFocus: boolean;
loading: boolean;
mode: string;
modalState: Record<string, string>;
explorerData: Record<string, string>;
displayedApi: DisplayedApiState;
};
export type DisplayedApiState = {
details: Record<string, string>;
id: string;
request: ApiExplorerRequest;
};
export type ApiExplorerRequest = {
bodyType: string;
headers: ApiExplorerHeader[];
headersInitialised: boolean;
method: string;
params: string;
url: string;
};
export type ApiExplorerHeader = {
key: string;
value: string;
isActive: boolean;
isNewHeader: boolean;
isDisabled: boolean;
};
// Redux Utils
export type ReduxState = {
tables: {
@ -43,6 +77,7 @@ export type ReduxState = {
consoleNotifications: ConsoleNotification[];
};
telemetry: ConsoleState;
apiexplorer: ApiExplorer;
};
export type ReduxAction = RAEvents | RouterAction;

View File

@ -1,28 +1,46 @@
# Table of Contents
# @hasura/metadata
`@hasura/metadata` contains TypeScript types for Hasura Metadata V2.
## Installation
```sh
yarn add @hasura/metadata # npm i @hasura/metadata
```
## Usage
```ts
import { HasuraMetadataV2, Action, ComputedField } from "@hasura/metadata"
```
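As a quick illustration of what the types give you, here is a minimal sketch (the `version`/`tables` shape follows the Metadata V2 format; `TableEntry` is the same type used in the SDK examples further below):

```ts
import { HasuraMetadataV2, TableEntry } from "@hasura/metadata"

// A hand-written metadata object that type-checks against HasuraMetadataV2
const metadata: HasuraMetadataV2 = {
  version: 2,
  tables: [],
}

// Track a table, with editor autocompletion for every field
const userTable: TableEntry = {
  table: { schema: "public", name: "user" },
}
metadata.tables.push(userTable)
```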
# Metadata SDK
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Introduction](#introduction)
- [How to use this (aka TL;DR)](#how-to-use-this-aka-tldr)
- [Demos](#demos)
- [TypeScript SDK](#typescript-sdk)
- [Type-Checking & Docs inside of Metadata YAML files](#type-checking--docs-inside-of-metadata-yaml-files)
- [TypeScript SDK](#typescript-sdk)
- [Type-Checking & Docs inside of Metadata YAML files](#type-checking--docs-inside-of-metadata-yaml-files)
- [SDK Usage Examples (TypeScript)](#sdk-usage-examples-typescript)
- [Extending the Generated Class Functionality](#extending-the-generated-class-functionality)
- [Programmatically Interacting with Metadata](#programmatically-interacting-with-metadata)
- [Extending the Generated Class Functionality](#extending-the-generated-class-functionality)
- [Programmatically Interacting with Metadata](#programmatically-interacting-with-metadata)
- [Generator Config File Options](#generator-config-file-options)
- [Test Config File Options](#test-config-file-options)
- [Programmatic Usage](#programmatic-usage)
- [Metadata IDE Type-Checking Integration](#metadata-ide-type-checking-integration)
- [VS Code](#vs-code)
- [Jetbrains](#jetbrains)
- [VS Code](#vs-code)
- [Jetbrains](#jetbrains)
# Introduction
## Introduction
This repo contains a script used to generate SDKs in various languages from either TypeScript or JSON Schema sources. The script is configurable and built to be consumed from something such as a GitHub Action or a git hook.
It is being used to generate SDKs for Hasura Metadata V2.
# How to use this (aka TL;DR)
## How to use this (aka TL;DR)
_**"I want to..."**_
@ -41,17 +59,17 @@ _**"I want to..."**_
- `yarn install` or `npm install`
- `yarn generate-types` or `npm run generate-types`
# Demos
## Demos
### TypeScript SDK
![](TypeScript-typecheck-demo.gif)
![](typescript-typecheck-demo.gif)
### Type-Checking & Docs inside of Metadata YAML files
![](json-schema-typecheck-demo.gif)
# SDK Usage Examples (TypeScript)
## SDK Usage Examples (TypeScript)
### Extending the Generated Class Functionality
@ -71,10 +89,10 @@ This class can be extended from another file to add extra functionality. Here is
```ts
// customMetadataConverter.ts
import fs from 'fs'
import { load, dump } from 'js-yaml'
import { createPatch } from 'diff'
import { detailedDiff } from 'deep-object-diff'
import fs from "fs"
import { load, dump } from "js-yaml"
import { createPatch } from "diff"
import { detailedDiff } from "deep-object-diff"
import {
Convert as _Convert,
TableEntry,
@ -82,7 +100,7 @@ import {
CustomTypes,
CronTrigger,
HasuraMetadataV2,
} from '../generated/HasuraMetadataV2'
} from "../generated/HasuraMetadataV2"
interface DiffOutput {
structuralDiff: object
@ -102,7 +120,7 @@ export class Convert extends _Convert {
public static diffJson = detailedDiff
public static clone(obj: any) {
if (obj == null || typeof obj != 'object') return obj
if (obj == null || typeof obj != "object") return obj
let temp = new obj.constructor()
for (var key in obj) {
if (obj.hasOwnProperty(key)) {
@ -116,7 +134,7 @@ export class Convert extends _Convert {
const originalYaml = Convert.metadataToYaml(before)
const updatedYaml = Convert.metadataToYaml(after)
const structuralDiff = Convert.diffJson(before, after)
const textDiff = Convert.diffYaml('', originalYaml, updatedYaml)
const textDiff = Convert.diffYaml("", originalYaml, updatedYaml)
return { structuralDiff, textDiff }
}
@ -149,22 +167,22 @@ Below is an example to demonstrate the common usecases you may encounter when wa
- Repeating the above process for `metadata.json` (could be `metadata.yaml` as well)
```ts
import { Convert } from './customMetadataConverter'
import { Convert } from "./customMetadataConverter"
import {
TableEntry,
Action,
CustomTypes,
HasuraMetadataV2,
} from '../generated/HasuraMetadataV2'
} from "../generated/HasuraMetadataV2"
// Read "tables.yaml" file as text from filesystem
const tablesMetadataFile = fs.readFileSync('./metadata/tables.yaml', 'utf8')
const tablesMetadataFile = fs.readFileSync("./metadata/tables.yaml", "utf8")
// Convert it to JSON object with type annotation using loadYAML utility
const tablesMetadata: TableEntry[] = Convert.loadYAML(tablesMetadataFile)
tablesMetadata.forEach(console.log)
// Read "actions.yaml" file as text from filesystem
const actionMetadataFile = fs.readFileSync('./metadata/actions.yaml', 'utf8')
const actionMetadataFile = fs.readFileSync("./metadata/actions.yaml", "utf8")
// Convert it to JSON object with type annotation using loadYAML utility
const actionMetadata: {
actions: Action[]
@ -175,17 +193,17 @@ console.log(actionMetadata.custom_types)
// Make a new table object
const newTable: TableEntry = {
table: { schema: 'public', name: 'user' },
table: { schema: "public", name: "user" },
select_permissions: [
{
role: 'user',
role: "user",
permission: {
limit: 100,
allow_aggregations: false,
columns: ['id', 'name', 'etc'],
computed_fields: ['my_computed_field'],
columns: ["id", "name", "etc"],
computed_fields: ["my_computed_field"],
filter: {
id: { _eq: 'X-Hasura-User-ID' },
id: { _eq: "X-Hasura-User-ID" },
},
},
},
@ -200,15 +218,15 @@ tablesMetadata.push(newTable)
// Generate a structural and text diff from the changes between original and now
const tableDiff = Convert.diff(originalTablesMetadata, tablesMetadata)
// Write the diffs to /diffs folder, will output "tables.json" and "tables.diff"
Convert.writeDiff({ folder: 'diffs', file: 'tables', diffs: tableDiff })
Convert.writeDiff({ folder: "diffs", file: "tables", diffs: tableDiff })
// Output the updated "tables.yaml" to the filesystem
fs.writeFileSync(
'./tables-updated.yaml',
"./tables-updated.yaml",
Convert.metadataToYAML(tablesMetadata)
)
// Read "metadata.json"
const metadataFile = fs.readFileSync('./metadata.json', 'utf-8')
const metadataFile = fs.readFileSync("./metadata.json", "utf-8")
// Convert.to<typeName> does runtime validation of the type
const allMetadata: HasuraMetadataV2 = Convert.toHasuraMetadataV2(metadataFile)
console.log(allMetadata)
@ -219,10 +237,10 @@ allMetadata.tables.push(newTable)
// Diff, write diff
const metadataDiff = Convert.diff(beforeMetadataChanges, allMetadata)
Convert.writeDiff({ folder: 'diffs', file: 'metadata', diffs: metadataDiff })
Convert.writeDiff({ folder: "diffs", file: "metadata", diffs: metadataDiff })
```
# Generator Config File Options
## Generator Config File Options
_Note: Run with `yarn generate-types`/`npm run generate-types`_
@ -242,11 +260,11 @@ selected_input_language: TypeScript
# Only the matching SELECTED INPUT LANGUAGE file expression will be used
input_files:
# Paths can be either a string, or an array of strings
JsonSchema: './src/types/**.schema.json'
TypeScript: ['./src/types/**.ts', './src/otherfolder/**.ts']
JsonSchema: "./src/types/**.schema.json"
TypeScript: ["./src/types/**.ts", "./src/otherfolder/**.ts"]
# Output file directory
output_directory: './generated'
output_directory: "./generated"
# Quicktype config per-language
# Config is an object of type "rendererOptions"
@ -269,7 +287,7 @@ quicktype_config:
# objective-c: ~
# pike: ~
python:
python-version: '3.7'
python-version: "3.7"
# ruby: ~
# rust: ~
schema: ~
@ -279,7 +297,7 @@ quicktype_config:
# just-types: true
```
# Test Config File Options
## Test Config File Options
_Note: Run with `yarn test`/`npm run test`_
@ -309,16 +327,16 @@ This is what the definition looks like:
```yaml
---
- typeDefinitionFile: './generated/HasuraMetadataV2.ts'
- typeDefinitionFile: "./generated/HasuraMetadataV2.ts"
jsonInputTests:
- files: './src/tests/**.json'
- files: "./src/tests/**.json"
# This gets called as "Convert.to(expectType)" -> e.g "Convert.toHasuraMetadataV2" in generated TS SDK
expectType: HasuraMetadataV2
```
![](test-output-sample.png)
# Programmatic Usage
## Programmatic Usage
The type generator can, in theory, run both as a CLI executable and as a library.
This allows for customizing behavior, e.g. for CI/CD pipelines. Here is one example:
@ -326,26 +344,26 @@ This allows for customizing behavior, IE for CI/CD pipelines. Here is one exampl
```ts
generateTypes()
.then((outputs) => {
console.log('Finished generateTypes(), outputs are', outputs)
console.log("Finished generateTypes(), outputs are", outputs)
for (let output of outputs) {
// This is the input file path
console.log('File:', output.file)
console.log("File:", output.file)
// This contains the generated text
console.log('Results:', output.results)
console.log("Results:", output.results)
}
})
.catch((err) => {
console.log('Got error', err)
console.log("Got error", err)
})
.finally(async () => {
// Convert the generated JSON Schema to YAML, for example
const generatedFolder = path.join(pathFromRoot, 'generated', '/')
const jsonSchemas = await glob(generatedFolder + '**.json')
const generatedFolder = path.join(pathFromRoot, "generated", "/")
const jsonSchemas = await glob(generatedFolder + "**.json")
jsonSchemas.forEach(jsonSchemaToYAML)
})
```
# Metadata IDE Type-Checking Integration
## Metadata IDE Type-Checking Integration
Ever tried (or wanted) to write Hasura Metadata YAML definitions by hand, but found yourself frequently pulled back to documentation for definitions, or fighting YAML's whitespace sensitivity? Well, no more!

View File

@ -1,7 +1,8 @@
{
"name": "",
"version": "1.0.0",
"version": "1.0.2",
"main": "index.js",
"private": false,
"license": "MIT",
"source": "lib/index.ts",
"types": "v2/index.d.ts",
@ -37,4 +38,4 @@
"ts-node/register"
]
}
}
}

View File

@ -279,7 +279,7 @@ Now, we can create a role ``user`` and add an insert validation rule as follows:
:alt: validation using permission: title cannot be empty
.. tab:: CLI
You can add roles and permissions in the ``tables.yaml`` file inside the ``metadata`` directory:
.. code-block:: yaml
@ -345,7 +345,7 @@ If we try to insert an article with ``title = ""``, we will get a ``permission-e
{
"errors": [
{
"message": "Check constraint violation. insert check constraint failed",
"message": "check constraint of an insert/update permission has failed",
"extensions": {
"path": "$.selectionSet.insert_article.args.objects",
"code": "permission-error"
@ -449,7 +449,7 @@ will receive a ``permission-error`` :
{
"errors": [
{
"message": "Check constraint violation. insert check constraint failed",
"message": "check constraint of an insert/update permission has failed",
"extensions": {
"path": "$.selectionSet.insert_article.args.objects",
"code": "permission-error"
@ -545,7 +545,7 @@ we get the following error message:
InsertAuthor(author: { name: "Thanos" }) {
id
}
}
}
:response:
{
"errors": [

View File

@ -5,7 +5,6 @@ BUILD_DIR ?= /build
BINARY ?= $(BUILD_DIR)/_cli_output/binaries/cli-hasura-linux-amd64
IMAGE_TAG ?= cli-migrations
BUILD_OUTPUT ?= $(BUILD_DIR)/_cli_migrations_output
CLI_EXT_MANIFEST_FILE ?= $(BUILD_DIR)/_cli_ext_output/manifest-dev.yaml
SERVER_BUILD_OUTPUT := $(BUILD_DIR)/_server_output
.PHONY: load-server-image
@ -30,8 +29,7 @@ test-cli-migrations-v1:
.ONESHELL:
build-cli-migrations-v2:
cd v2
cp ${BINARY} .
cp ${CLI_EXT_MANIFEST_FILE} manifest.yaml
./prepare_docker_context.sh
docker build -t '${IMAGE_TAG}-v2' .
docker save -o '$(BUILD_OUTPUT)/v2.tar' '$(IMAGE_TAG)-v2'
@ -43,4 +41,4 @@ test-cli-migrations-v2:
./test-upgrade-from-latest-release.sh
.PHONY: all
all: load-server-image build-cli-migrations-v1 build-cli-migrations-v2 test-cli-migrations-v1 test-cli-migrations-v2
all: load-server-image build-cli-migrations-v1 build-cli-migrations-v2 test-cli-migrations-v1 test-cli-migrations-v2

View File

@ -15,10 +15,10 @@ ENV HASURA_GRAPHQL_SHOW_UPDATE_NOTIFICATION=false
COPY docker-entrypoint.sh /bin/
COPY cli-hasura-linux-amd64 /bin/hasura-cli
COPY manifest.yaml /tmp/manifest.yaml
RUN chmod +x /bin/hasura-cli \
&& hasura-cli plugins install cli-ext --manifest-file /tmp/manifest.yaml \
&& rm /tmp/manifest.yaml
COPY manifest.yaml /opt/manifest.yaml
COPY cli-ext-hasura-linux.tar.gz /opt/cli-ext/cli-ext-hasura-linux.tar.gz
COPY ./hasura-home-dir-tmpl /opt/hasura-home-directory
RUN chmod +x /bin/hasura-cli
# set an env var to let the cli know that
# it is running in server environment

View File

@ -13,6 +13,11 @@ DEFAULT_MIGRATIONS_DIR="/hasura-migrations"
DEFAULT_METADATA_DIR="/hasura-metadata"
TEMP_PROJECT_DIR="/tmp/hasura-project"
# install cli-ext plugin
log "installing cli-ext plugin"
hasura-cli plugins install cli-ext --manifest-file /opt/manifest.yaml
cp -r /opt/hasura-home-directory/plugins/index ~/.hasura/plugins/index
# configure the target database for migrations
if [ ${HASURA_GRAPHQL_MIGRATIONS_DATABASE_ENV_VAR} ]; then
log "migrations-startup" "database url for migrations is set by $HASURA_GRAPHQL_MIGRATIONS_DATABASE_ENV_VAR"
@ -95,7 +100,7 @@ if [ -d "$HASURA_GRAPHQL_METADATA_DIR" ]; then
echo "version: 2" > config.yaml
echo "endpoint: http://localhost:$HASURA_GRAPHQL_MIGRATIONS_SERVER_PORT" >> config.yaml
echo "metadata_directory: metadata" >> config.yaml
hasura-cli metadata apply
hasura-cli metadata apply
else
log "migrations-apply" "directory $HASURA_GRAPHQL_METADATA_DIR does not exist, skipping metadata"
fi

View File

@ -0,0 +1,29 @@
#!/usr/bin/env bash
set -evo pipefail
# check if yq is installed
if ! command -v yq &> /dev/null
then
curl -LO https://github.com/mikefarah/yq/releases/download/3.3.2/yq_linux_amd64 && chmod +x yq_linux_amd64 && mv yq_linux_amd64 /usr/local/bin/yq
fi
BUILD_DIR=/build
BINARY=${BUILD_DIR}/_cli_output/binaries/cli-hasura-linux-amd64
CLI_EXT_BINARY_NAME=cli-ext-hasura-linux.tar.gz
CLI_EXT_LINUX_BINARY_PATH=${BUILD_DIR}/_cli_ext_output/${CLI_EXT_BINARY_NAME}
CLI_EXT_MANIFEST_FILE=${BUILD_DIR}/_cli_ext_output/manifest.yaml
TEMPLATE_CLI_EXT_INDEX_DIR='hasura-home-dir-tmpl/plugins/index/plugins/cli-ext'
cp ${BINARY} .
# copy linux binary
cp ${CLI_EXT_LINUX_BINARY_PATH} .
# edit the manifest file's cli-ext linux uri to file:///opt/cli-ext/cli-ext-hasura-linux.tar.gz
yq write -i ${CLI_EXT_MANIFEST_FILE} "platforms[0].uri" "file:///opt/cli-ext/${CLI_EXT_BINARY_NAME}"
cp ${CLI_EXT_MANIFEST_FILE} manifest.yaml
# edit hasura home template directory
CLI_EXT_VERSION=$(yq read manifest.yaml version)
mkdir -p ${TEMPLATE_CLI_EXT_INDEX_DIR}/${CLI_EXT_VERSION}
cp manifest.yaml ${TEMPLATE_CLI_EXT_INDEX_DIR}/${CLI_EXT_VERSION}/manifest.yaml

View File

@ -46,7 +46,7 @@ done <<< $lines
# delete notify triggers
sed -i -E '/^CREATE TRIGGER "?notify_hasura_.+"? AFTER \w+ ON .+ FOR EACH ROW EXECUTE PROCEDURE "?hdb_views"?\."?notify_hasura_.+"?\(\);$/d' $filename
sed -i -E '/^CREATE TRIGGER "?notify_hasura_.+"? AFTER \w+ ON .+ FOR EACH ROW EXECUTE PROCEDURE "?hdb_catalog"?\."?notify_hasura_.+"?\(\);$/d' $filename
# delete empty lines

server/.gitignore
View File

@ -36,3 +36,6 @@ random*.sql
# example related
sample/data
# This is ignored so that everyone can have their own hie options
hie.yaml

View File

@ -101,6 +101,14 @@
- warn: {lhs: "onNothing x (pure y)", rhs: "pure (fromMaybe y x)"}
- warn: {lhs: "onLeft x (return . f)", rhs: "return (either f id x)"}
- warn: {lhs: "onLeft x (pure . f)", rhs: "pure (either f id x)"}
- warn: {lhs: "case x of {Right a -> pure a; Left c -> d}", rhs: "onLeft x (\\ c -> d)"}
- warn: {lhs: "case x of {Left c -> d; Right a -> pure a}", rhs: "onLeft x (\\ c -> d)"}
- warn: {lhs: "case x of {Right a -> return a; Left c -> d}", rhs: "onLeft x (\\ c -> d)"}
- warn: {lhs: "case x of {Left c -> d; Right a -> return a}", rhs: "onLeft x (\\ c -> d)"}
- warn: {lhs: "case x of {Nothing -> a; Just b -> pure b}", rhs: "onNothing x a"}
- warn: {lhs: "case x of {Just b -> pure b; Nothing -> a}", rhs: "onNothing x a"}
- warn: {lhs: "case x of {Nothing -> a; Just b -> return b}", rhs: "onNothing x a"}
- warn: {lhs: "case x of {Just b -> return b; Nothing -> a}", rhs: "onNothing x a"}
- group:
name: data-text-extended

View File

@ -58,6 +58,20 @@ To set up the project configuration to coincide with the testing scripts below,
$ ln -s cabal.project.dev-sh.local cabal.project.local
### IDE Support
You may want to use [hls](https://github.com/haskell/haskell-language-server)/[ghcide](https://github.com/haskell/ghcide) if your editor has LSP support. A sample configuration has been provided which can be used as follows:
```
ln -s sample.hie.yaml hie.yaml
```
If you have to customise any of the options for ghcide/hls, you should instead copy the sample file and make the necessary changes in the `hie.yaml` file. Note that `hie.yaml` is gitignored, so the changes will be specific to your machine.
```
cp sample.hie.yaml hie.yaml
```
### Run and test via `dev.sh`
The `dev.sh` script in the top-level `scripts/` directory is a turnkey solution to build, run, and

View File

@ -372,29 +372,29 @@ library
, Hasura.Server.Auth.JWT.Logging
, Hasura.RQL.Instances
, Hasura.RQL.Types
, Hasura.RQL.Types.SchemaCache
, Hasura.RQL.Types.Table
, Hasura.RQL.Types.SchemaCache.Build
, Hasura.RQL.Types.SchemaCacheTypes
, Hasura.RQL.Types.Function
, Hasura.RQL.Types.Action
, Hasura.RQL.Types.Catalog
, Hasura.RQL.Types.Column
, Hasura.RQL.Types.Common
, Hasura.RQL.Types.ComputedField
, Hasura.RQL.Types.DML
, Hasura.RQL.Types.CustomTypes
, Hasura.RQL.Types.Error
, Hasura.RQL.Types.EventTrigger
, Hasura.RQL.Types.Function
, Hasura.RQL.Types.Metadata
, Hasura.RQL.Types.Permission
, Hasura.RQL.Types.QueryCollection
, Hasura.RQL.Types.Action
, Hasura.RQL.Types.RemoteSchema
, Hasura.RQL.Types.Relationship
, Hasura.RQL.Types.RemoteRelationship
, Hasura.RQL.Types.RemoteSchema
, Hasura.RQL.Types.ScheduledTrigger
, Hasura.RQL.Types.SchemaCache
, Hasura.RQL.Types.SchemaCache.Build
, Hasura.RQL.Types.SchemaCacheTypes
, Hasura.RQL.Types.Table
, Hasura.RQL.DDL.Action
, Hasura.RQL.DDL.ComputedField
, Hasura.RQL.DDL.CustomTypes
, Hasura.RQL.Types.CustomTypes
, Hasura.RQL.DDL.Deps
, Hasura.RQL.DDL.Headers
, Hasura.RQL.DDL.Metadata
@ -405,7 +405,6 @@ library
, Hasura.RQL.DDL.QueryCollection
, Hasura.RQL.DDL.Relationship
, Hasura.RQL.DDL.Relationship.Rename
, Hasura.RQL.DDL.Relationship.Types
, Hasura.RQL.DDL.RemoteRelationship
, Hasura.RQL.DDL.RemoteRelationship.Validate
, Hasura.RQL.DDL.RemoteSchema
@ -421,7 +420,6 @@ library
, Hasura.RQL.DDL.Schema.Function
, Hasura.RQL.DDL.Schema.Rename
, Hasura.RQL.DDL.Schema.Table
, Hasura.RQL.DDL.Utils
, Hasura.RQL.DDL.EventTrigger
, Hasura.RQL.DDL.ScheduledTrigger
, Hasura.RQL.DML.Count
@ -429,9 +427,11 @@ library
, Hasura.RQL.DML.Insert
, Hasura.RQL.DML.Internal
, Hasura.RQL.DML.Update
, Hasura.RQL.DML.Types
, Hasura.RQL.IR.BoolExp
, Hasura.RQL.IR.Delete
, Hasura.RQL.IR.Insert
, Hasura.RQL.IR.OrderBy
, Hasura.RQL.IR.RemoteJoin
, Hasura.RQL.IR.Returning
, Hasura.RQL.IR.Select
@ -455,16 +455,17 @@ library
, Hasura.GraphQL.Context
, Hasura.GraphQL.Parser
, Hasura.GraphQL.Parser.Class
, Hasura.GraphQL.Parser.Class.Parse
, Hasura.GraphQL.Parser.Collect
, Hasura.GraphQL.Parser.Column
, Hasura.GraphQL.Parser.Internal.Parser
, Hasura.GraphQL.Parser.Internal.Types
, Hasura.GraphQL.Parser.Monad
, Hasura.GraphQL.Parser.Schema
, Hasura.GraphQL.Schema
, Hasura.GraphQL.Schema.Action
, Hasura.GraphQL.Schema.BoolExp
, Hasura.GraphQL.Schema.Common
, Hasura.GraphQL.Schema.Insert
, Hasura.GraphQL.Schema.Introspect
, Hasura.GraphQL.Schema.Mutation
, Hasura.GraphQL.Schema.OrderBy

server/sample.hie.yaml
View File

@ -0,0 +1,10 @@
cradle:
cabal:
- path: "./src-lib"
component: "lib:graphql-engine"
- path: "./src-exec"
component: "exe:graphql-engine"
- path: "./src-test"
component: "test:graphql-engine-tests"
- path: "./src-bench-cache"
component: "bench:cache"

View File

@ -10,7 +10,7 @@ import Data.Time.Clock.POSIX (getPOSIXTime)
import Hasura.App
import Hasura.Logging (Hasura)
import Hasura.Prelude
import Hasura.RQL.DDL.Metadata (fetchMetadata)
import Hasura.RQL.DDL.Metadata (fetchMetadataFromHdbTables)
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.Server.Init
@ -24,8 +24,8 @@ import qualified Data.Environment as Env
import qualified Database.PG.Query as Q
import qualified Hasura.Tracing as Tracing
import qualified System.Exit as Sys
import qualified System.Posix.Signals as Signals
import qualified System.Metrics as EKG
import qualified System.Posix.Signals as Signals
main :: IO ()
@ -44,17 +44,17 @@ runApp env (HGEOptionsG rci hgeCmd) =
withVersion $$(getVersionFromEnvironment) $ case hgeCmd of
HCServe serveOptions -> do
(initCtx, initTime) <- initialiseCtx env hgeCmd rci
ekgStore <- liftIO do
s <- EKG.newStore
EKG.registerGcMetrics s
let getTimeMs :: IO Int64
getTimeMs = (round . (* 1000)) `fmap` getPOSIXTime
EKG.registerCounter "ekg.server_timestamp_ms" getTimeMs s
pure s
let shutdownApp = return ()
-- Catches the SIGTERM signal and initiates a graceful shutdown.
-- Graceful shutdown for regular HTTP requests is already implemented in
@ -69,7 +69,7 @@ runApp env (HGEOptionsG rci hgeCmd) =
HCExport -> do
(initCtx, _) <- initialiseCtx env hgeCmd rci
res <- runTx' initCtx fetchMetadata Q.ReadCommitted
res <- runTx' initCtx fetchMetadataFromHdbTables Q.ReadCommitted
either (printErrJExit MetadataExportError) printJSON res
HCClean -> do

View File

@ -339,7 +339,6 @@ runHGEServer env ServeOptions{..} InitCtx{..} pgExecCtx initTime shutdownApp pos
_idleGCThread <- C.forkImmortal "ourIdleGC" logger $ liftIO $
ourIdleGC logger (seconds 0.3) (seconds 10) (seconds 60)
HasuraApp app cacheRef cacheInitTime stopWsServer <- flip onException (flushLogger loggerCtx) $
mkWaiApp env
soTxIso
@ -364,6 +363,7 @@ runHGEServer env ServeOptions{..} InitCtx{..} pgExecCtx initTime shutdownApp pos
_icSchemaCache
ekgStore
soConnectionOptions
soWebsocketKeepAlive
-- log inconsistent schema objects
inconsObjs <- scInconsistentObjs <$> liftIO (getSCFromRef cacheRef)
@ -429,7 +429,7 @@ runHGEServer env ServeOptions{..} InitCtx{..} pgExecCtx initTime shutdownApp pos
, eventQueueThread
, scheduledEventsThread
, cronEventsThread
] <> maybe [] pure telemetryThread
] <> onNothing telemetryThread []
finishTime <- liftIO Clock.getCurrentTime
let apiInitTime = realToFrac $ Clock.diffUTCTime finishTime initTime

View File

@ -49,10 +49,10 @@ import Hasura.Session
type MutationRemoteJoinCtx = (HTTP.Manager, [N.Header], UserInfo)
data Mutation (b :: Backend)
data Mutation (b :: BackendType)
= Mutation
{ _mTable :: !QualifiedTable
, _mQuery :: !(S.CTE, DS.Seq Q.PrepArg)
, _mQuery :: !(MutationCTE, DS.Seq Q.PrepArg)
, _mOutput :: !(MutationOutput b)
, _mCols :: ![ColumnInfo b]
, _mRemoteJoins :: !(Maybe (RemoteJoins b, MutationRemoteJoinCtx))
@ -62,7 +62,7 @@ data Mutation (b :: Backend)
mkMutation
:: Maybe MutationRemoteJoinCtx
-> QualifiedTable
-> (S.CTE, DS.Seq Q.PrepArg)
-> (MutationCTE, DS.Seq Q.PrepArg)
-> MutationOutput 'Postgres
-> [ColumnInfo 'Postgres]
-> Bool
@ -97,10 +97,7 @@ mutateAndReturn
-> Mutation 'Postgres
-> m EncJSON
mutateAndReturn env (Mutation qt (cte, p) mutationOutput allCols remoteJoins strfyNum) =
executeMutationOutputQuery env sqlQuery (toList p) remoteJoins
where
sqlQuery = Q.fromBuilder $ toSQL $
mkMutationOutputExp qt allCols Nothing cte mutationOutput strfyNum
executeMutationOutputQuery env qt allCols Nothing cte mutationOutput strfyNum (toList p) remoteJoins
execUpdateQuery
@ -116,7 +113,7 @@ execUpdateQuery
-> (AnnUpd 'Postgres, DS.Seq Q.PrepArg)
-> m EncJSON
execUpdateQuery env strfyNum remoteJoinCtx (u, p) =
runMutation env $ mkMutation remoteJoinCtx (uqp1Table u) (updateCTE, p)
runMutation env $ mkMutation remoteJoinCtx (uqp1Table u) (MCCheckConstraint updateCTE, p)
(uqp1Output u) (uqp1AllCols u) strfyNum
where
updateCTE = mkUpdateCTE u
@ -134,10 +131,10 @@ execDeleteQuery
-> (AnnDel 'Postgres, DS.Seq Q.PrepArg)
-> m EncJSON
execDeleteQuery env strfyNum remoteJoinCtx (u, p) =
runMutation env $ mkMutation remoteJoinCtx (dqp1Table u) (deleteCTE, p)
runMutation env $ mkMutation remoteJoinCtx (dqp1Table u) (MCDelete delete, p)
(dqp1Output u) (dqp1AllCols u) strfyNum
where
deleteCTE = mkDeleteCTE u
delete = mkDelete u
execInsertQuery
:: ( HasVersion
@ -152,7 +149,7 @@ execInsertQuery
-> m EncJSON
execInsertQuery env strfyNum remoteJoinCtx (u, p) =
runMutation env
$ mkMutation remoteJoinCtx (iqp1Table u) (insertCTE, p)
$ mkMutation remoteJoinCtx (iqp1Table u) (MCCheckConstraint insertCTE, p)
(iqp1Output u) (iqp1AllCols u) strfyNum
where
insertCTE = mkInsertCTE u
@ -186,41 +183,67 @@ mutateAndSel
mutateAndSel env (Mutation qt q mutationOutput allCols remoteJoins strfyNum) = do
-- Perform mutation and fetch unique columns
MutateResp _ columnVals <- liftTx $ mutateAndFetchCols qt allCols q strfyNum
selCTE <- mkSelCTEFromColVals qt allCols columnVals
let selWith = mkMutationOutputExp qt allCols Nothing selCTE mutationOutput strfyNum
select <- mkSelectExpFromColumnValues qt allCols columnVals
-- Perform select query and fetch returning fields
executeMutationOutputQuery env (Q.fromBuilder $ toSQL selWith) [] remoteJoins
executeMutationOutputQuery env qt allCols Nothing
(MCSelectValues select) mutationOutput strfyNum [] remoteJoins
withCheckPermission :: (MonadError QErr m) => m (a, Bool) -> m a
withCheckPermission sqlTx = do
(rawResponse, checkConstraint) <- sqlTx
unless checkConstraint $ throw400 PermissionError $
"check constraint of an insert/update permission has failed"
pure rawResponse
executeMutationOutputQuery
::
:: forall m.
( HasVersion
, MonadTx m
, MonadIO m
, Tracing.MonadTrace m
)
=> Env.Environment
-> Q.Query -- ^ SQL query
-> QualifiedTable
-> [ColumnInfo 'Postgres]
-> Maybe Int
-> MutationCTE
-> MutationOutput 'Postgres
-> Bool
-> [Q.PrepArg] -- ^ Prepared params
-> Maybe (RemoteJoins 'Postgres, MutationRemoteJoinCtx) -- ^ Remote joins context
-> m EncJSON
executeMutationOutputQuery env query prepArgs = \case
Nothing ->
runIdentity . Q.getRow
-- See Note [Prepared statements in Mutations]
<$> liftTx (Q.rawQE dmlTxErrorHandler query prepArgs False)
Just (remoteJoins, (httpManager, reqHeaders, userInfo)) ->
executeQueryWithRemoteJoins env httpManager reqHeaders userInfo query prepArgs remoteJoins
executeMutationOutputQuery env qt allCols preCalAffRows cte mutOutput strfyNum prepArgs maybeRJ = do
let queryTx :: Q.FromRes a => m a
queryTx = do
let selectWith = mkMutationOutputExp qt allCols preCalAffRows cte mutOutput strfyNum
query = Q.fromBuilder $ toSQL selectWith
-- See Note [Prepared statements in Mutations]
liftTx (Q.rawQE dmlTxErrorHandler query prepArgs False)
rawResponse <-
if checkPermissionRequired cte
then withCheckPermission $ Q.getRow <$> queryTx
else (runIdentity . Q.getRow) <$> queryTx
case maybeRJ of
Nothing -> pure $ encJFromLBS rawResponse
Just (remoteJoins, (httpManager, reqHeaders, userInfo)) ->
processRemoteJoins env httpManager reqHeaders userInfo rawResponse remoteJoins
mutateAndFetchCols
:: QualifiedTable
-> [ColumnInfo 'Postgres]
-> (S.CTE, DS.Seq Q.PrepArg)
-> (MutationCTE, DS.Seq Q.PrepArg)
-> Bool
-> Q.TxE QErr (MutateResp TxtEncodedPGVal)
mutateAndFetchCols qt cols (cte, p) strfyNum =
Q.getAltJ . runIdentity . Q.getRow
-- See Note [Prepared statements in Mutations]
<$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sql) (toList p) False
mutateAndFetchCols qt cols (cte, p) strfyNum = do
let mutationTx :: Q.FromRes a => Q.TxE QErr a
mutationTx =
-- See Note [Prepared statements in Mutations]
Q.rawQE dmlTxErrorHandler sqlText (toList p) False
if checkPermissionRequired cte
then withCheckPermission $ (first Q.getAltJ . Q.getRow) <$> mutationTx
else (Q.getAltJ . runIdentity . Q.getRow) <$> mutationTx
where
aliasIdentifier = Identifier $ qualifiedObjectToText qt <> "__mutation_result"
tabFrom = FromIdentifier aliasIdentifier
@ -228,9 +251,12 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
selFlds = flip map cols $
\ci -> (fromPGCol $ pgiColumn ci, mkAnnColumnFieldAsText ci)
sql = toSQL selectWith
selectWith = S.SelectWith [(S.Alias aliasIdentifier, cte)] select
select = S.mkSelect {S.selExtr = [S.Extractor extrExp Nothing]}
sqlText = Q.fromBuilder $ toSQL selectWith
selectWith = S.SelectWith [(S.Alias aliasIdentifier, getMutationCTE cte)] select
select = S.mkSelect { S.selExtr = S.Extractor extrExp Nothing
: bool [] [S.Extractor checkErrExp Nothing] (checkPermissionRequired cte)
}
checkErrExp = mkCheckErrorExp aliasIdentifier
extrExp = S.applyJsonBuildObj
[ S.SELit "affected_rows", affRowsSel
, S.SELit "returning_columns", colSel

View File

@ -8,12 +8,14 @@ module Hasura.Backends.Postgres.Execute.RemoteJoin
, FieldPath(..)
, RemoteJoin(..)
, executeQueryWithRemoteJoins
, processRemoteJoins
) where
import Hasura.Prelude
import qualified Data.Aeson as A
import qualified Data.Aeson.Ordered as AO
import qualified Data.ByteString.Lazy as BL
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.Extended as Map
@ -64,9 +66,27 @@ executeQueryWithRemoteJoins
-> RemoteJoins 'Postgres
-> m EncJSON
executeQueryWithRemoteJoins env manager reqHdrs userInfo q prepArgs rjs = do
-- Step 1: Perform the query on database and fetch the response
-- Perform the query on the database and fetch the response
pgRes <- runIdentity . Q.getRow <$> Tracing.trace "Postgres" (liftTx (Q.rawQE dmlTxErrorHandler q prepArgs True))
jsonRes <- either (throw500 . T.pack) pure $ AO.eitherDecode pgRes
-- Process remote joins in the response
processRemoteJoins env manager reqHdrs userInfo pgRes rjs
processRemoteJoins
:: ( HasVersion
, MonadTx m
, MonadIO m
, Tracing.MonadTrace m
)
=> Env.Environment
-> HTTP.Manager
-> [N.Header]
-> UserInfo
-> BL.ByteString
-> RemoteJoins 'Postgres
-> m EncJSON
processRemoteJoins env manager reqHdrs userInfo pgRes rjs = do
-- Step 1: Decode the given bytestring as a JSON value
jsonRes <- onLeft (AO.eitherDecode pgRes) (throw500 . T.pack)
-- Step 2: Traverse through the JSON obtained in the above step and generate a composite JSON value with remote joins
compositeJson <- traverseQueryResponseJSON rjMap jsonRes
let remoteJoins = collectRemoteFields compositeJson
@ -101,7 +121,7 @@ getCounter = do
pure c
parseGraphQLName :: (MonadError QErr m) => Text -> m G.Name
parseGraphQLName txt = maybe (throw400 RemoteSchemaError $ errMsg) pure $ G.mkName txt
parseGraphQLName txt = onNothing (G.mkName txt) (throw400 RemoteSchemaError $ errMsg)
where
errMsg = txt <> " is not a valid GraphQL name"
@ -405,69 +425,45 @@ fetchRemoteJoinFields
-> m AO.Object
fetchRemoteJoinFields env manager reqHdrs userInfo remoteJoins = do
results <- forM (Map.toList remoteSchemaBatch) $ \(rsi, batch) -> do
let batchList = toList batch
gqlReq = fieldsToRequest G.OperationTypeQuery
(map _rjfField batchList)
let gqlReq = fieldsToRequest G.OperationTypeQuery $ _rjfField <$> batch
gqlReqUnparsed = GQLQueryText . G.renderExecutableDoc . G.ExecutableDocument . unGQLExecDoc <$> gqlReq
-- NOTE: discard remote headers (for now):
(_, _, respBody) <- execRemoteGQ' env manager userInfo reqHdrs gqlReqUnparsed rsi G.OperationTypeQuery
case AO.eitherDecode respBody of
Left e -> throw500 $ "Remote server response is not valid JSON: " <> T.pack e
Right r -> do
respObj <- either throw500 pure $ AO.asObject r
respObj <- onLeft (AO.asObject r) throw500
let errors = AO.lookup "errors" respObj
if | isNothing errors || errors == Just AO.Null ->
case AO.lookup "data" respObj of
Nothing -> throw400 Unexpected "\"data\" field not found in remote response"
Just v -> either throw500 pure $ AO.asObject v
Just v -> onLeft (AO.asObject v) throw500
| otherwise ->
throwError (err400 Unexpected "Errors from remote server")
{qeInternal = Just $ A.object ["errors" A..= (AO.fromOrdered <$> errors)]}
either (throw500 . T.pack) pure $ foldM AO.safeUnion AO.empty results
onLeft (foldM AO.safeUnion AO.empty results) (throw500 . T.pack)
where
remoteSchemaBatch = Map.groupOnNE _rjfRemoteSchema remoteJoins
fieldsToRequest :: G.OperationType -> [G.Field G.NoFragments Variable] -> GQLReqParsed
fieldsToRequest opType gFields =
let variableInfos = Just <$> foldl Map.union mempty $ Map.elems $ fmap collectVariables $ G._fArguments $ head gFields
gFields' = map (G.fmapFieldFragment G.inline . convertFieldWithVariablesToName) gFields
in
case variableInfos of
Nothing ->
GQLReq
{ _grOperationName = Nothing
, _grQuery =
GQLExecDoc
[ G.ExecutableDefinitionOperation
(G.OperationDefinitionTyped
( emptyOperationDefinition
{ G._todSelectionSet = map G.SelectionField gFields'
}
)
)
]
, _grVariables = Nothing
}
Just vars' ->
GQLReq
{ _grOperationName = Nothing
, _grQuery =
GQLExecDoc
[ G.ExecutableDefinitionOperation
(G.OperationDefinitionTyped
( emptyOperationDefinition
{ G._todSelectionSet = map G.SelectionField gFields'
, G._todVariableDefinitions = map fst $ Map.toList vars'
}
)
)
]
, _grVariables = Just $ Map.fromList
(map (\(varDef, val) -> (G._vdName varDef, val)) $ Map.toList vars')
}
fieldsToRequest :: G.OperationType -> NonEmpty (G.Field G.NoFragments Variable) -> GQLReqParsed
fieldsToRequest opType gFields@(headField :| _) =
let variableInfos =
-- only the `headField` is used for collecting the variables here because
-- the variable information of all the fields will be the same.
-- For example:
-- {
-- author {
-- name
-- remote_relationship (extra_arg: $extra_arg)
-- }
-- }
--
-- If there are 10 authors, then there are 10 fields that will be requested
-- each containing exactly the same variable info.
foldl Map.union mempty $ Map.elems $ fmap collectVariables $ G._fArguments $ headField
gFields' = NE.toList $ NE.map (G.fmapFieldFragment G.inline . convertFieldWithVariablesToName) gFields
in mkGQLRequest gFields' variableInfos
where
emptyOperationDefinition =
G.TypedOperationDefinition {
@ -475,7 +471,30 @@ fetchRemoteJoinFields env manager reqHdrs userInfo remoteJoins = do
, G._todName = Nothing
, G._todVariableDefinitions = []
, G._todDirectives = []
, G._todSelectionSet = [] }
, G._todSelectionSet = []
}
mkGQLRequest fields variableInfos =
let variableValues =
if (Map.null variableInfos)
then Nothing
else Just $ Map.fromList (map (\(varDef, val) -> (G._vdName varDef, val)) $ Map.toList variableInfos)
in
GQLReq
{ _grOperationName = Nothing
, _grQuery =
GQLExecDoc
[ G.ExecutableDefinitionOperation
(G.OperationDefinitionTyped
( emptyOperationDefinition
{ G._todSelectionSet = map G.SelectionField fields
, G._todVariableDefinitions = map fst $ Map.toList variableInfos
}
)
)
]
, _grVariables = variableValues
}
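-- To make the batching concrete, a hedged illustration (not from the
-- source) of the request built above for the author example in the note:
-- all sibling remote fields are sent in a single operation and share the
-- one set of variable definitions collected from the head field.
--
--   query ($extra_arg: <type>!) {
--     remote_relationship_1: remote_relationship(extra_arg: $extra_arg) { ... }
--     remote_relationship_2: remote_relationship(extra_arg: $extra_arg) { ... }
--     ...
--   }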
-- | Replace 'RemoteJoinField' in composite JSON with it's json value from remote server response.
replaceRemoteFields
@ -574,10 +593,9 @@ createArguments
-> RemoteArguments
-> m (HashMap G.Name (G.Value Void))
createArguments variables (RemoteArguments arguments) =
either
(throw400 Unexpected . \errors -> "Found errors: " <> commaSeparated errors)
pure
onLeft
(toEither (substituteVariables variables arguments))
(throw400 Unexpected . \errors -> "Found errors: " <> commaSeparated errors)
-- | Substitute values in the argument list.
substituteVariables


@ -2,17 +2,18 @@ module Hasura.Backends.Postgres.SQL.DML where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Text.Builder as TB
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Text.Builder as TB
import Data.String (fromString)
import Data.String (fromString)
import Data.Text.Extended
import Language.Haskell.TH.Syntax (Lift)
import Language.Haskell.TH.Syntax (Lift)
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Incremental (Cacheable)
import Hasura.Incremental (Cacheable)
import Hasura.SQL.Types
@ -84,6 +85,12 @@ instance ToSQL OrderType where
toSQL OTAsc = "ASC"
toSQL OTDesc = "DESC"
instance J.FromJSON OrderType where
parseJSON = J.genericParseJSON $ J.defaultOptions{J.constructorTagModifier = J.snakeCase . drop 2}
instance J.ToJSON OrderType where
toJSON = J.genericToJSON $ J.defaultOptions{J.constructorTagModifier = J.snakeCase . drop 2}
data NullsOrder
= NFirst
| NLast
@ -96,6 +103,12 @@ instance ToSQL NullsOrder where
toSQL NFirst = "NULLS FIRST"
toSQL NLast = "NULLS LAST"
instance J.FromJSON NullsOrder where
parseJSON = J.genericParseJSON $ J.defaultOptions{J.constructorTagModifier = J.snakeCase . drop 1}
instance J.ToJSON NullsOrder where
toJSON = J.genericToJSON $ J.defaultOptions{J.constructorTagModifier = J.snakeCase . drop 1}
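-- A quick sanity sketch (assumed, not in the source) of the encodings
-- the instances above produce: the tag modifiers strip the constructor
-- prefixes ("OT" via drop 2, "N" via drop 1) and snake-case the rest.
--
-- >>> J.encode OTAsc
-- "\"asc\""
-- >>> J.encode NLast
-- "\"last\""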
instance ToSQL OrderByExp where
toSQL (OrderByExp l) =
"ORDER BY" <~> (", " <+> toList l)


@ -24,7 +24,7 @@ type OpRhsParser m v =
-- | Represents a reference to a Postgres column, possibly casted an arbitrary
-- number of times. Used within 'parseOperationsExpression' for bookkeeping.
data ColumnReference (b :: Backend)
data ColumnReference (b :: BackendType)
= ColumnReferenceColumn !(ColumnInfo b)
| ColumnReferenceCast !(ColumnReference b) !(ColumnType b)
@ -258,14 +258,14 @@ parseOperationsExpression rhsParser fim columnInfo =
-- This convoluted expression instead of col = val
-- to handle the case of col : null
equalsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
equalsBoolExpBuilder :: SQLExp 'Postgres -> SQLExp 'Postgres -> S.BoolExp
equalsBoolExpBuilder qualColExp rhsExp =
S.BEBin S.OrOp (S.BECompare S.SEQ qualColExp rhsExp)
(S.BEBin S.AndOp
(S.BENull qualColExp)
(S.BENull rhsExp))
notEqualsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
notEqualsBoolExpBuilder :: SQLExp 'Postgres -> SQLExp 'Postgres -> S.BoolExp
notEqualsBoolExpBuilder qualColExp rhsExp =
S.BEBin S.OrOp (S.BECompare S.SNE qualColExp rhsExp)
(S.BEBin S.AndOp
@ -377,13 +377,13 @@ foldBoolExp f = \case
BoolFld ce -> f ce
mkFieldCompExp
:: S.Qual -> FieldName -> OpExpG 'Postgres S.SQLExp -> S.BoolExp
:: S.Qual -> FieldName -> OpExpG 'Postgres (SQLExp 'Postgres) -> S.BoolExp
mkFieldCompExp qual lhsField = mkCompExp (mkQField lhsField)
where
mkQCol = S.SEQIdentifier . S.QIdentifier qual . toIdentifier
mkQField = S.SEQIdentifier . S.QIdentifier qual . Identifier . getFieldNameTxt
mkCompExp :: S.SQLExp -> OpExpG 'Postgres S.SQLExp -> S.BoolExp
mkCompExp :: SQLExp 'Postgres -> OpExpG 'Postgres (SQLExp 'Postgres) -> S.BoolExp
mkCompExp lhs = \case
ACast casts -> mkCastsExp casts
AEQ False val -> equalsBoolExpBuilder lhs val


@ -1,5 +1,5 @@
module Hasura.Backends.Postgres.Translate.Delete
( mkDeleteCTE
( mkDelete
) where
import Hasura.Prelude
@ -12,12 +12,9 @@ import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.RQL.IR.Delete
import Hasura.RQL.Types
mkDeleteCTE
:: AnnDel 'Postgres -> S.CTE
mkDeleteCTE (AnnDel tn (fltr, wc) _ _) =
S.CTEDelete delete
mkDelete :: AnnDel 'Postgres -> S.SQLDelete
mkDelete (AnnDel tn (fltr, wc) _ _) =
S.SQLDelete tn Nothing tableFltr $ Just S.returningStar
where
delete = S.SQLDelete tn Nothing tableFltr $ Just S.returningStar
tableFltr = Just $ S.WhereFrag $
toSQLBoolExp (S.QualTable tn) $ andAnnBoolExps fltr wc
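-- Rendered, the statement built above is simply (a sketch):
--
--   DELETE FROM <table>
--   WHERE <permission filter> AND <where clause>
--   RETURNING *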


@ -1,23 +1,19 @@
module Hasura.Backends.Postgres.Translate.Insert
( mkInsertCTE
, insertCheckExpr
, buildConflictClause
, toSQLConflict
, insertCheckConstraint
, insertOrUpdateCheckExpr
) where
import Hasura.Prelude
import qualified Data.HashSet as HS
import Instances.TH.Lift ()
import Data.Text.Extended
import Instances.TH.Lift ()
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.RQL.DML.Internal
import Hasura.Backends.Postgres.Translate.Returning
import Hasura.RQL.IR.Insert
import Hasura.RQL.Types
@ -32,11 +28,9 @@ mkInsertCTE (InsertQueryP1 tn cols vals conflict (insCheck, updCheck) _ _) =
. Just
. S.RetExp
$ [ S.selectStar
, S.Extractor
(insertOrUpdateCheckExpr tn conflict
(toSQLBool insCheck)
(fmap toSQLBool updCheck))
Nothing
, insertOrUpdateCheckExpr tn conflict
(toSQLBool insCheck)
(fmap toSQLBool updCheck)
]
toSQLBool = toSQLBoolExp $ S.QualTable tn
@ -53,92 +47,11 @@ toSQLConflict tableName = \case
CTConstraint cn -> S.SQLConstraint cn
validateInpCols :: (MonadError QErr m) => [PGCol] -> [PGCol] -> m ()
validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol ->
unless (inpCol `elem` updColsPerm) $ throw400 ValidationFailed $
"column " <> inpCol <<> " is not updatable"
buildConflictClause
:: (UserInfoM m, QErrM m)
=> SessVarBldr 'Postgres m
-> TableInfo 'Postgres
-> [PGCol]
-> OnConflict
-> m (ConflictClauseP1 'Postgres S.SQLExp)
buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act) =
case (mTCol, mTCons, act) of
(Nothing, Nothing, CAIgnore) -> return $ CP1DoNothing Nothing
(Just col, Nothing, CAIgnore) -> do
validateCols col
return $ CP1DoNothing $ Just $ CTColumn $ getPGCols col
(Nothing, Just cons, CAIgnore) -> do
validateConstraint cons
return $ CP1DoNothing $ Just $ CTConstraint cons
(Nothing, Nothing, CAUpdate) -> throw400 UnexpectedPayload
"Expecting 'constraint' or 'constraint_on' when the 'action' is 'update'"
(Just col, Nothing, CAUpdate) -> do
validateCols col
(updFltr, preSet) <- getUpdPerm
resolvedUpdFltr <- convAnnBoolExpPartialSQL sessVarBldr updFltr
resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) preSet
return $ CP1Update (CTColumn $ getPGCols col) inpCols resolvedPreSet resolvedUpdFltr
(Nothing, Just cons, CAUpdate) -> do
validateConstraint cons
(updFltr, preSet) <- getUpdPerm
resolvedUpdFltr <- convAnnBoolExpPartialSQL sessVarBldr updFltr
resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) preSet
return $ CP1Update (CTConstraint cons) inpCols resolvedPreSet resolvedUpdFltr
(Just _, Just _, _) -> throw400 UnexpectedPayload
"'constraint' and 'constraint_on' cannot be set at a time"
where
coreInfo = _tiCoreInfo tableInfo
fieldInfoMap = _tciFieldInfoMap coreInfo
-- toSQLBool = toSQLBoolExp (S.mkQual $ _tciName coreInfo)
validateCols c = do
let targetcols = getPGCols c
void $ withPathK "constraint_on" $ indexedForM targetcols $
\pgCol -> askPGType fieldInfoMap pgCol ""
validateConstraint c = do
let tableConsNames = maybe [] toList $
fmap _cName <$> tciUniqueOrPrimaryKeyConstraints coreInfo
withPathK "constraint" $
unless (c `elem` tableConsNames) $
throw400 Unexpected $ "constraint " <> getConstraintTxt c
<<> " for table " <> _tciName coreInfo
<<> " does not exist"
getUpdPerm = do
upi <- askUpdPermInfo tableInfo
let updFiltr = upiFilter upi
preSet = upiSet upi
updCols = HS.toList $ upiCols upi
validateInpCols inpCols updCols
return (updFiltr, preSet)
-- | Create an expression which will fail with a check constraint violation error
-- if the condition is not met on any of the inserted rows.
--
-- The resulting SQL will look something like this:
--
-- > INSERT INTO
-- > ...
-- > RETURNING
-- > *,
-- > CASE WHEN {cond}
-- > THEN NULL
-- > ELSE hdb_catalog.check_violation('insert check constraint failed')
-- > END
insertCheckExpr :: Text -> S.BoolExp -> S.SQLExp
insertCheckExpr errorMessage condExpr =
S.SECond condExpr S.SENull
(S.SEFunction
(S.FunctionExp
(QualifiedObject (SchemaName "hdb_catalog") (FunctionName "check_violation"))
(S.FunctionArgs [S.SELit errorMessage] mempty)
Nothing)
)
-- | Annotates the check constraint expression with @boolean@
-- (<check-condition>)::boolean
insertCheckConstraint :: S.BoolExp -> S.SQLExp
insertCheckConstraint boolExp =
S.SETyAnn (S.SEBool boolExp) S.boolTypeAnn
-- | When inserting data, we might need to also enforce the update
-- check condition, because we might fall back to an update via an
@ -153,15 +66,10 @@ insertCheckExpr errorMessage condExpr =
-- > RETURNING
-- > *,
-- > CASE WHEN xmax = 0
-- > THEN CASE WHEN {insert_cond}
-- > THEN NULL
-- > ELSE hdb_catalog.check_violation('insert check constraint failed')
-- > END
-- > ELSE CASE WHEN {update_cond}
-- > THEN NULL
-- > ELSE hdb_catalog.check_violation('update check constraint failed')
-- > END
-- > THEN {insert_cond}
-- > ELSE {update_cond}
-- > END
-- > AS "check__constraint"
--
-- See @https://stackoverflow.com/q/34762732@ for more information on the use of
-- the @xmax@ system column.
@ -170,15 +78,16 @@ insertOrUpdateCheckExpr
-> Maybe (ConflictClauseP1 'Postgres S.SQLExp)
-> S.BoolExp
-> Maybe S.BoolExp
-> S.SQLExp
-> S.Extractor
insertOrUpdateCheckExpr qt (Just _conflict) insCheck (Just updCheck) =
asCheckErrorExtractor $
S.SECond
(S.BECompare
S.SEQ
(S.SEQIdentifier (S.QIdentifier (S.mkQual qt) (Identifier "xmax")))
(S.SEUnsafe "0"))
(insertCheckExpr "insert check constraint failed" insCheck)
(insertCheckExpr "update check constraint failed" updCheck)
(insertCheckConstraint insCheck)
(insertCheckConstraint updCheck)
insertOrUpdateCheckExpr _ _ insCheck _ =
-- If we won't generate an ON CONFLICT clause, there is no point
-- in testing xmax. In particular, views don't provide the xmax
@ -188,4 +97,4 @@ insertOrUpdateCheckExpr _ _ insCheck _ =
--
-- Alternatively, if there is no update check constraint, we should
-- use the insert check constraint, for backwards compatibility.
insertCheckExpr "insert check constraint failed" insCheck
asCheckErrorExtractor $ insertCheckConstraint insCheck


@ -1,5 +1,5 @@
module Hasura.Backends.Postgres.Translate.Mutation
( mkSelCTEFromColVals
( mkSelectExpFromColumnValues
)
where
@ -22,19 +22,18 @@ import Hasura.SQL.Types
-- For example, consider the table `CREATE TABLE test (id serial primary key, name text not null, age int)`.
-- The generated values expression must list values in column order;
-- `SELECT ("row"::test).* FROM (VALUES (1, 'Robert', 23)) AS "row"`.
mkSelCTEFromColVals
mkSelectExpFromColumnValues
:: (MonadError QErr m)
=> QualifiedTable -> [ColumnInfo 'Postgres] -> [ColumnValues TxtEncodedPGVal] -> m S.CTE
mkSelCTEFromColVals qt allCols colVals =
S.CTESelect <$> case colVals of
[] -> return selNoRows
_ -> do
tuples <- mapM mkTupsFromColVal colVals
let fromItem = S.FIValues (S.ValuesExp tuples) (S.Alias rowAlias) Nothing
return S.mkSelect
{ S.selExtr = [extractor]
, S.selFrom = Just $ S.FromExp [fromItem]
}
=> QualifiedTable -> [ColumnInfo 'Postgres] -> [ColumnValues TxtEncodedPGVal] -> m S.Select
mkSelectExpFromColumnValues qt allCols = \case
[] -> return selNoRows
colVals -> do
tuples <- mapM mkTupsFromColVal colVals
let fromItem = S.FIValues (S.ValuesExp tuples) (S.Alias rowAlias) Nothing
return S.mkSelect
{ S.selExtr = [extractor]
, S.selFrom = Just $ S.FromExp [fromItem]
}
where
rowAlias = Identifier "row"
extractor = S.selectStar' $ S.QualifiedIdentifier rowAlias $ Just $ S.TypeAnn $ toSQLTxt qt


@ -1,7 +1,13 @@
module Hasura.Backends.Postgres.Translate.Returning
( mkMutFldExp
( MutationCTE(..)
, getMutationCTE
, checkPermissionRequired
, mkMutFldExp
, mkDefaultMutFlds
, mkCheckErrorExp
, mkMutationOutputExp
, checkConstraintIdentifier
, asCheckErrorExtractor
, checkRetCols
) where
@ -19,6 +25,28 @@ import Hasura.RQL.IR.Select
import Hasura.RQL.Types
-- | The postgres common table expression (CTE) for mutation queries.
-- This CTE expression is used to generate mutation field output expression,
-- see Note [Mutation output expression].
data MutationCTE
= MCCheckConstraint !S.CTE -- ^ A Mutation with check constraint validation (Insert or Update)
| MCSelectValues !S.Select -- ^ A Select statement which emits mutated table rows
| MCDelete !S.SQLDelete -- ^ A Delete statement
deriving (Show, Eq)
getMutationCTE :: MutationCTE -> S.CTE
getMutationCTE = \case
MCCheckConstraint cte -> cte
MCSelectValues select -> S.CTESelect select
MCDelete delete -> S.CTEDelete delete
checkPermissionRequired :: MutationCTE -> Bool
checkPermissionRequired = \case
MCCheckConstraint _ -> True
MCSelectValues _ -> False
MCDelete _ -> False
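-- A minimal consumption sketch ('renderMutationCTE' is a hypothetical
-- helper, not in the source): callers render the plain CTE and
-- separately learn whether the extra check-constraint extractor must be
-- emitted alongside the mutation output.
renderMutationCTE :: MutationCTE -> (S.CTE, Bool)
renderMutationCTE cte = (getMutationCTE cte, checkPermissionRequired cte)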
pgColsToSelFlds :: [ColumnInfo 'Postgres] -> [(FieldName, AnnField 'Postgres)]
pgColsToSelFlds cols =
flip map cols $
@ -58,10 +86,7 @@ WITH "<table-name>__mutation_result_alias" AS (
(<insert-value-row>[..])
ON CONFLICT ON CONSTRAINT "<table-constraint-name>" DO NOTHING RETURNING *,
-- An extra column expression which performs the 'CHECK' validation
CASE
WHEN (<CHECK Condition>) THEN NULL
ELSE "hdb_catalog"."check_violation"('insert check constraint failed')
END
(<CHECK Condition>) AS "check__constraint"
),
"<table-name>__all_columns_alias" AS (
-- Only extract columns from mutated rows. Columns sorted by ordinal position so that
@ -70,7 +95,8 @@ WITH "<table-name>__mutation_result_alias" AS (
FROM
"<table-name>__mutation_result_alias"
)
<SELECT statement to generate mutation response using '<table-name>__all_columns_alias' as FROM>
<SELECT statement to generate mutation response using '<table-name>__all_columns_alias' as FROM
and bool_and("check__constraint") from "<table-name>__mutation_result_alias">
-}
-- | Generate mutation output expression with given mutation CTE statement.
@ -79,24 +105,27 @@ mkMutationOutputExp
:: QualifiedTable
-> [ColumnInfo 'Postgres]
-> Maybe Int
-> S.CTE
-> MutationCTE
-> MutationOutput 'Postgres
-> Bool
-> S.SelectWith
mkMutationOutputExp qt allCols preCalAffRows cte mutOutput strfyNum =
S.SelectWith [ (S.Alias mutationResultAlias, cte)
S.SelectWith [ (S.Alias mutationResultAlias, getMutationCTE cte)
, (S.Alias allColumnsAlias, allColumnsSelect)
] sel
where
mutationResultAlias = Identifier $ snakeCaseQualifiedObject qt <> "__mutation_result_alias"
allColumnsAlias = Identifier $ snakeCaseQualifiedObject qt <> "__all_columns_alias"
allColumnsSelect = S.CTESelect $ S.mkSelect
{ S.selExtr = map (S.mkExtr . pgiColumn) $ sortCols allCols
{ S.selExtr = map (S.mkExtr . pgiColumn) (sortCols allCols)
, S.selFrom = Just $ S.mkIdenFromExp mutationResultAlias
}
sel = S.mkSelect { S.selExtr = [S.Extractor extrExp Nothing] }
sel = S.mkSelect { S.selExtr = S.Extractor extrExp Nothing
: bool [] [S.Extractor checkErrorExp Nothing] (checkPermissionRequired cte)
}
where
checkErrorExp = mkCheckErrorExp mutationResultAlias
extrExp = case mutOutput of
MOutMultirowFields mutFlds ->
let jsonBuildObjArgs = flip concatMap mutFlds $
@ -111,6 +140,22 @@ mkMutationOutputExp qt allCols preCalAffRows cte mutOutput strfyNum =
in S.SESelect $ mkSQLSelect JASSingleObject $
AnnSelectG annFlds tabFrom tabPerm noSelectArgs strfyNum
mkCheckErrorExp :: IsIdentifier a => a -> S.SQLExp
mkCheckErrorExp alias =
let boolAndCheckConstraint =
S.handleIfNull (S.SEBool $ S.BELit True) $
S.SEFnApp "bool_and" [S.SEIdentifier checkConstraintIdentifier] Nothing
in S.SESelect $
S.mkSelect { S.selExtr = [S.Extractor boolAndCheckConstraint Nothing]
, S.selFrom = Just $ S.mkIdenFromExp alias
}
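-- Rendered, the expression above looks roughly like (a sketch):
--
--   (SELECT COALESCE(bool_and("check__constraint"), true)
--    FROM "<alias>")
--
-- i.e. the check passes when every mutated row emitted a true
-- "check__constraint" value, and trivially when no rows were mutated.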
checkConstraintIdentifier :: Identifier
checkConstraintIdentifier = Identifier "check__constraint"
asCheckErrorExtractor :: S.SQLExp -> S.Extractor
asCheckErrorExtractor s =
S.Extractor s $ Just $ S.Alias checkConstraintIdentifier
checkRetCols
:: (UserInfoM m, QErrM m)


@ -28,6 +28,7 @@ import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.EncJSON
import Hasura.GraphQL.Schema.Common
import Hasura.RQL.DML.Internal
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.IR.Select
import Hasura.RQL.Types
import Hasura.SQL.Types
@ -71,7 +72,7 @@ selectFromToFromItem pfx = \case
-- from the FromItem generated with selectFromToFromItem
-- however, given how S.FromItem is modelled, it is not
-- currently possible
selectFromToQual :: SelectFrom backend -> S.Qual
selectFromToQual :: SelectFrom 'Postgres -> S.Qual
selectFromToQual = \case
FromTable tn -> S.QualTable tn
FromIdentifier i -> S.QualifiedIdentifier i Nothing
@ -340,13 +341,13 @@ mkSimilarArrayFields annFields maybeOrderBys =
Just (riName ri, mkOrderByFieldName $ riName ri)
fetchAggOrderByRels _ = Nothing
getArrayRelNameAndSelectArgs :: ArraySelectG backend v -> (RelName, SelectArgsG backend v)
getArrayRelNameAndSelectArgs :: ArraySelectG 'Postgres v -> (RelName, SelectArgsG 'Postgres v)
getArrayRelNameAndSelectArgs = \case
ASSimple r -> (aarRelationshipName r, _asnArgs $ aarAnnSelect r)
ASAggregate r -> (aarRelationshipName r, _asnArgs $ aarAnnSelect r)
ASConnection r -> (aarRelationshipName r, _asnArgs $ _csSelect $ aarAnnSelect r)
getAnnArr :: (a, AnnFieldG backend v) -> Maybe (a, ArraySelectG backend v)
getAnnArr :: (a, AnnFieldG 'Postgres v) -> Maybe (a, ArraySelectG 'Postgres v)
getAnnArr (f, annFld) = case annFld of
AFArrayRelation (ASConnection _) -> Nothing
AFArrayRelation ar -> Just (f, ar)
@ -354,8 +355,8 @@ getAnnArr (f, annFld) = case annFld of
withWriteJoinTree
:: (MonadWriter (JoinTree backend) m)
=> (JoinTree backend -> b -> JoinTree backend)
:: (MonadWriter (JoinTree 'Postgres) m)
=> (JoinTree 'Postgres -> b -> JoinTree 'Postgres)
-> m (a, b)
-> m a
withWriteJoinTree joinTreeUpdater action =
@ -366,8 +367,8 @@ withWriteJoinTree joinTreeUpdater action =
pure (out, fromJoinTree)
withWriteObjectRelation
:: (MonadWriter (JoinTree backend) m, Hashable (ObjectRelationSource backend))
=> m ( ObjectRelationSource backend
:: (MonadWriter (JoinTree 'Postgres) m)
=> m ( ObjectRelationSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
, a
)
@ -418,8 +419,8 @@ withWriteArrayConnection action =
in mempty{_jtArrayConnections = HM.singleton source arraySelectNode}
withWriteComputedFieldTableSet
:: (MonadWriter (JoinTree backend) m)
=> m ( ComputedFieldTableSetSource
:: (MonadWriter (JoinTree 'Postgres) m)
=> m ( ComputedFieldTableSetSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
, a
)
@ -442,7 +443,7 @@ processAnnSimpleSelect
-> FieldName
-> PermissionLimitSubQuery
-> AnnSimpleSel 'Postgres
-> m ( SelectSource
-> m ( SelectSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
)
processAnnSimpleSelect sourcePrefixes fieldAlias permLimitSubQuery annSimpleSel = do
@ -464,7 +465,7 @@ processAnnAggregateSelect
=> SourcePrefixes
-> FieldName
-> AnnAggregateSelect 'Postgres
-> m ( SelectSource
-> m ( SelectSource 'Postgres
, HM.HashMap S.Alias S.SQLExp
, S.Extractor
)
@ -513,8 +514,8 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do
mkPermissionLimitSubQuery
:: Maybe Int
-> TableAggregateFields backend
-> Maybe (NE.NonEmpty (AnnOrderByItem backend))
-> TableAggregateFields 'Postgres
-> Maybe (NE.NonEmpty (AnnOrderByItem 'Postgres))
-> PermissionLimitSubQuery
mkPermissionLimitSubQuery permLimit aggFields orderBys =
case permLimit of
@ -589,7 +590,7 @@ processSelectParams
-> PermissionLimitSubQuery
-> TablePerm 'Postgres
-> SelectArgs 'Postgres
-> m ( SelectSource
-> m ( SelectSource 'Postgres
, [(S.Alias, S.SQLExp)]
, Maybe S.SQLExp -- Order by cursor
)
@ -696,8 +697,7 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields orderByItems =
toOrderByExp :: OrderByItemExp 'Postgres -> S.OrderByItem
toOrderByExp orderByItemExp =
let OrderByItemG obTyM expAlias obNullsM = fst . snd <$> orderByItemExp
in S.OrderByItem (S.SEIdentifier $ toIdentifier expAlias)
(unOrderType <$> obTyM) (unNullsOrder <$> obNullsM)
in S.OrderByItem (S.SEIdentifier $ toIdentifier expAlias) obTyM obNullsM
mkCursorExp :: [OrderByItemExp 'Postgres] -> S.SQLExp
mkCursorExp orderByItemExps =
@ -823,7 +823,7 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields = do
pure $ toJSONableExp strfyNum (pgiType col) asText $ withColumnOp colOpM $
S.mkQIdenExp (mkBaseTableAlias sourcePrefix) $ pgiColumn col
fromScalarComputedField :: ComputedFieldScalarSelect S.SQLExp -> m S.SQLExp
fromScalarComputedField :: ComputedFieldScalarSelect 'Postgres S.SQLExp -> m S.SQLExp
fromScalarComputedField computedFieldScalar = do
strfyNum <- ask
pure $ toJSONableExp strfyNum (PGColumnScalar ty) False $ withColumnOp colOpM $
@ -831,7 +831,7 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields = do
where
ComputedFieldScalarSelect fn args ty colOpM = computedFieldScalar
withColumnOp :: Maybe ColumnOp -> S.SQLExp -> S.SQLExp
withColumnOp :: Maybe (ColumnOp 'Postgres) -> S.SQLExp -> S.SQLExp
withColumnOp colOpM sqlExp = case colOpM of
Nothing -> sqlExp
Just (ColumnOp opText cExp) -> S.mkSQLOpExp opText sqlExp cExp
@ -862,7 +862,7 @@ mkJoinCond baseTablepfx colMapn =
generateSQLSelect
:: S.BoolExp -- ^ Pre join condition
-> SelectSource
-> SelectSource 'Postgres
-> SelectNode 'Postgres
-> S.Select
generateSQLSelect joinCondition selectSource selectNode =
@ -928,7 +928,7 @@ generateSQLSelect joinCondition selectSource selectNode =
in S.FISelectWith (S.Lateral True) selectWith alias
computedFieldToFromItem
:: (ComputedFieldTableSetSource, SelectNode 'Postgres) -> S.FromItem
:: (ComputedFieldTableSetSource 'Postgres, SelectNode 'Postgres) -> S.FromItem
computedFieldToFromItem (computedFieldTableSource, node) =
let ComputedFieldTableSetSource fieldName selectTy source = computedFieldTableSource
internalSelect = generateSQLSelect (S.BELit True) source node
@ -942,7 +942,7 @@ generateSQLSelect joinCondition selectSource selectNode =
in S.mkLateralFromItem select alias
generateSQLSelectFromArrayNode
:: SelectSource
:: SelectSource 'Postgres
-> ArraySelectNode 'Postgres
-> S.BoolExp
-> S.Select
@ -1125,7 +1125,7 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection
mkSplitCompareExp (ConnectionSplit kind v (OrderByItemG obTyM obCol _)) =
let obAlias = mkAnnOrderByAlias thisPrefix fieldAlias similarArrayFields obCol
obTy = maybe S.OTAsc unOrderType obTyM
obTy = fromMaybe S.OTAsc obTyM
compareOp = case (kind, obTy) of
(CSKAfter, S.OTAsc) -> S.SGT
(CSKAfter, S.OTDesc) -> S.SLT


@ -4,15 +4,16 @@ module Hasura.Backends.Postgres.Translate.Update
import Hasura.Prelude
import Instances.TH.Lift ()
import Instances.TH.Lift ()
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.Backends.Postgres.Translate.Insert
import Hasura.Backends.Postgres.Translate.Returning
import Hasura.RQL.Instances ()
import Hasura.RQL.IR.Update
import Hasura.RQL.Instances ()
import Hasura.RQL.Types
@ -26,7 +27,7 @@ mkUpdateCTE (AnnUpd tn opExps (permFltr, wc) chk _ columnsInfo) =
. Just
. S.RetExp
$ [ S.selectStar
, S.Extractor (insertCheckExpr "update check constraint failed" checkExpr) Nothing
, asCheckErrorExtractor $ insertCheckConstraint checkExpr
]
setExp = S.SetExp $ map (expandOperator columnsInfo) opExps
tableFltr = Just $ S.WhereFrag tableFltrExpr


@ -187,7 +187,17 @@ processEventQueue logger logenv httpMgr pool getSchemaCache eeCtx@EventEngineCtx
where
fetchBatchSize = 100
popEventsBatch = do
let run = liftIO . runExceptT . Q.runTx pool (Q.RepeatableRead, Just Q.ReadWrite)
{-
SELECT FOR UPDATE .. SKIP LOCKED can throw serialization errors in RepeatableRead: https://stackoverflow.com/a/53289263/1911889
We can avoid this safely by running it in ReadCommitted as Postgres will recheck the
predicate condition if a row is updated concurrently: https://www.postgresql.org/docs/9.5/transaction-iso.html#XACT-READ-COMMITTED
Every other action on an event_log row (post-processing, archival, etc.) is a single write (no R-W or W-R dependency),
so it is safe to perform them in ReadCommitted as well (the writes will then acquire some serial order).
Any serial order of updates to a row will lead to an eventually consistent state as the row will have
(delivered=t or error=t or archived=t) after a fixed number of tries (assuming it begins with locked='f').
-}
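-- For reference, the batch-dequeue query run by 'fetchEvents' has
-- roughly the shape below (a sketch inferred from context; the exact
-- predicates and returned columns live in the actual implementation):
--
--   UPDATE hdb_catalog.event_log SET locked = 't'
--   WHERE id IN ( SELECT l.id FROM hdb_catalog.event_log l
--                 WHERE l.locked = 'f' AND l.delivered = 'f'
--                       AND l.error = 'f' AND l.archived = 'f'
--                 LIMIT $1 FOR UPDATE SKIP LOCKED )
--   RETURNING id, payload::json, tries, created_at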
let run = liftIO . runExceptT . Q.runTx' pool
run (fetchEvents fetchBatchSize) >>= \case
Left err -> do
liftIO $ L.unLogger logger $ EventInternalErr err


@ -26,15 +26,15 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Data.Aeson.Casing
import Data.Aeson.TH
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.RQL.IR.Delete as RQL
import qualified Hasura.RQL.IR.Select as RQL
import qualified Hasura.RQL.IR.Update as RQL
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.RQL.IR.Delete as IR
import qualified Hasura.RQL.IR.Insert as IR
import qualified Hasura.RQL.IR.Select as IR
import qualified Hasura.RQL.IR.Update as IR
import qualified Hasura.RQL.Types.Action as RQL
import qualified Hasura.RQL.Types.RemoteSchema as RQL
import Hasura.GraphQL.Parser
import Hasura.GraphQL.Schema.Insert (AnnInsert)
import Hasura.SQL.Backend
-- | For storing both a normal GQLContext and one for the backend variant.
@ -89,12 +89,12 @@ traverseAction f = \case
RFRaw x -> pure $ RFRaw x
data QueryDB b v
= QDBSimple (RQL.AnnSimpleSelG b v)
| QDBPrimaryKey (RQL.AnnSimpleSelG b v)
| QDBAggregation (RQL.AnnAggregateSelectG b v)
| QDBConnection (RQL.ConnectionSelect b v)
= QDBSimple (IR.AnnSimpleSelG b v)
| QDBPrimaryKey (IR.AnnSimpleSelG b v)
| QDBAggregation (IR.AnnAggregateSelectG b v)
| QDBConnection (IR.ConnectionSelect b v)
data ActionQuery (b :: Backend) v
data ActionQuery (b :: BackendType) v
= AQQuery !(RQL.AnnActionExecution b v)
| AQAsync !(RQL.AnnActionAsyncQuery b v)
@ -102,12 +102,12 @@ type RemoteField = (RQL.RemoteSchemaInfo, G.Field G.NoFragments G.Name)
type QueryRootField v = RootField (QueryDB 'Postgres v) RemoteField (ActionQuery 'Postgres v) J.Value
data MutationDB (b :: Backend) v
= MDBInsert (AnnInsert b v)
| MDBUpdate (RQL.AnnUpdG b v)
| MDBDelete (RQL.AnnDelG b v)
data MutationDB (b :: BackendType) v
= MDBInsert (IR.AnnInsert b v)
| MDBUpdate (IR.AnnUpdG b v)
| MDBDelete (IR.AnnDelG b v)
data ActionMutation (b :: Backend) v
data ActionMutation (b :: BackendType) v
= AMSync !(RQL.AnnActionExecution b v)
| AMAsync !RQL.AnnActionMutationAsync
@ -115,4 +115,4 @@ type MutationRootField v =
RootField (MutationDB 'Postgres v) RemoteField (ActionMutation 'Postgres v) J.Value
type SubscriptionRootField v = RootField (QueryDB 'Postgres v) Void (RQL.AnnActionAsyncQuery 'Postgres v) Void
type SubscriptionRootFieldResolved = RootField (QueryDB 'Postgres S.SQLExp) Void (RQL.AnnSimpleSel 'Postgres) Void
type SubscriptionRootFieldResolved = RootField (QueryDB 'Postgres PG.SQLExp) Void (IR.AnnSimpleSel 'Postgres) Void


@ -14,40 +14,38 @@ import qualified Database.PG.Query as Q
import Data.Text.Extended
import qualified Hasura.Backends.Postgres.Execute.Mutation as RQL
import qualified Hasura.Backends.Postgres.Execute.RemoteJoin as RQL
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.Backends.Postgres.Translate.BoolExp as RQL
import qualified Hasura.Backends.Postgres.Translate.Insert as RQL
import qualified Hasura.Backends.Postgres.Translate.Mutation as RQL
import qualified Hasura.Backends.Postgres.Translate.Returning as RQL
import qualified Hasura.RQL.IR.Insert as RQL
import qualified Hasura.RQL.IR.Returning as RQL
import qualified Hasura.Backends.Postgres.Execute.Mutation as PGE
import qualified Hasura.Backends.Postgres.Execute.RemoteJoin as PGE
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.Backends.Postgres.Translate.BoolExp as PGT
import qualified Hasura.Backends.Postgres.Translate.Insert as PGT
import qualified Hasura.Backends.Postgres.Translate.Mutation as PGT
import qualified Hasura.Backends.Postgres.Translate.Returning as PGT
import qualified Hasura.RQL.IR.Insert as IR
import qualified Hasura.RQL.IR.Returning as IR
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.Postgres.Connection
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.EncJSON
import Hasura.GraphQL.Schema.Insert
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.Server.Version (HasVersion)
traverseAnnInsert
:: (Applicative f)
=> (a -> f b)
-> AnnInsert backend a
-> f (AnnInsert backend b)
traverseAnnInsert f (AnnInsert fieldName isSingle (annIns, mutationOutput)) =
AnnInsert fieldName isSingle
-> IR.AnnInsert backend a
-> f (IR.AnnInsert backend b)
traverseAnnInsert f (IR.AnnInsert fieldName isSingle (annIns, mutationOutput)) =
IR.AnnInsert fieldName isSingle
<$> ( (,)
<$> traverseMulti annIns
<*> RQL.traverseMutationOutput f mutationOutput
<*> IR.traverseMutationOutput f mutationOutput
)
where
traverseMulti (AnnIns objs tableName conflictClause checkCond columns defaultValues) = AnnIns
traverseMulti (IR.AnnIns objs tableName conflictClause checkCond columns defaultValues) = IR.AnnIns
<$> traverse traverseObject objs
<*> pure tableName
<*> traverse (traverse f) conflictClause
@ -57,7 +55,7 @@ traverseAnnInsert f (AnnInsert fieldName isSingle (annIns, mutationOutput)) =
)
<*> pure columns
<*> traverse f defaultValues
traverseSingle (AnnIns obj tableName conflictClause checkCond columns defaultValues) = AnnIns
traverseSingle (IR.AnnIns obj tableName conflictClause checkCond columns defaultValues) = IR.AnnIns
<$> traverseObject obj
<*> pure tableName
<*> traverse (traverse f) conflictClause
@ -67,24 +65,24 @@ traverseAnnInsert f (AnnInsert fieldName isSingle (annIns, mutationOutput)) =
)
<*> pure columns
<*> traverse f defaultValues
traverseObject (AnnInsObj columns objRels arrRels) = AnnInsObj
traverseObject (IR.AnnInsObj columns objRels arrRels) = IR.AnnInsObj
<$> traverse (traverse f) columns
<*> traverse (traverseRel traverseSingle) objRels
<*> traverse (traverseRel traverseMulti) arrRels
traverseRel z (RelIns object relInfo) = RelIns <$> z object <*> pure relInfo
traverseRel z (IR.RelIns object relInfo) = IR.RelIns <$> z object <*> pure relInfo
convertToSQLTransaction
:: (HasVersion, MonadTx m, MonadIO m, Tracing.MonadTrace m)
=> Env.Environment
-> AnnInsert 'Postgres S.SQLExp
-> RQL.MutationRemoteJoinCtx
-> IR.AnnInsert 'Postgres PG.SQLExp
-> PGE.MutationRemoteJoinCtx
-> Seq.Seq Q.PrepArg
-> Bool
-> m EncJSON
convertToSQLTransaction env (AnnInsert fieldName isSingle (annIns, mutationOutput)) remoteJoinCtx planVars stringifyNum =
if null $ _aiInsObj annIns
then pure $ RQL.buildEmptyMutResp mutationOutput
convertToSQLTransaction env (IR.AnnInsert fieldName isSingle (annIns, mutationOutput)) remoteJoinCtx planVars stringifyNum =
if null $ IR._aiInsObj annIns
then pure $ IR.buildEmptyMutResp mutationOutput
else withPaths ["selectionSet", fieldName, "args", suffix] $
insertMultipleObjects env annIns [] remoteJoinCtx mutationOutput planVars stringifyNum
where
@ -94,27 +92,27 @@ convertToSQLTransaction env (AnnInsert fieldName isSingle (annIns, mutationOutpu
insertMultipleObjects
:: (HasVersion, MonadTx m, MonadIO m, Tracing.MonadTrace m)
=> Env.Environment
-> MultiObjIns 'Postgres S.SQLExp
-> [(PGCol, S.SQLExp)]
-> RQL.MutationRemoteJoinCtx
-> RQL.MutationOutput 'Postgres
-> IR.MultiObjIns 'Postgres PG.SQLExp
-> [(PGCol, PG.SQLExp)]
-> PGE.MutationRemoteJoinCtx
-> IR.MutationOutput 'Postgres
-> Seq.Seq Q.PrepArg
-> Bool
-> m EncJSON
insertMultipleObjects env multiObjIns additionalColumns remoteJoinCtx mutationOutput planVars stringifyNum =
bool withoutRelsInsert withRelsInsert anyRelsToInsert
where
AnnIns insObjs table conflictClause checkCondition columnInfos defVals = multiObjIns
allInsObjRels = concatMap _aioObjRels insObjs
allInsArrRels = concatMap _aioArrRels insObjs
IR.AnnIns insObjs table conflictClause checkCondition columnInfos defVals = multiObjIns
allInsObjRels = concatMap IR._aioObjRels insObjs
allInsArrRels = concatMap IR._aioArrRels insObjs
anyRelsToInsert = not $ null allInsArrRels && null allInsObjRels
withoutRelsInsert = do
indexedForM_ (_aioColumns <$> insObjs) \column ->
indexedForM_ (IR._aioColumns <$> insObjs) \column ->
validateInsert (map fst column) [] (map fst additionalColumns)
let columnValues = map (mkSQLRow defVals) $ union additionalColumns . _aioColumns <$> insObjs
let columnValues = map (mkSQLRow defVals) $ union additionalColumns . IR._aioColumns <$> insObjs
columnNames = Map.keys defVals
insertQuery = RQL.InsertQueryP1
insertQuery = IR.InsertQueryP1
table
columnNames
columnValues
@ -122,34 +120,33 @@ insertMultipleObjects env multiObjIns additionalColumns remoteJoinCtx mutationOu
checkCondition
mutationOutput
columnInfos
rowCount = T.pack . show . length $ _aiInsObj multiObjIns
rowCount = T.pack . show . length $ IR._aiInsObj multiObjIns
Tracing.trace ("Insert (" <> rowCount <> ") " <> qualifiedObjectToText table) do
Tracing.attachMetadata [("count", rowCount)]
RQL.execInsertQuery env stringifyNum (Just remoteJoinCtx) (insertQuery, planVars)
PGE.execInsertQuery env stringifyNum (Just remoteJoinCtx) (insertQuery, planVars)
withRelsInsert = do
insertRequests <- indexedForM insObjs \obj -> do
let singleObj = AnnIns obj table conflictClause checkCondition columnInfos defVals
let singleObj = IR.AnnIns obj table conflictClause checkCondition columnInfos defVals
insertObject env singleObj additionalColumns remoteJoinCtx planVars stringifyNum
let affectedRows = sum $ map fst insertRequests
columnValues = mapMaybe snd insertRequests
selectExpr <- RQL.mkSelCTEFromColVals table columnInfos columnValues
let (mutOutputRJ, remoteJoins) = RQL.getRemoteJoinsMutationOutput mutationOutput
sqlQuery = Q.fromBuilder $ toSQL $
RQL.mkMutationOutputExp table columnInfos (Just affectedRows) selectExpr mutOutputRJ stringifyNum
RQL.executeMutationOutputQuery env sqlQuery [] $ (,remoteJoinCtx) <$> remoteJoins
selectExpr <- PGT.mkSelectExpFromColumnValues table columnInfos columnValues
let (mutOutputRJ, remoteJoins) = PGE.getRemoteJoinsMutationOutput mutationOutput
PGE.executeMutationOutputQuery env table columnInfos (Just affectedRows) (PGT.MCSelectValues selectExpr)
mutOutputRJ stringifyNum [] $ (, remoteJoinCtx) <$> remoteJoins
insertObject
:: (HasVersion, MonadTx m, MonadIO m, Tracing.MonadTrace m)
=> Env.Environment
-> SingleObjIns 'Postgres S.SQLExp
-> [(PGCol, S.SQLExp)]
-> RQL.MutationRemoteJoinCtx
-> IR.SingleObjIns 'Postgres PG.SQLExp
-> [(PGCol, PG.SQLExp)]
-> PGE.MutationRemoteJoinCtx
-> Seq.Seq Q.PrepArg
-> Bool
-> m (Int, Maybe (ColumnValues TxtEncodedPGVal))
insertObject env singleObjIns additionalColumns remoteJoinCtx planVars stringifyNum = Tracing.trace ("Insert " <> qualifiedObjectToText table) do
validateInsert (map fst columns) (map _riRelInfo objectRels) (map fst additionalColumns)
validateInsert (map fst columns) (map IR._riRelInfo objectRels) (map fst additionalColumns)
-- insert all object relations and fetch this insert dependent column values
objInsRes <- forM objectRels $ insertObjRel env planVars remoteJoinCtx stringifyNum
@ -161,7 +158,8 @@ insertObject env singleObjIns additionalColumns remoteJoinCtx planVars stringify
cte <- mkInsertQ table onConflict finalInsCols defaultValues checkCond
MutateResp affRows colVals <- liftTx $ RQL.mutateAndFetchCols table allColumns (cte, planVars) stringifyNum
MutateResp affRows colVals <- liftTx $
PGE.mutateAndFetchCols table allColumns (PGT.MCCheckConstraint cte, planVars) stringifyNum
colValM <- asSingleObject colVals
arrRelAffRows <- bool (withArrRels colValM) (return 0) $ null arrayRels
@ -169,11 +167,11 @@ insertObject env singleObjIns additionalColumns remoteJoinCtx planVars stringify
return (totAffRows, colValM)
where
AnnIns annObj table onConflict checkCond allColumns defaultValues = singleObjIns
AnnInsObj columns objectRels arrayRels = annObj
IR.AnnIns annObj table onConflict checkCond allColumns defaultValues = singleObjIns
IR.AnnInsObj columns objectRels arrayRels = annObj
arrRelDepCols = flip getColInfos allColumns $
concatMap (Map.keys . riMapping . _riRelInfo) arrayRels
concatMap (Map.keys . riMapping . IR._riRelInfo) arrayRels
withArrRels colValM = do
colVal <- onNothing colValM $ throw400 NotSupported cannotInsArrRelErr
@ -194,10 +192,10 @@ insertObjRel
:: (HasVersion, MonadTx m, MonadIO m, Tracing.MonadTrace m)
=> Env.Environment
-> Seq.Seq Q.PrepArg
-> RQL.MutationRemoteJoinCtx
-> PGE.MutationRemoteJoinCtx
-> Bool
-> ObjRelIns 'Postgres S.SQLExp
-> m (Int, [(PGCol, S.SQLExp)])
-> IR.ObjRelIns 'Postgres PG.SQLExp
-> m (Int, [(PGCol, PG.SQLExp)])
insertObjRel env planVars remoteJoinCtx stringifyNum objRelIns =
withPathK (relNameToTxt relName) $ do
(affRows, colValM) <- withPathK "data" $ insertObject env singleObjIns [] remoteJoinCtx planVars stringifyNum
@ -208,11 +206,11 @@ insertObjRel env planVars remoteJoinCtx stringifyNum objRelIns =
Just (column, value)
pure (affRows, columns)
where
RelIns singleObjIns relInfo = objRelIns
IR.RelIns singleObjIns relInfo = objRelIns
relName = riName relInfo
table = riRTable relInfo
mapCols = riMapping relInfo
allCols = _aiTableCols singleObjIns
allCols = IR._aiTableCols singleObjIns
rCols = Map.elems mapCols
rColInfos = getColInfos rCols allCols
errMsg = "cannot proceed to insert object relation "
@ -222,11 +220,11 @@ insertObjRel env planVars remoteJoinCtx stringifyNum objRelIns =
insertArrRel
:: (HasVersion, MonadTx m, MonadIO m, Tracing.MonadTrace m)
=> Env.Environment
-> [(PGCol, S.SQLExp)]
-> RQL.MutationRemoteJoinCtx
-> [(PGCol, PG.SQLExp)]
-> PGE.MutationRemoteJoinCtx
-> Seq.Seq Q.PrepArg
-> Bool
-> ArrRelIns 'Postgres S.SQLExp
-> IR.ArrRelIns 'Postgres PG.SQLExp
-> m Int
insertArrRel env resCols remoteJoinCtx planVars stringifyNum arrRelIns =
withPathK (relNameToTxt $ riName relInfo) $ do
@ -239,9 +237,9 @@ insertArrRel env resCols remoteJoinCtx planVars stringifyNum arrRelIns =
onNothing (Map.lookup ("affected_rows" :: Text) resObj) $
throw500 "affected_rows not returned in array rel insert"
where
RelIns multiObjIns relInfo = arrRelIns
IR.RelIns multiObjIns relInfo = arrRelIns
mapping = riMapping relInfo
mutOutput = RQL.MOutMultirowFields [("affected_rows", RQL.MCount)]
mutOutput = IR.MOutMultirowFields [("affected_rows", IR.MCount)]
-- | Validate an insert object based on insert columns,
-- insert object relations, and additional columns from parent.
@ -273,45 +271,43 @@ validateInsert insCols objRels addCols = do
mkInsertQ
:: MonadError QErr m
=> QualifiedTable
-> Maybe (RQL.ConflictClauseP1 'Postgres S.SQLExp)
-> [(PGCol, S.SQLExp)]
-> Map.HashMap PGCol S.SQLExp
-> Maybe (IR.ConflictClauseP1 'Postgres PG.SQLExp)
-> [(PGCol, PG.SQLExp)]
-> Map.HashMap PGCol PG.SQLExp
-> (AnnBoolExpSQL 'Postgres, Maybe (AnnBoolExpSQL 'Postgres))
-> m S.CTE
-> m PG.CTE
mkInsertQ table onConflictM insCols defVals (insCheck, updCheck) = do
let sqlConflict = RQL.toSQLConflict table <$> onConflictM
let sqlConflict = PGT.toSQLConflict table <$> onConflictM
sqlExps = mkSQLRow defVals insCols
valueExp = S.ValuesExp [S.TupleExp sqlExps]
valueExp = PG.ValuesExp [PG.TupleExp sqlExps]
tableCols = Map.keys defVals
sqlInsert =
S.SQLInsert table tableCols valueExp sqlConflict
PG.SQLInsert table tableCols valueExp sqlConflict
. Just
$ S.RetExp
[ S.selectStar
, S.Extractor
(RQL.insertOrUpdateCheckExpr table onConflictM
(RQL.toSQLBoolExp (S.QualTable table) insCheck)
(fmap (RQL.toSQLBoolExp (S.QualTable table)) updCheck))
Nothing
$ PG.RetExp
[ PG.selectStar
, PGT.insertOrUpdateCheckExpr table onConflictM
(PGT.toSQLBoolExp (PG.QualTable table) insCheck)
(fmap (PGT.toSQLBoolExp (PG.QualTable table)) updCheck)
]
pure $ S.CTEInsert sqlInsert
pure $ PG.CTEInsert sqlInsert
fetchFromColVals
:: MonadError QErr m
=> ColumnValues TxtEncodedPGVal
-> [ColumnInfo 'Postgres]
-> m [(PGCol, S.SQLExp)]
-> m [(PGCol, PG.SQLExp)]
fetchFromColVals colVal reqCols =
forM reqCols $ \ci -> do
let valM = Map.lookup (pgiColumn ci) colVal
val <- onNothing valM $ throw500 $ "column "
<> pgiColumn ci <<> " not found in given colVal"
let pgColVal = case val of
TENull -> S.SENull
TELit t -> S.SELit t
TENull -> PG.SENull
TELit t -> PG.SELit t
return (pgiColumn ci, pgColVal)
mkSQLRow :: Map.HashMap PGCol S.SQLExp -> [(PGCol, S.SQLExp)] -> [S.SQLExp]
mkSQLRow :: Map.HashMap PGCol PG.SQLExp -> [(PGCol, PG.SQLExp)] -> [PG.SQLExp]
mkSQLRow defVals withPGCol = map snd $
flip map (Map.toList defVals) $
\(col, defVal) -> (col,) $ fromMaybe defVal $ Map.lookup col withPGColMap


@ -14,12 +14,13 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified Hasura.Backends.Postgres.Execute.Mutation as RQL
import qualified Hasura.Backends.Postgres.Execute.Mutation as PGE
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.Logging as L
import qualified Hasura.RQL.IR.Delete as RQL
import qualified Hasura.RQL.IR.Returning as RQL
import qualified Hasura.RQL.IR.Update as RQL
import qualified Hasura.RQL.IR.Delete as IR
import qualified Hasura.RQL.IR.Insert as IR
import qualified Hasura.RQL.IR.Returning as IR
import qualified Hasura.RQL.IR.Update as IR
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.Postgres.Connection
@ -31,11 +32,11 @@ import Hasura.GraphQL.Execute.Prepare
import Hasura.GraphQL.Execute.Remote
import Hasura.GraphQL.Execute.Resolve
import Hasura.GraphQL.Parser
import Hasura.GraphQL.Schema.Insert
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Session
convertDelete
:: ( HasVersion
, MonadError QErr m
@ -44,14 +45,14 @@ convertDelete
, MonadIO tx)
=> Env.Environment
-> SessionVariables
-> RQL.MutationRemoteJoinCtx
-> RQL.AnnDelG 'Postgres UnpreparedValue
-> PGE.MutationRemoteJoinCtx
-> IR.AnnDelG 'Postgres UnpreparedValue
-> Bool
-> m (tx EncJSON)
convertDelete env usrVars remoteJoinCtx deleteOperation stringifyNum = do
let (preparedDelete, expectedVariables) = flip runState Set.empty $ RQL.traverseAnnDel prepareWithoutPlan deleteOperation
let (preparedDelete, expectedVariables) = flip runState Set.empty $ IR.traverseAnnDel prepareWithoutPlan deleteOperation
validateSessionVariables expectedVariables usrVars
pure $ RQL.execDeleteQuery env stringifyNum (Just remoteJoinCtx) (preparedDelete, Seq.empty)
pure $ PGE.execDeleteQuery env stringifyNum (Just remoteJoinCtx) (preparedDelete, Seq.empty)
convertUpdate
:: ( HasVersion
@ -62,17 +63,17 @@ convertUpdate
)
=> Env.Environment
-> SessionVariables
-> RQL.MutationRemoteJoinCtx
-> RQL.AnnUpdG 'Postgres UnpreparedValue
-> PGE.MutationRemoteJoinCtx
-> IR.AnnUpdG 'Postgres UnpreparedValue
-> Bool
-> m (tx EncJSON)
convertUpdate env usrVars remoteJoinCtx updateOperation stringifyNum = do
let (preparedUpdate, expectedVariables) = flip runState Set.empty $ RQL.traverseAnnUpd prepareWithoutPlan updateOperation
if null $ RQL.uqp1OpExps updateOperation
then pure $ pure $ RQL.buildEmptyMutResp $ RQL.uqp1Output preparedUpdate
let (preparedUpdate, expectedVariables) = flip runState Set.empty $ IR.traverseAnnUpd prepareWithoutPlan updateOperation
if null $ IR.uqp1OpExps updateOperation
then pure $ pure $ IR.buildEmptyMutResp $ IR.uqp1Output preparedUpdate
else do
validateSessionVariables expectedVariables usrVars
pure $ RQL.execUpdateQuery env stringifyNum (Just remoteJoinCtx) (preparedUpdate, Seq.empty)
pure $ PGE.execUpdateQuery env stringifyNum (Just remoteJoinCtx) (preparedUpdate, Seq.empty)
convertInsert
:: ( HasVersion
@ -82,8 +83,8 @@ convertInsert
, MonadIO tx)
=> Env.Environment
-> SessionVariables
-> RQL.MutationRemoteJoinCtx
-> AnnInsert 'Postgres UnpreparedValue
-> PGE.MutationRemoteJoinCtx
-> IR.AnnInsert 'Postgres UnpreparedValue
-> Bool
-> m (tx EncJSON)
convertInsert env usrVars remoteJoinCtx insertOperation stringifyNum = do
@ -101,7 +102,7 @@ convertMutationDB
)
=> Env.Environment
-> SessionVariables
-> RQL.MutationRemoteJoinCtx
-> PGE.MutationRemoteJoinCtx
-> Bool
-> MutationDB 'Postgres UnpreparedValue
-> m (tx EncJSON, HTTP.ResponseHeaders)


@ -71,7 +71,7 @@ instance J.ToJSON RootFieldPlan where
RFPPostgres pgPlan -> J.toJSON pgPlan
RFPActionQuery _ -> J.String "Action Execution Tx"
data ActionQueryPlan (b :: Backend)
data ActionQueryPlan (b :: BackendType)
= AQPAsyncQuery !(DS.AnnSimpleSel b) -- ^ Cacheable plan
| AQPQuery !ActionExecuteTx -- ^ Non cacheable transaction


@ -1,25 +1,30 @@
-- | Classes for monads used during schema construction and query parsing.
module Hasura.GraphQL.Parser.Class where
module Hasura.GraphQL.Parser.Class
( MonadParse(..)
, parseError
, QueryReusability(..)
, module Hasura.GraphQL.Parser.Class
) where
import Hasura.Prelude
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Language.Haskell.TH as TH
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.HashMap.Strict as Map
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Language.Haskell.TH as TH
import Data.Has
import Data.Parser.JSONPath
import Data.Text.Extended
import Data.Tuple.Extended
import GHC.Stack (HasCallStack)
import Type.Reflection (Typeable)
import Data.Has
import Data.Text.Extended
import Data.Tuple.Extended
import GHC.Stack (HasCallStack)
import Type.Reflection (Typeable)
import Hasura.Backends.Postgres.SQL.Types
import {-# SOURCE #-} Hasura.GraphQL.Parser.Internal.Parser
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.Session (RoleName)
import Hasura.Backends.Postgres.SQL.Types
import Hasura.GraphQL.Parser.Class.Parse
import Hasura.GraphQL.Parser.Internal.Types
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.Session (RoleName)
{- Note [Tying the knot]
~~~~~~~~~~~~~~~~~~~~~~~~
@ -140,7 +145,7 @@ getTableGQLName
getTableGQLName table = do
tableInfo <- askTableInfo table
let tableCustomName = _tcCustomName . _tciCustomConfig . _tiCoreInfo $ tableInfo
maybe (qualifiedObjectToName table) pure tableCustomName
tableCustomName `onNothing` qualifiedObjectToName table
-- | A wrapper around 'memoizeOn' that memoizes a function by using its argument
-- as the key.
@ -173,41 +178,3 @@ memoize4
-> (a -> b -> c -> d -> m (Parser k n e))
-> (a -> b -> c -> d -> m (Parser k n e))
memoize4 name = curry4 . memoize name . uncurry4
-- | A class that provides functionality for parsing GraphQL queries, i.e.
-- running a fully-constructed 'Parser'.
class Monad m => MonadParse m where
withPath :: (JSONPath -> JSONPath) -> m a -> m a
-- | Not the full power of 'MonadError' because parse errors cannot be
-- caught.
parseErrorWith :: Code -> Text -> m a
-- | See 'QueryReusability'.
markNotReusable :: m ()
parseError :: MonadParse m => Text -> m a
parseError = parseErrorWith ValidationFailed
-- | Tracks whether or not a query is /reusable/. Reusable queries are nice,
-- since we can cache their resolved ASTs and avoid re-resolving them if we
-- receive an identical query. However, we can't always safely reuse queries if
-- they have variables, since some variable values can affect the generated SQL.
-- For example, consider the following query:
--
-- > query users_where($condition: users_bool_exp!) {
-- > users(where: $condition) {
-- > id
-- > }
-- > }
--
-- Different values for @$condition@ will produce completely different queries,
-- so we can't reuse its plan (unless the variable values were also all
-- identical, of course, but we don't bother caching those).
data QueryReusability = Reusable | NotReusable
instance Semigroup QueryReusability where
NotReusable <> _ = NotReusable
_ <> NotReusable = NotReusable
Reusable <> Reusable = Reusable
instance Monoid QueryReusability where
mempty = Reusable


@ -1,5 +0,0 @@
module Hasura.GraphQL.Parser.Class where
import Data.Kind (Type)
class MonadParse (m :: Type -> Type)


@ -0,0 +1,46 @@
-- | Classes for monads used during schema construction and query parsing.
module Hasura.GraphQL.Parser.Class.Parse where
import Hasura.Prelude
import Data.Parser.JSONPath
import Hasura.RQL.Types.Error
-- | A class that provides functionality for parsing GraphQL queries, i.e.
-- running a fully-constructed 'Parser'.
class Monad m => MonadParse m where
withPath :: (JSONPath -> JSONPath) -> m a -> m a
-- | Not the full power of 'MonadError' because parse errors cannot be
-- caught.
parseErrorWith :: Code -> Text -> m a
-- | See 'QueryReusability'.
markNotReusable :: m ()
parseError :: MonadParse m => Text -> m a
parseError = parseErrorWith ValidationFailed
-- | Tracks whether or not a query is /reusable/. Reusable queries are nice,
-- since we can cache their resolved ASTs and avoid re-resolving them if we
-- receive an identical query. However, we can't always safely reuse queries if
-- they have variables, since some variable values can affect the generated SQL.
-- For example, consider the following query:
--
-- > query users_where($condition: users_bool_exp!) {
-- > users(where: $condition) {
-- > id
-- > }
-- > }
--
-- Different values for @$condition@ will produce completely different queries,
-- so we can't reuse its plan (unless the variable values were also all
-- identical, of course, but we don't bother caching those).
data QueryReusability = Reusable | NotReusable
instance Semigroup QueryReusability where
NotReusable <> _ = NotReusable
_ <> NotReusable = NotReusable
Reusable <> Reusable = Reusable
instance Monoid QueryReusability where
mempty = Reusable
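-- A small usage sketch ('allReusable' is a hypothetical helper, not in
-- the source): under this Monoid a query stays reusable only if every
-- sub-parser left it reusable; a single NotReusable poisons the whole
-- query, and the empty list is trivially Reusable via mempty.
allReusable :: [QueryReusability] -> QueryReusability
allReusable = mconcat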


@ -12,19 +12,18 @@ module Hasura.GraphQL.Parser.Collect
( collectFields
) where
import Hasura.Prelude
import Hasura.Prelude
import qualified Data.HashMap.Strict.Extended as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashMap.Strict.Extended as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.List.Extended (duplicates)
import Language.GraphQL.Draft.Syntax
import Data.List.Extended (duplicates)
import Language.GraphQL.Draft.Syntax
import Data.Text.Extended
import Hasura.GraphQL.Parser.Class
import {-# SOURCE #-} Hasura.GraphQL.Parser.Internal.Parser (boolean, runParser)
import Hasura.GraphQL.Parser.Schema
import Hasura.GraphQL.Utils (showNames)
import Data.Text.Extended
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Schema
import Hasura.GraphQL.Utils (showNames)
-- | Collects the effective set of fields queried by a selection set by
-- flattening fragments and merging duplicate fields.
@ -33,10 +32,12 @@ collectFields
=> t Name
-- ^ The names of the object types and interface types the 'SelectionSet' is
-- selecting against.
-> (InputValue Variable -> m Bool)
-- ^ Please pass 'runParser boolean' here (passed explicitly to avoid cyclic imports)
-> SelectionSet NoFragments Variable
-> m (InsOrdHashMap Name (Field NoFragments Variable))
collectFields objectTypeNames selectionSet =
mergeFields =<< flattenSelectionSet objectTypeNames selectionSet
collectFields objectTypeNames boolParser selectionSet =
mergeFields =<< flattenSelectionSet objectTypeNames boolParser selectionSet
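-- An illustrative call site (an assumption about the caller, not shown
-- here): the interface module threads the parser in explicitly, e.g.
--
--   collectFields objectTypeNames (runParser boolean) selectionSet
--
-- which is what keeps this module free of the Internal.Parser import.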
-- | Flattens inline fragments in a selection set. For example,
--
@ -92,9 +93,11 @@ flattenSelectionSet
:: (MonadParse m, Foldable t)
=> t Name
-- ^ The name of the object type the 'SelectionSet' is selecting against.
-> (InputValue Variable -> m Bool)
-- ^ Please pass 'runParser boolean' here (passed explicitly to avoid cyclic imports)
-> SelectionSet NoFragments Variable
-> m [Field NoFragments Variable]
flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection
flattenSelectionSet objectTypeNames boolParser = fmap concat . traverse flattenSelection
where
-- The easy case: just a single field.
flattenSelection (SelectionField field) = do
@ -130,7 +133,7 @@ flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection
flattenInlineFragment InlineFragment{ _ifDirectives, _ifSelectionSet } = do
validateDirectives _ifDirectives
flattenSelectionSet objectTypeNames _ifSelectionSet
flattenSelectionSet objectTypeNames boolParser _ifSelectionSet
applyInclusionDirectives directives continue
| Just directive <- find ((== $$(litName "include")) . _dName) directives
@ -142,7 +145,7 @@ flattenSelectionSet objectTypeNames = fmap concat . traverse flattenSelection
applyInclusionDirective adjust Directive{ _dName, _dArguments } continue = do
ifArgument <- Map.lookup $$(litName "if") _dArguments `onNothing`
parseError ("missing \"if\" argument for " <> _dName <<> " directive")
value <- runParser boolean $ GraphQLValue ifArgument
value <- boolParser $ GraphQLValue ifArgument
if adjust value then continue else pure []
validateDirectives directives =

View File

@ -3,165 +3,41 @@
{-# LANGUAGE StrictData #-}
-- | Defines the 'Parser' type and its primitive combinators.
module Hasura.GraphQL.Parser.Internal.Parser where
module Hasura.GraphQL.Parser.Internal.Parser
( module Hasura.GraphQL.Parser.Internal.Parser
, Parser(..)
, parserType
, runParser
, ParserInput
) where
import Hasura.Prelude
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as S
import qualified Data.List.Extended as LE
import qualified Data.Text as T
import qualified Data.Aeson as A
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as S
import qualified Data.List.Extended as LE
import qualified Data.Text as T
import Control.Lens.Extended hiding (enum, index)
import Data.Int (Int32, Int64)
import Control.Lens.Extended hiding (enum, index)
import Data.Int (Int32, Int64)
import Data.Parser.JSONPath
import Data.Scientific (toBoundedInteger)
import Data.Scientific (toBoundedInteger)
import Data.Text.Extended
import Data.Type.Equality
import Language.GraphQL.Draft.Syntax hiding (Definition)
import Language.GraphQL.Draft.Syntax hiding (Definition)
import Hasura.Backends.Postgres.SQL.Value
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Class.Parse
import Hasura.GraphQL.Parser.Collect
import Hasura.GraphQL.Parser.Internal.Types
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.Error
import Hasura.Server.Utils (englishList)
import Hasura.Server.Utils (englishList)
-- -----------------------------------------------------------------------------
-- type definitions
-- | A 'Parser' that corresponds to a type in the GraphQL schema. A 'Parser' is
-- really two things at once:
--
-- 1. As its name implies, a 'Parser' can be used to parse GraphQL queries
-- (via 'runParser').
--
-- 2. Less obviously, a 'Parser' represents a slice of the GraphQL schema,
-- since every 'Parser' corresponds to a particular GraphQL type, and
-- information about that type can be recovered (via 'parserType').
--
-- A natural way to view this is that 'Parser's support a sort of dynamic
-- reflection: in addition to running a 'Parser' on an input query, you can ask
-- it to tell you about what type of input it expects. Importantly, you can do
-- this even if you don't have a query to parse; this is necessary to implement
-- GraphQL introspection, which provides precisely this sort of reflection on
-- types.
--
-- Another way of viewing a 'Parser' is a little more quantum: just as light
-- "sometimes behaves like a particle and sometimes behaves like a wave," a
-- 'Parser' "sometimes behaves like a query parser and sometimes behaves like a
-- type." In this way, you can think of a function that produces a 'Parser' as
-- simultaneously both a function that constructs a GraphQL schema and a
-- function that parses a GraphQL query. 'Parser' constructors therefore
-- interleave two concerns: information about a type definition (like the type's
-- name and description) and information about how to parse a query on that type.
--
-- Notably, these two concerns happen at totally different phases in the
-- program: GraphQL schema construction happens when @graphql-engine@ first
-- starts up, before it receives any GraphQL queries at all. But query parsing
-- obviously can't happen until there is actually a query to parse. For that
-- reason, it's useful to take care to distinguish which effects are happening
-- at which phase during 'Parser' construction, since otherwise you may get
-- mixed up!
--
-- For some more information about how to interpret the meaning of a 'Parser',
-- see Note [The meaning of Parser 'Output].
data Parser k m a = Parser
{ pType :: ~(Type k)
-- ^ Lazy for knot-tying reasons; see Note [Tying the knot] in
-- Hasura.GraphQL.Parser.Class.
, pParser :: ParserInput k -> m a
} deriving (Functor)
parserType :: Parser k m a -> Type k
parserType = pType
runParser :: Parser k m a -> ParserInput k -> m a
runParser = pParser
instance HasName (Parser k m a) where
getName = getName . pType
instance HasDefinition (Parser k m a) (TypeInfo k) where
definitionLens f parser = definitionLens f (pType parser) <&> \pType -> parser { pType }
type family ParserInput k where
-- see Note [The 'Both kind] in Hasura.GraphQL.Parser.Schema
ParserInput 'Both = InputValue Variable
ParserInput 'Input = InputValue Variable
-- see Note [The meaning of Parser 'Output]
ParserInput 'Output = SelectionSet NoFragments Variable
{- Note [The meaning of Parser 'Output]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ParserInput type family determines what a Parser accepts as input during
query parsing, which varies based on its Kind. A `Parser 'Input`,
unsurprisingly, parses GraphQL input values, much in the same way aeson
`Parser`s parse JSON values.
Therefore, one might naturally conclude that `Parser 'Output` ought to parse
GraphQL output values. But it doesn't---a Parser is used to parse GraphQL
*queries*, and output values don't show up in queries anywhere! Rather, the
output values are the results of executing the query, not something the user
sends us, so we don't have to parse those at all.
What output types really correspond to in GraphQL queries is selection sets. For
example, if we have the GraphQL types
type User {
posts(filters: PostFilters): [Post]
}
input PostFilters {
newer_than: Date
}
type Post {
id: Int
title: String
body: String
}
then we might receive a query that looks like this:
query list_user_posts($user_id: Int, $date: Date) {
user_by_id(id: $user_id) {
posts(filters: {newer_than: $date}) {
id
title
}
}
}
We have Parsers to represent each of these types: a `Parser 'Input` for
PostFilters, and two `Parser 'Output`s for User and Post. When we parse the
query, we pass the `{newer_than: $date}` input value to the PostFilters parser,
as expected. But what do we pass to the User parser? The answer is this
selection set:
{
posts(filters: {newer_than: $date}) {
id
title
}
}
Likewise, the Post parser eventually receives the inner selection set:
{
id
title
}
These Parsers handle interpreting the fields of the selection sets. This is why
`ParserInput 'Output` is SelectionSet---the GraphQL *type* associated with the
Parser is an output type, but the part of the *query* that corresponds to that
output type isn't an output value but a selection set. -}
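A toy reconstruction of the kind-indexed input idea described above (illustrative names only, not Hasura's actual definitions): the same record carries schema-side information and query-side behaviour, and a closed type family picks what runParser consumes for each kind:

{-# LANGUAGE DataKinds, KindSignatures, TypeFamilies #-}
module ToyParser where

data Kind = Input | Output

data ToyValue         -- stands in for InputValue Variable
data ToySelectionSet  -- stands in for SelectionSet NoFragments Variable

type family ToyParserInput (k :: Kind) where
  ToyParserInput 'Input  = ToyValue
  ToyParserInput 'Output = ToySelectionSet

data ToyParser (k :: Kind) m a = ToyParser
  { toyType   :: String                   -- schema-side: the GraphQL type
  , toyParser :: ToyParserInput k -> m a  -- query-side: how to parse it
  }

runToyParser :: ToyParser k m a -> ToyParserInput k -> m a
runToyParser = toyParser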
-- | The constraint @(''Input' '<:' k)@ entails @('ParserInput' k ~ 'Value')@,
-- but GHC can't figure that out on its own, so we have to be explicit to give
-- it a little help.
@ -726,7 +602,7 @@ selectionSetObject name description parsers implementsInterfaces = Parser
-- TODO(PDV) This probably accepts invalid queries, namely queries that use
-- type names that do not exist.
fields <- collectFields (name:parsedInterfaceNames) input
fields <- collectFields (name:parsedInterfaceNames) (runParser boolean) input
for fields \selectionField@Field{ _fName, _fAlias } -> if
| _fName == $$(litName "__typename") ->
pure $ SelectTypename name

View File

@ -1,23 +0,0 @@
module Hasura.GraphQL.Parser.Internal.Parser where
import Hasura.Prelude
import qualified Data.Kind as K
import Language.GraphQL.Draft.Syntax
import {-# SOURCE #-} Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.Schema
type role Parser nominal representational nominal
data Parser (k :: Kind) (m :: K.Type -> K.Type) (a :: K.Type)
runParser :: Parser k m a -> ParserInput k -> m a
type family ParserInput k where
ParserInput 'Both = InputValue Variable
ParserInput 'Input = InputValue Variable
ParserInput 'Output = SelectionSet NoFragments Variable
boolean :: MonadParse m => Parser 'Both m Bool

View File

@ -0,0 +1,142 @@
{-# LANGUAGE StrictData #-}
-- | Defines the 'Parser' type and its primitive combinators.
module Hasura.GraphQL.Parser.Internal.Types where
import Hasura.Prelude
import Language.GraphQL.Draft.Syntax hiding (Definition)
import Hasura.GraphQL.Parser.Schema
-- -----------------------------------------------------------------------------
-- type definitions
-- | A 'Parser' that corresponds to a type in the GraphQL schema. A 'Parser' is
-- really two things at once:
--
-- 1. As its name implies, a 'Parser' can be used to parse GraphQL queries
-- (via 'runParser').
--
-- 2. Less obviously, a 'Parser' represents a slice of the GraphQL schema,
-- since every 'Parser' corresponds to a particular GraphQL type, and
-- information about that type can be recovered (via 'parserType').
--
-- A natural way to view this is that 'Parser's support a sort of dynamic
-- reflection: in addition to running a 'Parser' on an input query, you can ask
-- it to tell you about what type of input it expects. Importantly, you can do
-- this even if you don't have a query to parse; this is necessary to implement
-- GraphQL introspection, which provides precisely this sort of reflection on
-- types.
--
-- Another way of viewing a 'Parser' is a little more quantum: just as light
-- "sometimes behaves like a particle and sometimes behaves like a wave," a
-- 'Parser' "sometimes behaves like a query parser and sometimes behaves like a
-- type." In this way, you can think of a function that produces a 'Parser' as
-- simultaneously both a function that constructs a GraphQL schema and a
-- function that parses a GraphQL query. 'Parser' constructors therefore
-- interleave two concerns: information about a type definition (like the type's
-- name and description) and information about how to parse a query on that type.
--
-- Notably, these two concerns happen at totally different phases in the
-- program: GraphQL schema construction happens when @graphql-engine@ first
-- starts up, before it receives any GraphQL queries at all. But query parsing
-- obviously can't happen until there is actually a query to parse. For that
-- reason, it's useful to take care to distinguish which effects are happening
-- at which phase during 'Parser' construction, since otherwise you may get
-- mixed up!
--
-- For some more information about how to interpret the meaning of a 'Parser',
-- see Note [The meaning of Parser 'Output].
data Parser k m a = Parser
{ pType :: ~(Type k)
-- ^ Lazy for knot-tying reasons; see Note [Tying the knot] in
-- Hasura.GraphQL.Parser.Class.
, pParser :: ParserInput k -> m a
} deriving (Functor)
instance HasName (Parser k m a) where
getName = getName . pType
instance HasDefinition (Parser k m a) (TypeInfo k) where
definitionLens f parser = definitionLens f (pType parser) <&> \pType -> parser { pType }
type family ParserInput k where
-- see Note [The 'Both kind] in Hasura.GraphQL.Parser.Schema
ParserInput 'Both = InputValue Variable
ParserInput 'Input = InputValue Variable
-- see Note [The meaning of Parser 'Output]
ParserInput 'Output = SelectionSet NoFragments Variable
parserType :: Parser k m a -> Type k
parserType = pType
runParser :: Parser k m a -> ParserInput k -> m a
runParser = pParser
{- Note [The meaning of Parser 'Output]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ParserInput type family determines what a Parser accepts as input during
query parsing, which varies based on its Kind. A `Parser 'Input`,
unsurprisingly, parses GraphQL input values, much in the same way aeson
`Parser`s parse JSON values.
Therefore, one might naturally conclude that `Parser 'Output` ought to parse
GraphQL output values. But it doesn't---a Parser is used to parse GraphQL
*queries*, and output values don't show up in queries anywhere! Rather, the
output values are the results of executing the query, not something the user
sends us, so we don't have to parse those at all.
What output types really correspond to in GraphQL queries is selection sets. For
example, if we have the GraphQL types
type User {
posts(filters: PostFilters): [Post]
}
input PostFilters {
newer_than: Date
}
type Post {
id: Int
title: String
body: String
}
then we might receive a query that looks like this:
query list_user_posts($user_id: Int, $date: Date) {
user_by_id(id: $user_id) {
posts(filters: {newer_than: $date}) {
id
title
}
}
}
We have Parsers to represent each of these types: a `Parser 'Input` for
PostFilters, and two `Parser 'Output`s for User and Post. When we parse the
query, we pass the `{newer_than: $date}` input value to the PostFilters parser,
as expected. But what do we pass to the User parser? The answer is this
selection set:
{
posts(filters: {newer_than: $date}) {
id
title
}
}
Likewise, the Post parser eventually receives the inner selection set:
{
id
title
}
These Parsers handle interpreting the fields of the selection sets. This is why
`ParserInput 'Output` is SelectionSet---the GraphQL *type* associated with the
Parser is an output type, but the part of the *query* that corresponds to that
output type isn't an output value but a selection set. -}
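To make the point concrete, here is a toy "output parser" for the User type above (all names and the resolution strategy are illustrative, not Hasura's API): its input is the flattened selection set, and parsing means dispatching each requested field, never touching an output value:

import qualified Data.Map.Strict as Map

type ToySelection = [String]  -- a flattened selection set: field names

-- Map each requested field to the (toy) SQL fragment that computes it;
-- an unknown field is a parse error, exactly as in real selection parsing.
userParser :: ToySelection -> Either String [String]
userParser = traverse resolve
  where
    fields = Map.fromList
      [ ("id",    "users.id")
      , ("posts", "SELECT ... FROM posts WHERE posts.user_id = users.id")
      ]
    resolve f = maybe (Left ("no such field: " <> f)) Right (Map.lookup f fields)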

View File

@ -1,3 +1,4 @@
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE Arrows #-}
module Hasura.GraphQL.Schema
( buildGQLContext
@ -131,19 +132,19 @@ buildRoleContext
-> [P.FieldParser (P.ParseT Identity) RemoteField]
-> RoleName
-> m (RoleContext GQLContext)
buildRoleContext queryContext allTables allFunctions allActionInfos
nonObjectCustomTypes queryRemotes mutationRemotes roleName =
buildRoleContext queryContext (takeValidTables -> allTables) (takeValidFunctions -> allFunctions)
allActionInfos nonObjectCustomTypes queryRemotes mutationRemotes roleName =
runMonadSchema roleName queryContext validTables $ do
runMonadSchema roleName queryContext allTables $ do
mutationParserFrontend <-
buildPGMutationFields Frontend validTableNames >>=
buildPGMutationFields Frontend tableNames >>=
buildMutationParser mutationRemotes allActionInfos nonObjectCustomTypes
mutationParserBackend <-
buildPGMutationFields Backend validTableNames >>=
buildPGMutationFields Backend tableNames >>=
buildMutationParser mutationRemotes allActionInfos nonObjectCustomTypes
queryPGFields <- buildPostgresQueryFields validTableNames validFunctions
queryPGFields <- buildPostgresQueryFields tableNames allFunctions
subscriptionParser <- buildSubscriptionParser queryPGFields allActionInfos
queryParserFrontend <- buildQueryParser queryPGFields queryRemotes
@ -158,13 +159,22 @@ buildRoleContext queryContext allTables allFunctions allActionInfos
pure $ RoleContext frontendContext $ Just backendContext
where
tableNames = Map.keysSet allTables
tableFilter = not . isSystemDefined . _tciSystemDefined
functionFilter = not . isSystemDefined . fiSystemDefined
takeValidTables :: TableCache -> TableCache
takeValidTables = Map.filterWithKey graphQLTableFilter . Map.filter tableFilter
where
tableFilter = not . isSystemDefined . _tciSystemDefined . _tiCoreInfo
graphQLTableFilter tableName tableInfo =
-- either the table name should be GraphQL-compliant,
-- or it should have a custom GraphQL name set for it
isGraphQLCompliantTableName tableName
|| (isJust . _tcCustomName . _tciCustomConfig . _tiCoreInfo $ tableInfo)
validTables = Map.filter (tableFilter . _tiCoreInfo) allTables
validTableNames = Map.keysSet validTables
validFunctions = Map.elems $ Map.filter functionFilter allFunctions
takeValidFunctions :: FunctionCache -> [FunctionInfo]
takeValidFunctions = Map.elems . Map.filter functionFilter
where
functionFilter = not . isSystemDefined . fiSystemDefined
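The filtering rule spelled out in the comment above reduces to a simple predicate; a hedged sketch (toy booleans standing in for the real checks on the table info):

-- Sketch only: a table is kept when it is user-defined AND either its
-- name is already GraphQL-compliant or a custom GraphQL name overrides it.
shouldTrackTable :: Bool -> Bool -> Bool -> Bool
shouldTrackTable isSystemDefined nameIsCompliant hasCustomName =
  not isSystemDefined && (nameIsCompliant || hasCustomName)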
buildFullestDBSchema
:: (MonadError QErr m, MonadIO m, MonadUnique m)
@ -172,14 +182,14 @@ buildFullestDBSchema
-> m ( Parser 'Output (P.ParseT Identity) (OMap.InsOrdHashMap G.Name (QueryRootField UnpreparedValue))
, Maybe (Parser 'Output (P.ParseT Identity) (OMap.InsOrdHashMap G.Name (MutationRootField UnpreparedValue)))
)
buildFullestDBSchema queryContext allTables allFunctions allActionInfos
nonObjectCustomTypes = do
runMonadSchema adminRoleName queryContext validTables $ do
buildFullestDBSchema queryContext (takeValidTables -> allTables) (takeValidFunctions -> allFunctions)
allActionInfos nonObjectCustomTypes = do
runMonadSchema adminRoleName queryContext allTables $ do
mutationParserFrontend <-
buildPGMutationFields Frontend validTableNames >>=
buildPGMutationFields Frontend tableNames >>=
buildMutationParser mempty allActionInfos nonObjectCustomTypes
queryPGFields <- buildPostgresQueryFields validTableNames validFunctions
queryPGFields <- buildPostgresQueryFields tableNames allFunctions
subscriptionParser <- buildSubscriptionParser queryPGFields allActionInfos
queryParserFrontend <- buildQueryParser queryPGFields mempty
@ -188,13 +198,7 @@ buildFullestDBSchema queryContext allTables allFunctions allActionInfos
pure (queryParserFrontend, mutationParserFrontend)
where
tableFilter = not . isSystemDefined . _tciSystemDefined
functionFilter = not . isSystemDefined . fiSystemDefined
validTables = Map.filter (tableFilter . _tiCoreInfo) allTables
validTableNames = Map.keysSet validTables
validFunctions = Map.elems $ Map.filter functionFilter allFunctions
tableNames = Map.keysSet allTables
buildRelayRoleContext
:: (MonadError QErr m, MonadIO m, MonadUnique m)
@ -202,19 +206,19 @@ buildRelayRoleContext
-> [P.FieldParser (P.ParseT Identity) RemoteField]
-> RoleName
-> m (RoleContext GQLContext)
buildRelayRoleContext queryContext allTables allFunctions allActionInfos
nonObjectCustomTypes mutationRemotes roleName =
buildRelayRoleContext queryContext (takeValidTables -> allTables) (takeValidFunctions -> allFunctions)
allActionInfos nonObjectCustomTypes mutationRemotes roleName =
runMonadSchema roleName queryContext validTables $ do
runMonadSchema roleName queryContext allTables $ do
mutationParserFrontend <-
buildPGMutationFields Frontend validTableNames >>=
buildPGMutationFields Frontend tableNames >>=
buildMutationParser mutationRemotes allActionInfos nonObjectCustomTypes
mutationParserBackend <-
buildPGMutationFields Backend validTableNames >>=
buildPGMutationFields Backend tableNames >>=
buildMutationParser mutationRemotes allActionInfos nonObjectCustomTypes
queryPGFields <- buildRelayPostgresQueryFields validTableNames validFunctions
queryPGFields <- buildRelayPostgresQueryFields tableNames allFunctions
subscriptionParser <- P.safeSelectionSet subscriptionRoot Nothing queryPGFields
<&> fmap (fmap (P.handleTypename (RFRaw . J.String. G.unName)))
queryParserFrontend <- queryWithIntrospectionHelper queryPGFields
@ -229,13 +233,7 @@ buildRelayRoleContext queryContext allTables allFunctions allActionInfos
pure $ RoleContext frontendContext $ Just backendContext
where
tableFilter = not . isSystemDefined . _tciSystemDefined
functionFilter = not . isSystemDefined . fiSystemDefined
validTables = Map.filter (tableFilter . _tiCoreInfo) allTables
validTableNames = Map.keysSet validTables
validFunctions = Map.elems $ Map.filter functionFilter allFunctions
tableNames = Map.keysSet allTables
unauthenticatedContext
:: forall m

View File

@ -1,77 +0,0 @@
module Hasura.GraphQL.Schema.Insert where
import Hasura.Prelude
import qualified Hasura.RQL.IR.Insert as RQL
import qualified Hasura.RQL.IR.Returning as RQL
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
-- At time of writing (August 2020), GraphQL queries and mutations get
-- translated into corresponding RQL queries: RQL is used as the internal
-- intermediary representation, before a query gets translated into
-- SQL. However, RQL's insert representation does not support nested insertions,
-- which means that GraphQL inserts need a separate representation, found here.
-- FIXME: this code doesn't belong in this folder: arguably, since this is an
-- internal representation of a mutation, it should belong alongside RQL rather
-- than alongside the schema code, especially if we transition RQL to only be an
-- intermediary representation library rather than an actual API (see [1] for
-- more information).
-- [1] https://gist.github.com/abooij/07165b5ac36097178a334bc03805c33b
-- FIXME: this representation was lifted almost verbatim from pre-PDV code, and
-- hasn't been adapted to reflect the changes that PDV brought. It is therefore
-- quite likely that some of the information stored in those structures is
-- redundant, and that they can be simplified.
data AnnInsert (b :: Backend) v
= AnnInsert
{ _aiFieldName :: !Text
, _aiIsSingle :: Bool
, _aiData :: AnnMultiInsert b v
}
data AnnIns (b :: Backend) a v
= AnnIns
{ _aiInsObj :: !a
, _aiTableName :: !QualifiedTable
, _aiConflictClause :: !(Maybe (RQL.ConflictClauseP1 b v))
, _aiCheckCond :: !(AnnBoolExp b v, Maybe (AnnBoolExp b v))
, _aiTableCols :: ![ColumnInfo b]
, _aiDefVals :: !(PreSetColsG b v)
}
type SingleObjIns b v = AnnIns b (AnnInsObj b v) v
type MultiObjIns b v = AnnIns b [AnnInsObj b v] v
data RelIns a
= RelIns
{ _riAnnIns :: !a
, _riRelInfo :: !RelInfo
} deriving (Show, Eq)
type ObjRelIns b v = RelIns (SingleObjIns b v)
type ArrRelIns b v = RelIns (MultiObjIns b v)
data AnnInsObj (b :: Backend) v
= AnnInsObj
{ _aioColumns :: ![(Column b, v)]
, _aioObjRels :: ![ObjRelIns b v]
, _aioArrRels :: ![ArrRelIns b v]
}
type AnnSingleInsert b v = (SingleObjIns b v, RQL.MutationOutputG b v)
type AnnMultiInsert b v = (MultiObjIns b v, RQL.MutationOutputG b v)
instance Semigroup (AnnInsObj backend v) where
(AnnInsObj col1 obj1 rel1) <> (AnnInsObj col2 obj2 rel2) =
AnnInsObj (col1 <> col2) (obj1 <> obj2) (rel1 <> rel2)
instance Monoid (AnnInsObj backend v) where
mempty = AnnInsObj [] [] []
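Each parsed insert field contributes a singleton AnnInsObj (one column, one object relationship, or one array relationship), and the Monoid instance glues them into the full row. A sketch of the same shape with simplified types, to show how (<>) merges per-field results:

data ToyInsObj = ToyInsObj
  { tioColumns :: [(String, Int)]  -- (column name, value)
  , tioObjRels :: [String]         -- object-relationship inserts
  , tioArrRels :: [String]         -- array-relationship inserts
  } deriving Show

instance Semigroup ToyInsObj where
  ToyInsObj c1 o1 a1 <> ToyInsObj c2 o2 a2 =
    ToyInsObj (c1 <> c2) (o1 <> o2) (a1 <> a2)

instance Monoid ToyInsObj where
  mempty = ToyInsObj [] [] []

-- mconcat [ToyInsObj [("id", 1)] [] [], ToyInsObj [] ["author"] []]
--   evaluates to ToyInsObj [("id", 1)] ["author"] []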

View File

@ -10,7 +10,6 @@ module Hasura.GraphQL.Schema.Mutation
) where
import Data.Has
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
@ -19,24 +18,25 @@ import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.RQL.IR.Delete as RQL
import qualified Hasura.RQL.IR.Insert as RQL
import qualified Hasura.RQL.IR.Returning as RQL
import qualified Hasura.RQL.IR.Update as RQL
import Data.Has
import Data.Text.Extended
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.RQL.IR.Delete as IR
import qualified Hasura.RQL.IR.Insert as IR
import qualified Hasura.RQL.IR.Returning as IR
import qualified Hasura.RQL.IR.Update as IR
import Hasura.Backends.Postgres.SQL.Types
import Hasura.GraphQL.Parser (FieldParser, InputFieldsParser, Kind (..),
Parser, UnpreparedValue (..), mkParameter)
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.BoolExp
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Insert
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Schema.Table
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (ConstraintName)
@ -51,7 +51,7 @@ insertIntoTable
-> InsPermInfo 'Postgres -- ^ insert permissions of the table
-> Maybe (SelPermInfo 'Postgres) -- ^ select permissions of the table (if any)
-> Maybe (UpdPermInfo 'Postgres) -- ^ update permissions of the table (if any)
-> m (FieldParser n (AnnInsert 'Postgres UnpreparedValue))
-> m (FieldParser n (IR.AnnInsert 'Postgres UnpreparedValue))
insertIntoTable table fieldName description insertPerms selectPerms updatePerms = do
columns <- tableColumns table
selectionParser <- mutationSelectionSet table selectPerms
@ -64,10 +64,10 @@ insertIntoTable table fieldName description insertPerms selectPerms updatePerms
objects <- P.field objectsName (Just objectsDesc) objectsParser
pure (conflictClause, objects)
pure $ P.subselection fieldName description argsParser selectionParser
<&> \((conflictClause, objects), output) -> AnnInsert (G.unName fieldName) False
( mkInsertObject objects table columns conflictClause insertPerms updatePerms
, RQL.MOutMultirowFields output
)
<&> \((conflictClause, objects), output) -> IR.AnnInsert (G.unName fieldName) False
( mkInsertObject objects table columns conflictClause insertPerms updatePerms
, IR.MOutMultirowFields output
)
mkConflictClause :: MonadParse n => Maybe (Parser 'Input n a) -> InputFieldsParser n (Maybe a)
mkConflictClause conflictParser
@ -88,7 +88,7 @@ insertOneIntoTable
-> InsPermInfo 'Postgres -- ^ insert permissions of the table
-> SelPermInfo 'Postgres -- ^ select permissions of the table
-> Maybe (UpdPermInfo 'Postgres) -- ^ update permissions of the table (if any)
-> m (FieldParser n (AnnInsert 'Postgres UnpreparedValue))
-> m (FieldParser n (IR.AnnInsert 'Postgres UnpreparedValue))
insertOneIntoTable table fieldName description insertPerms selectPerms updatePerms = do
columns <- tableColumns table
selectionParser <- tableSelectionSet table selectPerms
@ -101,9 +101,9 @@ insertOneIntoTable table fieldName description insertPerms selectPerms updatePer
object <- P.field objectName (Just objectDesc) objectParser
pure (conflictClause, object)
pure $ P.subselection fieldName description argsParser selectionParser
<&> \((conflictClause, object), output) -> AnnInsert (G.unName fieldName) True
<&> \((conflictClause, object), output) -> IR.AnnInsert (G.unName fieldName) True
( mkInsertObject [object] table columns conflictClause insertPerms updatePerms
, RQL.MOutSinglerowObject output
, IR.MOutSinglerowObject output
)
-- | We specify the data of an individual row to insert through this input parser.
@ -111,7 +111,7 @@ tableFieldsInput
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable -- ^ qualified name of the table
-> InsPermInfo 'Postgres -- ^ insert permissions of the table
-> m (Parser 'Input n (AnnInsObj 'Postgres UnpreparedValue))
-> m (Parser 'Input n (IR.AnnInsObj 'Postgres UnpreparedValue))
tableFieldsInput table insertPerms = memoizeOn 'tableFieldsInput table do
tableGQLName <- getTableGQLName table
allFields <- _tciFieldInfoMap . _tiCoreInfo <$> askTableInfo table
@ -124,7 +124,7 @@ tableFieldsInput table insertPerms = memoizeOn 'tableFieldsInput table do
columnDesc = pgiDescription columnInfo
fieldParser <- P.column (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
pure $ P.fieldOptional columnName columnDesc fieldParser `mapField`
\(mkParameter -> value) -> AnnInsObj [(pgiColumn columnInfo, value)] [] []
\(mkParameter -> value) -> IR.AnnInsObj [(pgiColumn columnInfo, value)] [] []
FIRelationship relationshipInfo -> runMaybeT $ do
let otherTable = riRTable relationshipInfo
relName = riName relationshipInfo
@ -137,12 +137,12 @@ tableFieldsInput table insertPerms = memoizeOn 'tableFieldsInput table do
ObjRel -> do
parser <- objectRelationshipInput otherTable insPerms selPerms updPerms
pure $ P.fieldOptional relFieldName Nothing parser `mapField`
\objRelIns -> AnnInsObj [] [RelIns objRelIns relationshipInfo] []
\objRelIns -> IR.AnnInsObj [] [IR.RelIns objRelIns relationshipInfo] []
ArrRel -> do
parser <- P.nullable <$> arrayRelationshipInput otherTable insPerms selPerms updPerms
pure $ P.fieldOptional relFieldName Nothing parser <&> \arrRelIns -> do
rel <- join arrRelIns
Just $ AnnInsObj [] [] [RelIns rel relationshipInfo | not $ null $ _aiInsObj rel]
Just $ IR.AnnInsObj [] [] [IR.RelIns rel relationshipInfo | not $ null $ IR._aiInsObj rel]
let objectName = tableGQLName <> $$(G.litName "_insert_input")
objectDesc = G.Description $ "input type for inserting data into table " <>> table
pure $ P.object objectName (Just objectDesc) $ catMaybes <$> sequenceA objectFields
@ -155,7 +155,7 @@ objectRelationshipInput
-> InsPermInfo 'Postgres
-> Maybe (SelPermInfo 'Postgres)
-> Maybe (UpdPermInfo 'Postgres)
-> m (Parser 'Input n (SingleObjIns 'Postgres UnpreparedValue))
-> m (Parser 'Input n (IR.SingleObjIns 'Postgres UnpreparedValue))
objectRelationshipInput table insertPerms selectPerms updatePerms =
memoizeOn 'objectRelationshipInput table do
tableGQLName <- getTableGQLName table
@ -178,7 +178,7 @@ arrayRelationshipInput
-> InsPermInfo 'Postgres
-> Maybe (SelPermInfo 'Postgres)
-> Maybe (UpdPermInfo 'Postgres)
-> m (Parser 'Input n (MultiObjIns 'Postgres UnpreparedValue))
-> m (Parser 'Input n (IR.MultiObjIns 'Postgres UnpreparedValue))
arrayRelationshipInput table insertPerms selectPerms updatePerms =
memoizeOn 'arrayRelationshipInput table do
tableGQLName <- getTableGQLName table
@ -198,22 +198,22 @@ mkInsertObject
:: a
-> QualifiedTable
-> [ColumnInfo 'Postgres]
-> Maybe (RQL.ConflictClauseP1 'Postgres UnpreparedValue)
-> Maybe (IR.ConflictClauseP1 'Postgres UnpreparedValue)
-> InsPermInfo 'Postgres
-> Maybe (UpdPermInfo 'Postgres)
-> AnnIns 'Postgres a UnpreparedValue
-> IR.AnnIns 'Postgres a UnpreparedValue
mkInsertObject objects table columns conflictClause insertPerms updatePerms =
AnnIns { _aiInsObj = objects
, _aiTableName = table
, _aiConflictClause = conflictClause
, _aiCheckCond = (insertCheck, updateCheck)
, _aiTableCols = columns
, _aiDefVals = defaultValues
}
IR.AnnIns { _aiInsObj = objects
, _aiTableName = table
, _aiConflictClause = conflictClause
, _aiCheckCond = (insertCheck, updateCheck)
, _aiTableCols = columns
, _aiDefVals = defaultValues
}
where insertCheck = fmapAnnBoolExp partialSQLExpToUnpreparedValue $ ipiCheck insertPerms
updateCheck = fmapAnnBoolExp partialSQLExpToUnpreparedValue <$> (upiCheck =<< updatePerms)
defaultValues = Map.union (partialSQLExpToUnpreparedValue <$> ipiSet insertPerms)
$ fmap UVLiteral $ S.mkColDefValMap $ map pgiColumn columns
$ fmap UVLiteral $ PG.mkColDefValMap $ map pgiColumn columns
-- | Specifies the "ON CONFLICT" SQL clause
conflictObject
@ -221,7 +221,7 @@ conflictObject
=> QualifiedTable
-> Maybe (SelPermInfo 'Postgres)
-> UpdPermInfo 'Postgres
-> m (Maybe (Parser 'Input n (RQL.ConflictClauseP1 'Postgres UnpreparedValue)))
-> m (Maybe (Parser 'Input n (IR.ConflictClauseP1 'Postgres UnpreparedValue)))
conflictObject table selectPerms updatePerms = runMaybeT $ do
tableGQLName <- getTableGQLName table
columnsEnum <- MaybeT $ tableUpdateColumnsEnum table updatePerms
@ -234,12 +234,12 @@ conflictObject table selectPerms updatePerms = runMaybeT $ do
columnsName = $$(G.litName "update_columns")
whereExpName = $$(G.litName "where")
fieldsParser = do
constraint <- RQL.CTConstraint <$> P.field constraintName Nothing constraintParser
constraint <- IR.CTConstraint <$> P.field constraintName Nothing constraintParser
columns <- P.field columnsName Nothing $ P.list columnsEnum
whereExp <- P.fieldOptional whereExpName Nothing whereExpParser
pure $ case columns of
[] -> RQL.CP1DoNothing $ Just constraint
_ -> RQL.CP1Update constraint columns preSetColumns $
[] -> IR.CP1DoNothing $ Just constraint
_ -> IR.CP1Update constraint columns preSetColumns $
BoolAnd $ catMaybes [whereExp, Just $ fmapAnnBoolExp partialSQLExpToUnpreparedValue $ upiFilter updatePerms]
pure $ P.object objectName (Just objectDesc) fieldsParser
where preSetColumns = partialSQLExpToUnpreparedValue <$> upiSet updatePerms
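For orientation, the input object built here backs the on_conflict argument of generated insert mutations; a representative hand-written query (table, constraint, and column names are hypothetical) might look like:

mutation upsert_user {
  insert_user(
    objects: [{ id: 1, name: "alice" }]
    on_conflict: {
      constraint: user_pkey
      update_columns: [name]
      where: { id: { _eq: 1 } }
    }
  ) {
    affected_rows
  }
}

With an empty update_columns list, the parser above produces IR.CP1DoNothing, i.e. an ON CONFLICT DO NOTHING clause.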
@ -273,7 +273,7 @@ updateTable
-> Maybe G.Description -- ^ field description, if any
-> UpdPermInfo 'Postgres -- ^ update permissions of the table
-> Maybe (SelPermInfo 'Postgres) -- ^ select permissions of the table (if any)
-> m (Maybe (FieldParser n (RQL.AnnUpdG 'Postgres UnpreparedValue)))
-> m (Maybe (FieldParser n (IR.AnnUpdG 'Postgres UnpreparedValue)))
updateTable table fieldName description updatePerms selectPerms = runMaybeT $ do
let whereName = $$(G.litName "where")
whereDesc = "filter the rows which have to be updated"
@ -283,7 +283,7 @@ updateTable table fieldName description updatePerms selectPerms = runMaybeT $ do
selection <- lift $ mutationSelectionSet table selectPerms
let argsParser = liftA2 (,) opArgs whereArg
pure $ P.subselection fieldName description argsParser selection
<&> mkUpdateObject table columns updatePerms . fmap RQL.MOutMultirowFields
<&> mkUpdateObject table columns updatePerms . fmap IR.MOutMultirowFields
-- | Construct a root field, normally called update_tablename, that can be used
-- to update a single row in a DB table, specified by primary key. Only returns a
@ -296,7 +296,7 @@ updateTableByPk
-> Maybe G.Description -- ^ field description, if any
-> UpdPermInfo 'Postgres -- ^ update permissions of the table
-> SelPermInfo 'Postgres -- ^ select permissions of the table
-> m (Maybe (FieldParser n (RQL.AnnUpdG 'Postgres UnpreparedValue)))
-> m (Maybe (FieldParser n (IR.AnnUpdG 'Postgres UnpreparedValue)))
updateTableByPk table fieldName description updatePerms selectPerms = runMaybeT $ do
tableGQLName <- getTableGQLName table
columns <- lift $ tableSelectColumns table selectPerms
@ -311,26 +311,26 @@ updateTableByPk table fieldName description updatePerms selectPerms = runMaybeT
primaryKeys <- P.field pkFieldName Nothing $ P.object pkObjectName (Just pkObjectDesc) pkArgs
pure (operators, primaryKeys)
pure $ P.subselection fieldName description argsParser selection
<&> mkUpdateObject table columns updatePerms . fmap RQL.MOutSinglerowObject
<&> mkUpdateObject table columns updatePerms . fmap IR.MOutSinglerowObject
mkUpdateObject
:: QualifiedTable
-> [ColumnInfo 'Postgres]
-> UpdPermInfo 'Postgres
-> ( ( [(PGCol, RQL.UpdOpExpG UnpreparedValue)]
-> ( ( [(PGCol, IR.UpdOpExpG UnpreparedValue)]
, AnnBoolExp 'Postgres UnpreparedValue
)
, RQL.MutationOutputG 'Postgres UnpreparedValue
, IR.MutationOutputG 'Postgres UnpreparedValue
)
-> RQL.AnnUpdG 'Postgres UnpreparedValue
-> IR.AnnUpdG 'Postgres UnpreparedValue
mkUpdateObject table columns updatePerms ((opExps, whereExp), mutationOutput) =
RQL.AnnUpd { RQL.uqp1Table = table
, RQL.uqp1OpExps = opExps
, RQL.uqp1Where = (permissionFilter, whereExp)
, RQL.uqp1Check = checkExp
, RQL.uqp1Output = mutationOutput
, RQL.uqp1AllCols = columns
}
IR.AnnUpd { IR.uqp1Table = table
, IR.uqp1OpExps = opExps
, IR.uqp1Where = (permissionFilter, whereExp)
, IR.uqp1Check = checkExp
, IR.uqp1Output = mutationOutput
, IR.uqp1AllCols = columns
}
where
permissionFilter = fmapAnnBoolExp partialSQLExpToUnpreparedValue $ upiFilter updatePerms
checkExp = maybe annBoolExpTrue (fmapAnnBoolExp partialSQLExpToUnpreparedValue) $ upiCheck updatePerms
@ -340,7 +340,7 @@ updateOperators
:: forall m n r. (MonadSchema n m, MonadTableInfo r m)
=> QualifiedTable -- ^ qualified name of the table
-> UpdPermInfo 'Postgres -- ^ update permissions of the table
-> m (Maybe (InputFieldsParser n [(PGCol, RQL.UpdOpExpG UnpreparedValue)]))
-> m (Maybe (InputFieldsParser n [(PGCol, IR.UpdOpExpG UnpreparedValue)]))
updateOperators table updatePermissions = do
tableGQLName <- getTableGQLName table
columns <- tableUpdateColumns table updatePermissions
@ -348,42 +348,42 @@ updateOperators table updatePermissions = do
jsonCols = onlyJSONBCols columns
parsers <- catMaybes <$> sequenceA
[ updateOperator tableGQLName $$(G.litName "_set")
columnParser RQL.UpdSet columns
columnParser IR.UpdSet columns
"sets the columns of the filtered rows to the given values"
(G.Description $ "input type for updating data in table " <>> table)
, updateOperator tableGQLName $$(G.litName "_inc")
columnParser RQL.UpdInc numericCols
columnParser IR.UpdInc numericCols
"increments the numeric columns with given value of the filtered values"
(G.Description $"input type for incrementing numeric columns in table " <>> table)
, let desc = "prepend existing jsonb value of filtered columns with new jsonb value"
in updateOperator tableGQLName $$(G.litName "_prepend")
columnParser RQL.UpdPrepend jsonCols desc desc
columnParser IR.UpdPrepend jsonCols desc desc
, let desc = "append existing jsonb value of filtered columns with new jsonb value"
in updateOperator tableGQLName $$(G.litName "_append")
columnParser RQL.UpdAppend jsonCols desc desc
columnParser IR.UpdAppend jsonCols desc desc
, let desc = "delete key/value pair or string element. key/value pairs are matched based on their key value"
in updateOperator tableGQLName $$(G.litName "_delete_key")
nullableTextParser RQL.UpdDeleteKey jsonCols desc desc
nullableTextParser IR.UpdDeleteKey jsonCols desc desc
, let desc = "delete the array element with specified index (negative integers count from the end). "
<> "throws an error if top level container is not an array"
in updateOperator tableGQLName $$(G.litName "_delete_elem")
nonNullableIntParser RQL.UpdDeleteElem jsonCols desc desc
nonNullableIntParser IR.UpdDeleteElem jsonCols desc desc
, let desc = "delete the field or element with specified path (for JSON arrays, negative integers count from the end)"
in updateOperator tableGQLName $$(G.litName "_delete_at_path")
(fmap P.list . nonNullableTextParser) RQL.UpdDeleteAtPath jsonCols desc desc
(fmap P.list . nonNullableTextParser) IR.UpdDeleteAtPath jsonCols desc desc
]
whenMaybe (not $ null parsers) do
let allowedOperators = fst <$> parsers
pure $ fmap catMaybes (sequenceA $ snd <$> parsers)
`P.bindFields` \opExps -> do
-- there needs to be at least one operator in the update, even if it is empty
let presetColumns = Map.toList $ RQL.UpdSet . partialSQLExpToUnpreparedValue <$> upiSet updatePermissions
let presetColumns = Map.toList $ IR.UpdSet . partialSQLExpToUnpreparedValue <$> upiSet updatePermissions
when (null opExps && null presetColumns) $ parseError $
"at least any one of " <> commaSeparated allowedOperators <> " is expected"
@ -392,7 +392,7 @@ updateOperators table updatePermissions = do
erroneousExps = OMap.filter ((>1) . length) $ OMap.groupTuples flattenedExps
unless (OMap.null erroneousExps) $ parseError $
"column found in multiple operators; " <>
T.intercalate ". " [ dquote column <> " in " <> commaSeparated (RQL.updateOperatorText <$> ops)
T.intercalate ". " [ dquote column <> " in " <> commaSeparated (IR.updateOperatorText <$> ops)
| (column, ops) <- OMap.toList erroneousExps
]
@ -407,11 +407,11 @@ updateOperators table updatePermissions = do
:: G.Name
-> G.Name
-> (ColumnInfo 'Postgres -> m (Parser 'Both n a))
-> (a -> RQL.UpdOpExpG UnpreparedValue)
-> (a -> IR.UpdOpExpG UnpreparedValue)
-> [ColumnInfo 'Postgres]
-> G.Description
-> G.Description
-> m (Maybe (Text, InputFieldsParser n (Maybe [(PGCol, RQL.UpdOpExpG UnpreparedValue)])))
-> m (Maybe (Text, InputFieldsParser n (Maybe [(PGCol, IR.UpdOpExpG UnpreparedValue)])))
updateOperator tableGQLName opName mkParser updOpExp columns opDesc objDesc =
whenMaybe (not $ null columns) do
fields <- for columns \columnInfo -> do
@ -439,7 +439,7 @@ deleteFromTable
-> Maybe G.Description -- ^ field description, if any
-> DelPermInfo 'Postgres -- ^ delete permissions of the table
-> Maybe (SelPermInfo 'Postgres) -- ^ select permissions of the table (if any)
-> m (FieldParser n (RQL.AnnDelG 'Postgres UnpreparedValue))
-> m (FieldParser n (IR.AnnDelG 'Postgres UnpreparedValue))
deleteFromTable table fieldName description deletePerms selectPerms = do
let whereName = $$(G.litName "where")
whereDesc = "filter the rows which have to be deleted"
@ -447,7 +447,7 @@ deleteFromTable table fieldName description deletePerms selectPerms = do
selection <- mutationSelectionSet table selectPerms
columns <- tableColumns table
pure $ P.subselection fieldName description whereArg selection
<&> mkDeleteObject table columns deletePerms . fmap RQL.MOutMultirowFields
<&> mkDeleteObject table columns deletePerms . fmap IR.MOutMultirowFields
-- | Construct a root field, normally called delete_tablename, that can be used
-- to delete an individual row from a DB table, specified by primary key
@ -458,26 +458,26 @@ deleteFromTableByPk
-> Maybe G.Description -- ^ field description, if any
-> DelPermInfo 'Postgres -- ^ delete permissions of the table
-> SelPermInfo 'Postgres -- ^ select permissions of the table
-> m (Maybe (FieldParser n (RQL.AnnDelG 'Postgres UnpreparedValue)))
-> m (Maybe (FieldParser n (IR.AnnDelG 'Postgres UnpreparedValue)))
deleteFromTableByPk table fieldName description deletePerms selectPerms = runMaybeT $ do
columns <- lift $ tableSelectColumns table selectPerms
pkArgs <- MaybeT $ primaryKeysArguments table selectPerms
selection <- lift $ tableSelectionSet table selectPerms
pure $ P.subselection fieldName description pkArgs selection
<&> mkDeleteObject table columns deletePerms . fmap RQL.MOutSinglerowObject
<&> mkDeleteObject table columns deletePerms . fmap IR.MOutSinglerowObject
mkDeleteObject
:: QualifiedTable
-> [ColumnInfo 'Postgres]
-> DelPermInfo 'Postgres
-> (AnnBoolExp 'Postgres UnpreparedValue, RQL.MutationOutputG 'Postgres UnpreparedValue)
-> RQL.AnnDelG 'Postgres UnpreparedValue
-> (AnnBoolExp 'Postgres UnpreparedValue, IR.MutationOutputG 'Postgres UnpreparedValue)
-> IR.AnnDelG 'Postgres UnpreparedValue
mkDeleteObject table columns deletePerms (whereExp, mutationOutput) =
RQL.AnnDel { RQL.dqp1Table = table
, RQL.dqp1Where = (permissionFilter, whereExp)
, RQL.dqp1Output = mutationOutput
, RQL.dqp1AllCols = columns
}
IR.AnnDel { IR.dqp1Table = table
, IR.dqp1Where = (permissionFilter, whereExp)
, IR.dqp1Output = mutationOutput
, IR.dqp1AllCols = columns
}
where
permissionFilter = fmapAnnBoolExp partialSQLExpToUnpreparedValue $ dpiFilter deletePerms
@ -491,7 +491,7 @@ mutationSelectionSet
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m, Has QueryContext r)
=> QualifiedTable
-> Maybe (SelPermInfo 'Postgres)
-> m (Parser 'Output n (RQL.MutFldsG 'Postgres UnpreparedValue))
-> m (Parser 'Output n (IR.MutFldsG 'Postgres UnpreparedValue))
mutationSelectionSet table selectPerms =
memoizeOn 'mutationSelectionSet table do
tableGQLName <- getTableGQLName table
@ -500,19 +500,19 @@ mutationSelectionSet table selectPerms =
tableSet <- lift $ tableSelectionList table permissions
let returningName = $$(G.litName "returning")
returningDesc = "data from the rows affected by the mutation"
pure $ RQL.MRet <$> P.subselection_ returningName (Just returningDesc) tableSet
pure $ IR.MRet <$> P.subselection_ returningName (Just returningDesc) tableSet
let affectedRowsName = $$(G.litName "affected_rows")
affectedRowsDesc = "number of rows affected by the mutation"
selectionName = tableGQLName <> $$(G.litName "_mutation_response")
selectionDesc = G.Description $ "response of any mutation on the table " <>> table
selectionFields = catMaybes
[ Just $ RQL.MCount <$
[ Just $ IR.MCount <$
P.selection_ affectedRowsName (Just affectedRowsDesc) P.int
, returning
]
pure $ P.selectionSet selectionName (Just selectionDesc) selectionFields
<&> parsedSelectionsToFields RQL.MExp
<&> parsedSelectionsToFields IR.MExp
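The parser assembled here accepts the standard <table>_mutation_response selection set (IR.MCount for affected_rows, IR.MRet for returning); for a hypothetical users table, a representative mutation selection:

mutation {
  delete_users(where: { active: { _eq: false } }) {
    affected_rows
    returning {
      id
    }
  }
}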
-- | How to specify a database row by primary key.
primaryKeysArguments

View File

@ -7,18 +7,20 @@ import Hasura.Prelude
import qualified Data.List.NonEmpty as NE
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.Backends.Postgres.SQL.DML as SQL
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.RQL.IR.Select as RQL
import Hasura.RQL.Types as RQL
import Data.Text.Extended
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.RQL.IR.OrderBy as IR
import qualified Hasura.RQL.IR.Select as IR
import Hasura.Backends.Postgres.SQL.Types
import Hasura.GraphQL.Parser (InputFieldsParser, Kind (..), Parser,
UnpreparedValue)
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Table
import Hasura.RQL.Types
-- | Corresponds to an object type for an order by.
@ -35,7 +37,7 @@ orderByExp
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo 'Postgres
-> m (Parser 'Input n [RQL.AnnOrderByItemG 'Postgres UnpreparedValue])
-> m (Parser 'Input n [IR.AnnOrderByItemG 'Postgres UnpreparedValue])
orderByExp table selectPermissions = memoizeOn 'orderByExp table $ do
tableGQLName <- getTableGQLName table
let name = tableGQLName <> $$(G.litName "_order_by")
@ -47,13 +49,13 @@ orderByExp table selectPermissions = memoizeOn 'orderByExp table $ do
where
mkField
:: FieldInfo 'Postgres
-> m (Maybe (InputFieldsParser n (Maybe [RQL.AnnOrderByItemG 'Postgres UnpreparedValue])))
-> m (Maybe (InputFieldsParser n (Maybe [IR.AnnOrderByItemG 'Postgres UnpreparedValue])))
mkField fieldInfo = runMaybeT $
case fieldInfo of
FIColumn columnInfo -> do
let fieldName = pgiName columnInfo
pure $ P.fieldOptional fieldName Nothing orderByOperator
<&> fmap (pure . mkOrderByItemG (RQL.AOCColumn columnInfo)) . join
<&> fmap (pure . mkOrderByItemG (IR.AOCColumn columnInfo)) . join
FIRelationship relationshipInfo -> do
let remoteTable = riRTable relationshipInfo
fieldName <- MaybeT $ pure $ G.mkName $ relNameToTxt $ riName relationshipInfo
@ -64,13 +66,13 @@ orderByExp table selectPermissions = memoizeOn 'orderByExp table $ do
otherTableParser <- lift $ orderByExp remoteTable perms
pure $ do
otherTableOrderBy <- join <$> P.fieldOptional fieldName Nothing (P.nullable otherTableParser)
pure $ fmap (map $ fmap $ RQL.AOCObjectRelation relationshipInfo newPerms) otherTableOrderBy
pure $ fmap (map $ fmap $ IR.AOCObjectRelation relationshipInfo newPerms) otherTableOrderBy
ArrRel -> do
let aggregateFieldName = fieldName <> $$(G.litName "_aggregate")
aggregationParser <- lift $ orderByAggregation remoteTable perms
pure $ do
aggregationOrderBy <- join <$> P.fieldOptional aggregateFieldName Nothing (P.nullable aggregationParser)
pure $ fmap (map $ fmap $ RQL.AOCArrayAggregation relationshipInfo newPerms) aggregationOrderBy
pure $ fmap (map $ fmap $ IR.AOCArrayAggregation relationshipInfo newPerms) aggregationOrderBy
FIComputedField _ -> empty
FIRemoteRelationship _ -> empty
@ -78,14 +80,18 @@ orderByExp table selectPermissions = memoizeOn 'orderByExp table $ do
-- local definitions
type OrderInfo = (SQL.OrderType, SQL.NullsOrder)
type OrderInfo = (PG.OrderType, PG.NullsOrder)
-- FIXME!
-- those parsers are directly using Postgres' SQL representation of
-- order, rather than using a general intermediary representation
orderByAggregation
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo 'Postgres
-> m (Parser 'Input n [OrderByItemG (RQL.AnnAggregateOrderBy 'Postgres)])
-> m (Parser 'Input n [IR.OrderByItemG 'Postgres (IR.AnnAggregateOrderBy 'Postgres)])
orderByAggregation table selectPermissions = do
-- WIP NOTE
-- there is heavy duplication between this and Select.tableAggregationFields
@ -100,7 +106,7 @@ orderByAggregation table selectPermissions = do
aggFields = fmap (concat . catMaybes . concat) $ sequenceA $ catMaybes
[ -- count
Just $ P.fieldOptional $$(G.litName "count") Nothing orderByOperator
<&> pure . fmap (pure . mkOrderByItemG RQL.AAOCount) . join
<&> pure . fmap (pure . mkOrderByItemG IR.AAOCount) . join
, -- operators on numeric columns
if null numColumns then Nothing else Just $
for numericAggOperators \operator ->
@ -123,34 +129,34 @@ orderByAggregation table selectPermissions = do
:: G.Name
-> G.Name
-> InputFieldsParser n [(ColumnInfo 'Postgres, OrderInfo)]
-> InputFieldsParser n (Maybe [OrderByItemG (RQL.AnnAggregateOrderBy 'Postgres)])
-> InputFieldsParser n (Maybe [IR.OrderByItemG 'Postgres (IR.AnnAggregateOrderBy 'Postgres)])
parseOperator operator tableGQLName columns =
let opText = G.unName operator
objectName = tableGQLName <> $$(G.litName "_") <> operator <> $$(G.litName "_order_by")
objectDesc = Just $ G.Description $ "order by " <> opText <> "() on columns of table " <>> table
in P.fieldOptional operator Nothing (P.object objectName objectDesc columns)
`mapField` map (\(col, info) -> mkOrderByItemG (RQL.AAOOp opText col) info)
`mapField` map (\(col, info) -> mkOrderByItemG (IR.AAOOp opText col) info)
orderByOperator :: MonadParse m => Parser 'Both m (Maybe OrderInfo)
orderByOperator =
P.nullable $ P.enum $$(G.litName "order_by") (Just "column ordering options") $ NE.fromList
[ ( define $$(G.litName "asc") "in ascending order, nulls last"
, (SQL.OTAsc, SQL.NLast)
, (PG.OTAsc, PG.NLast)
)
, ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first"
, (SQL.OTAsc, SQL.NFirst)
, (PG.OTAsc, PG.NFirst)
)
, ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last"
, (SQL.OTAsc, SQL.NLast)
, (PG.OTAsc, PG.NLast)
)
, ( define $$(G.litName "desc") "in descending order, nulls first"
, (SQL.OTDesc, SQL.NFirst)
, (PG.OTDesc, PG.NFirst)
)
, ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first"
, (SQL.OTDesc, SQL.NFirst)
, (PG.OTDesc, PG.NFirst)
)
, ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last"
, (SQL.OTDesc, SQL.NLast)
, (PG.OTDesc, PG.NLast)
)
]
where
@ -160,12 +166,12 @@ orderByOperator =
-- local helpers
mkOrderByItemG :: a -> OrderInfo -> OrderByItemG a
mkOrderByItemG :: a -> OrderInfo -> IR.OrderByItemG 'Postgres a
mkOrderByItemG column (orderType, nullsOrder) =
OrderByItemG { obiType = Just $ RQL.OrderType orderType
, obiColumn = column
, obiNulls = Just $ RQL.NullsOrder nullsOrder
}
IR.OrderByItemG { obiType = Just orderType
, obiColumn = column
, obiNulls = Just nullsOrder
}
aliasToName :: G.Name -> FieldName
aliasToName = FieldName . G.unName
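Put together, the enum above surfaces as the order_by argument of generated queries; a representative query (table and column names are hypothetical, enum values are the ones defined by orderByOperator):

query {
  article(order_by: [{ published_at: desc_nulls_last }, { title: asc }]) {
    title
  }
}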

View File

@ -19,12 +19,6 @@ module Hasura.GraphQL.Schema.Select
import Hasura.Prelude
import Control.Lens hiding (index)
import Data.Has
import Data.Int (Int32)
import Data.Parser.JSONPath
import Data.Traversable (mapAccumL)
import qualified Data.Aeson as J
import qualified Data.Aeson.Extended as J
import qualified Data.Aeson.Internal as J
@ -37,14 +31,21 @@ import qualified Data.Sequence.NonEmpty as NESeq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.Backends.Postgres.SQL.DML as SQL
import Control.Lens hiding (index)
import Data.Has
import Data.Int (Int32)
import Data.Parser.JSONPath
import Data.Text.Extended
import Data.Traversable (mapAccumL)
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.GraphQL.Execute.Types as ET
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.GraphQL.Parser.Internal.Parser as P
import qualified Hasura.RQL.IR.BoolExp as RQL
import qualified Hasura.RQL.IR.Select as RQL
import qualified Hasura.RQL.IR.BoolExp as IR
import qualified Hasura.RQL.IR.OrderBy as IR
import qualified Hasura.RQL.IR.Select as IR
import Data.Text.Extended
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.GraphQL.Parser (FieldParser, InputFieldsParser, Kind (..),
@ -59,13 +60,14 @@ import Hasura.GraphQL.Schema.Table
import Hasura.RQL.Types
import Hasura.Server.Utils (executeJSONPath)
type SelectExp b = RQL.AnnSimpleSelG b UnpreparedValue
type AggSelectExp b = RQL.AnnAggregateSelectG b UnpreparedValue
type ConnectionSelectExp b = RQL.ConnectionSelect b UnpreparedValue
type SelectArgs b = RQL.SelectArgsG b UnpreparedValue
type TablePerms b = RQL.TablePermG b UnpreparedValue
type AnnotatedFields b = RQL.AnnFieldsG b UnpreparedValue
type AnnotatedField b = RQL.AnnFieldG b UnpreparedValue
type SelectExp b = IR.AnnSimpleSelG b UnpreparedValue
type AggSelectExp b = IR.AnnAggregateSelectG b UnpreparedValue
type ConnectionSelectExp b = IR.ConnectionSelect b UnpreparedValue
type SelectArgs b = IR.SelectArgsG b UnpreparedValue
type TablePerms b = IR.TablePermG b UnpreparedValue
type AnnotatedFields b = IR.AnnFieldsG b UnpreparedValue
type AnnotatedField b = IR.AnnFieldG b UnpreparedValue
@ -93,12 +95,12 @@ selectTable table fieldName description selectPermissions = do
tableArgsParser <- tableArgs table selectPermissions
selectionSetParser <- tableSelectionList table selectPermissions
pure $ P.subselection fieldName description tableArgsParser selectionSetParser
<&> \(args, fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo selectPermissions
, RQL._asnArgs = args
, RQL._asnStrfyNum = stringifyNum
<&> \(args, fields) -> IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = tablePermissionsInfo selectPermissions
, IR._asnArgs = args
, IR._asnStrfyNum = stringifyNum
}
-- | Simple table connection selection.
@ -133,16 +135,16 @@ selectTableConnection table fieldName description pkeyColumns selectPermissions
selectArgsParser <- tableConnectionArgs pkeyColumns table selectPermissions
selectionSetParser <- P.nonNullableParser <$> tableConnectionSelectionSet table selectPermissions
pure $ P.subselection fieldName description selectArgsParser selectionSetParser
<&> \((args, split, slice), fields) -> RQL.ConnectionSelect
{ RQL._csPrimaryKeyColumns = pkeyColumns
, RQL._csSplit = split
, RQL._csSlice = slice
, RQL._csSelect = RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo selectPermissions
, RQL._asnArgs = args
, RQL._asnStrfyNum = stringifyNum
<&> \((args, split, slice), fields) -> IR.ConnectionSelect
{ IR._csPrimaryKeyColumns = pkeyColumns
, IR._csSplit = split
, IR._csSlice = slice
, IR._csSelect = IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = tablePermissionsInfo selectPermissions
, IR._asnArgs = args
, IR._asnStrfyNum = stringifyNum
}
}
@ -177,14 +179,14 @@ selectTableByPk table fieldName description selectPermissions = runMaybeT do
<&> \(boolExpr, fields) ->
let defaultPerms = tablePermissionsInfo selectPermissions
-- Do not account permission limit since the result is just a nullable object
permissions = defaultPerms { RQL._tpLimit = Nothing }
permissions = defaultPerms { IR._tpLimit = Nothing }
whereExpr = Just $ BoolAnd $ toList boolExpr
in RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = permissions
, RQL._asnArgs = RQL.noSelectArgs { RQL._saWhere = whereExpr }
, RQL._asnStrfyNum = stringifyNum
in IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = permissions
, IR._asnArgs = IR.noSelectArgs { IR._saWhere = whereExpr }
, IR._asnStrfyNum = stringifyNum
}
-- | Table aggregation selection
@ -213,18 +215,18 @@ selectTableAggregate table fieldName description selectPermissions = runMaybeT d
nodesParser <- lift $ tableSelectionList table selectPermissions
let selectionName = tableGQLName <> $$(G.litName "_aggregate")
aggregationParser = P.nonNullableParser $
parsedSelectionsToFields RQL.TAFExp <$>
parsedSelectionsToFields IR.TAFExp <$>
P.selectionSet selectionName (Just $ G.Description $ "aggregated selection of " <>> table)
[ RQL.TAFNodes <$> P.subselection_ $$(G.litName "nodes") Nothing nodesParser
, RQL.TAFAgg <$> P.subselection_ $$(G.litName "aggregate") Nothing aggregateParser
[ IR.TAFNodes <$> P.subselection_ $$(G.litName "nodes") Nothing nodesParser
, IR.TAFAgg <$> P.subselection_ $$(G.litName "aggregate") Nothing aggregateParser
]
pure $ P.subselection fieldName description tableArgsParser aggregationParser
<&> \(args, fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo selectPermissions
, RQL._asnArgs = args
, RQL._asnStrfyNum = stringifyNum
<&> \(args, fields) -> IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = tablePermissionsInfo selectPermissions
, IR._asnArgs = args
, IR._asnStrfyNum = stringifyNum
}
{- Note [Selectability of tables]
@ -321,14 +323,14 @@ tableSelectionSet table selectPermissions = memoizeOn 'tableSelectionSet table d
-- A relay table
(ET.QueryRelay, Just pkeyColumns) -> do
let nodeIdFieldParser =
P.selection_ $$(G.litName "id") Nothing P.identifier $> RQL.AFNodeId table pkeyColumns
P.selection_ $$(G.litName "id") Nothing P.identifier $> IR.AFNodeId table pkeyColumns
allFieldParsers = fieldParsers <> [nodeIdFieldParser]
nodeInterface <- node
pure $ P.selectionSetObject tableGQLName description allFieldParsers [nodeInterface]
<&> parsedSelectionsToFields RQL.AFExpression
<&> parsedSelectionsToFields IR.AFExpression
_ ->
pure $ P.selectionSetObject tableGQLName description fieldParsers []
<&> parsedSelectionsToFields RQL.AFExpression
<&> parsedSelectionsToFields IR.AFExpression
-- | List of table fields object.
-- Just a @'nonNullableObjectList' wrapper over @'tableSelectionSet'.
@ -376,50 +378,50 @@ tableConnectionSelectionSet
)
=> QualifiedTable
-> SelPermInfo 'Postgres
-> m (Parser 'Output n (RQL.ConnectionFields 'Postgres UnpreparedValue))
-> m (Parser 'Output n (IR.ConnectionFields 'Postgres UnpreparedValue))
tableConnectionSelectionSet table selectPermissions = do
edgesParser <- tableEdgesSelectionSet
tableGQLName <- getTableGQLName table
let connectionTypeName = tableGQLName <> $$(G.litName "Connection")
pageInfo = P.subselection_ $$(G.litName "pageInfo") Nothing
pageInfoSelectionSet <&> RQL.ConnectionPageInfo
pageInfoSelectionSet <&> IR.ConnectionPageInfo
edges = P.subselection_ $$(G.litName "edges") Nothing
edgesParser <&> RQL.ConnectionEdges
edgesParser <&> IR.ConnectionEdges
connectionDescription = G.Description $ "A Relay connection object on " <>> table
pure $ P.nonNullableParser $
P.selectionSet connectionTypeName (Just connectionDescription) [pageInfo, edges]
<&> parsedSelectionsToFields RQL.ConnectionTypename
<&> parsedSelectionsToFields IR.ConnectionTypename
where
pageInfoSelectionSet :: Parser 'Output n RQL.PageInfoFields
pageInfoSelectionSet :: Parser 'Output n IR.PageInfoFields
pageInfoSelectionSet =
let startCursorField = P.selection_ $$(G.litName "startCursor") Nothing
P.string $> RQL.PageInfoStartCursor
P.string $> IR.PageInfoStartCursor
endCursorField = P.selection_ $$(G.litName "endCursor") Nothing
P.string $> RQL.PageInfoEndCursor
P.string $> IR.PageInfoEndCursor
hasNextPageField = P.selection_ $$(G.litName "hasNextPage") Nothing
P.boolean $> RQL.PageInfoHasNextPage
P.boolean $> IR.PageInfoHasNextPage
hasPreviousPageField = P.selection_ $$(G.litName "hasPreviousPage") Nothing
P.boolean $> RQL.PageInfoHasPreviousPage
P.boolean $> IR.PageInfoHasPreviousPage
allFields =
[ startCursorField, endCursorField
, hasNextPageField, hasPreviousPageField
]
in P.nonNullableParser $ P.selectionSet $$(G.litName "PageInfo") Nothing allFields
<&> parsedSelectionsToFields RQL.PageInfoTypename
<&> parsedSelectionsToFields IR.PageInfoTypename
tableEdgesSelectionSet
:: m (Parser 'Output n (RQL.EdgeFields 'Postgres UnpreparedValue))
:: m (Parser 'Output n (IR.EdgeFields 'Postgres UnpreparedValue))
tableEdgesSelectionSet = do
tableGQLName <- getTableGQLName table
edgeNodeParser <- P.nonNullableParser <$> tableSelectionSet table selectPermissions
let edgesType = tableGQLName <> $$(G.litName "Edge")
cursor = P.selection_ $$(G.litName "cursor") Nothing
P.string $> RQL.EdgeCursor
P.string $> IR.EdgeCursor
edgeNode = P.subselection_ $$(G.litName "node") Nothing
edgeNodeParser <&> RQL.EdgeNode
edgeNodeParser <&> IR.EdgeNode
pure $ nonNullableObjectList $ P.selectionSet edgesType Nothing [cursor, edgeNode]
<&> parsedSelectionsToFields RQL.EdgeTypename
<&> parsedSelectionsToFields IR.EdgeTypename
-- | User-defined function (AKA custom function)
selectFunction
@ -437,12 +439,12 @@ selectFunction function fieldName description selectPermissions = do
selectionSetParser <- tableSelectionList table selectPermissions
let argsParser = liftA2 (,) functionArgsParser tableArgsParser
pure $ P.subselection fieldName description argsParser selectionSetParser
<&> \((funcArgs, tableArgs'), fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (fiName function) funcArgs Nothing
, RQL._asnPerm = tablePermissionsInfo selectPermissions
, RQL._asnArgs = tableArgs'
, RQL._asnStrfyNum = stringifyNum
<&> \((funcArgs, tableArgs'), fields) -> IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromFunction (fiName function) funcArgs Nothing
, IR._asnPerm = tablePermissionsInfo selectPermissions
, IR._asnArgs = tableArgs'
, IR._asnStrfyNum = stringifyNum
}
selectFunctionAggregate
@ -463,19 +465,19 @@ selectFunctionAggregate function fieldName description selectPermissions = runMa
selectionName <- lift $ pure tableGQLName <&> (<> $$(G.litName "_aggregate"))
nodesParser <- lift $ tableSelectionList table selectPermissions
let argsParser = liftA2 (,) functionArgsParser tableArgsParser
aggregationParser = fmap (parsedSelectionsToFields RQL.TAFExp) $
aggregationParser = fmap (parsedSelectionsToFields IR.TAFExp) $
P.nonNullableParser $
P.selectionSet selectionName Nothing
[ RQL.TAFNodes <$> P.subselection_ $$(G.litName "nodes") Nothing nodesParser
, RQL.TAFAgg <$> P.subselection_ $$(G.litName "aggregate") Nothing aggregateParser
[ IR.TAFNodes <$> P.subselection_ $$(G.litName "nodes") Nothing nodesParser
, IR.TAFAgg <$> P.subselection_ $$(G.litName "aggregate") Nothing aggregateParser
]
pure $ P.subselection fieldName description argsParser aggregationParser
<&> \((funcArgs, tableArgs'), fields) -> RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (fiName function) funcArgs Nothing
, RQL._asnPerm = tablePermissionsInfo selectPermissions
, RQL._asnArgs = tableArgs'
, RQL._asnStrfyNum = stringifyNum
<&> \((funcArgs, tableArgs'), fields) -> IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromFunction (fiName function) funcArgs Nothing
, IR._asnPerm = tablePermissionsInfo selectPermissions
, IR._asnArgs = tableArgs'
, IR._asnStrfyNum = stringifyNum
}
selectFunctionConnection
@ -494,16 +496,16 @@ selectFunctionConnection function fieldName description pkeyColumns selectPermis
selectionSetParser <- tableConnectionSelectionSet table selectPermissions
let argsParser = liftA2 (,) functionArgsParser tableConnectionArgsParser
pure $ P.subselection fieldName description argsParser selectionSetParser
<&> \((funcArgs, (args, split, slice)), fields) -> RQL.ConnectionSelect
{ RQL._csPrimaryKeyColumns = pkeyColumns
, RQL._csSplit = split
, RQL._csSlice = slice
, RQL._csSelect = RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (fiName function) funcArgs Nothing
, RQL._asnPerm = tablePermissionsInfo selectPermissions
, RQL._asnArgs = args
, RQL._asnStrfyNum = stringifyNum
<&> \((funcArgs, (args, split, slice)), fields) -> IR.ConnectionSelect
{ IR._csPrimaryKeyColumns = pkeyColumns
, IR._csSplit = split
, IR._csSlice = slice
, IR._csSelect = IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromFunction (fiName function) funcArgs Nothing
, IR._asnPerm = tablePermissionsInfo selectPermissions
, IR._asnArgs = args
, IR._asnStrfyNum = stringifyNum
}
}
@ -518,7 +520,7 @@ tableWhere
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo 'Postgres
-> m (InputFieldsParser n (Maybe (RQL.AnnBoolExp 'Postgres UnpreparedValue)))
-> m (InputFieldsParser n (Maybe (IR.AnnBoolExp 'Postgres UnpreparedValue)))
tableWhere table selectPermissions = do
boolExpParser <- boolExp table (Just selectPermissions)
pure $ fmap join $
@ -533,7 +535,7 @@ tableOrderBy
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo 'Postgres
-> m (InputFieldsParser n (Maybe (NonEmpty (RQL.AnnOrderByItemG 'Postgres UnpreparedValue))))
-> m (InputFieldsParser n (Maybe (NonEmpty (IR.AnnOrderByItemG 'Postgres UnpreparedValue))))
tableOrderBy table selectPermissions = do
orderByParser <- orderByExp table selectPermissions
pure $ do
@ -583,16 +585,16 @@ tableArgs table selectPermissions = do
limit <- fmap join $ P.fieldOptional limitName limitDesc $ P.nullable positiveInt
offset <- fmap join $ P.fieldOptional offsetName offsetDesc $ P.nullable fakeBigInt
distinct <- distinctParser
pure $ RQL.SelectArgs
{ RQL._saWhere = whereF
, RQL._saOrderBy = orderBy
, RQL._saLimit = fromIntegral <$> limit
, RQL._saOffset = txtEncoder <$> offset
, RQL._saDistinct = distinct
pure $ IR.SelectArgs
{ IR._saWhere = whereF
, IR._saOrderBy = orderBy
, IR._saLimit = fromIntegral <$> limit
, IR._saOffset = txtEncoder <$> offset
, IR._saDistinct = distinct
}
pure $ selectArgs `P.bindFields`
\args -> do
traverse_ (validateDistinctOn $ RQL._saOrderBy args) $ RQL._saDistinct args
traverse_ (validateDistinctOn $ IR._saOrderBy args) $ IR._saDistinct args
pure args
where
-- TODO: THIS IS A TEMPORARY FIX
@ -629,9 +631,9 @@ tableArgs table selectPermissions = do
let colsLen = length distinctOnCols
initOrderBys = take colsLen $ NE.toList orderByCols
initOrdByCols = flip mapMaybe initOrderBys $ \ob ->
case obiColumn ob of
RQL.AOCColumn pgCol -> Just $ pgiColumn pgCol
_ -> Nothing
case IR.obiColumn ob of
IR.AOCColumn pgCol -> Just $ pgiColumn pgCol
_ -> Nothing
isValid = (colsLen == length initOrdByCols)
&& all (`elem` initOrdByCols) (toList distinctOnCols)
unless isValid $ parseError
@ -661,8 +663,8 @@ tableConnectionArgs
-> SelPermInfo 'Postgres
-> m ( InputFieldsParser n
( SelectArgs 'Postgres
, Maybe (NonEmpty (RQL.ConnectionSplit 'Postgres UnpreparedValue))
, Maybe RQL.ConnectionSlice
, Maybe (NonEmpty (IR.ConnectionSplit 'Postgres UnpreparedValue))
, Maybe IR.ConnectionSlice
)
)
tableConnectionArgs pkeyColumns table selectPermissions = do
@ -688,85 +690,84 @@ tableConnectionArgs pkeyColumns table selectPermissions = do
rawSplit <- case (after, before) of
(Nothing, Nothing) -> pure Nothing
(Just _, Just _) -> parseError "\"after\" and \"before\" are not allowed at once"
(Just v, Nothing) -> pure $ Just (RQL.CSKAfter, v)
(Nothing, Just v) -> pure $ Just (RQL.CSKBefore, v)
(Just v, Nothing) -> pure $ Just (IR.CSKAfter, v)
(Nothing, Just v) -> pure $ Just (IR.CSKBefore, v)
for rawSplit (uncurry (parseConnectionSplit orderBy'))
slice <- firstAndLast `P.bindFields` \case
(Nothing, Nothing) -> pure Nothing
(Just _, Just _) -> parseError "\"first\" and \"last\" are not allowed at once"
(Just v, Nothing) -> pure $ Just $ RQL.SliceFirst $ fromIntegral v
(Nothing, Just v) -> pure $ Just $ RQL.SliceLast $ fromIntegral v
(Just v, Nothing) -> pure $ Just $ IR.SliceFirst $ fromIntegral v
(Nothing, Just v) -> pure $ Just $ IR.SliceLast $ fromIntegral v
pure ( RQL.SelectArgs whereF orderBy Nothing Nothing distinct
pure ( IR.SelectArgs whereF orderBy Nothing Nothing distinct
, split
, slice
)
where
base64Text = base64Decode <$> P.string
appendPrimaryKeyOrderBy :: NonEmpty (RQL.AnnOrderByItemG 'Postgres v) -> NonEmpty (RQL.AnnOrderByItemG 'Postgres v)
appendPrimaryKeyOrderBy :: NonEmpty (IR.AnnOrderByItemG 'Postgres v) -> NonEmpty (IR.AnnOrderByItemG 'Postgres v)
appendPrimaryKeyOrderBy orderBys@(h NE.:| t) =
let orderByColumnNames =
orderBys ^.. traverse . to obiColumn . RQL._AOCColumn . to pgiColumn
orderBys ^.. traverse . to IR.obiColumn . IR._AOCColumn . to pgiColumn
pkeyOrderBys = flip mapMaybe (toList pkeyColumns) $ \pgColumnInfo ->
if pgiColumn pgColumnInfo `elem` orderByColumnNames then Nothing
else Just $ OrderByItemG Nothing (RQL.AOCColumn pgColumnInfo) Nothing
else Just $ IR.OrderByItemG Nothing (IR.AOCColumn pgColumnInfo) Nothing
in h NE.:| (t <> pkeyOrderBys)
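-- i.e. any primary-key column not already present in the order-by is appended
-- to it, making the resulting ordering total so that a connection cursor
-- always identifies a unique row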
parseConnectionSplit
:: Maybe (NonEmpty (RQL.AnnOrderByItemG 'Postgres UnpreparedValue))
-> RQL.ConnectionSplitKind
:: Maybe (NonEmpty (IR.AnnOrderByItemG 'Postgres UnpreparedValue))
-> IR.ConnectionSplitKind
-> BL.ByteString
-> n (NonEmpty (RQL.ConnectionSplit 'Postgres UnpreparedValue))
-> n (NonEmpty (IR.ConnectionSplit 'Postgres UnpreparedValue))
parseConnectionSplit maybeOrderBys splitKind cursorSplit = do
cursorValue <- either (const throwInvalidCursor) pure $
J.eitherDecode cursorSplit
cursorValue <- J.eitherDecode cursorSplit `onLeft` const throwInvalidCursor
case maybeOrderBys of
Nothing -> forM (NESeq.toNonEmpty pkeyColumns) $
\pgColumnInfo -> do
let columnJsonPath = [J.Key $ getPGColTxt $ pgiColumn pgColumnInfo]
columnType = pgiType pgColumnInfo
pgColumnValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
executeJSONPath columnJsonPath cursorValue
pgColumnValue <- iResultToMaybe (executeJSONPath columnJsonPath cursorValue)
`onNothing` throwInvalidCursor
pgValue <- liftQErr $ parsePGScalarValue columnType pgColumnValue
let unresolvedValue = flip UVParameter Nothing $ P.PGColumnValue columnType pgValue
pure $ RQL.ConnectionSplit splitKind unresolvedValue $
OrderByItemG Nothing (RQL.AOCColumn pgColumnInfo) Nothing
pure $ IR.ConnectionSplit splitKind unresolvedValue $
IR.OrderByItemG Nothing (IR.AOCColumn pgColumnInfo) Nothing
Just orderBys ->
forM orderBys $ \orderBy -> do
let OrderByItemG orderType annObCol nullsOrder = orderBy
let IR.OrderByItemG orderType annObCol nullsOrder = orderBy
columnType = getOrderByColumnType annObCol
orderByItemValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
executeJSONPath (getPathFromOrderBy annObCol) cursorValue
orderByItemValue <- iResultToMaybe (executeJSONPath (getPathFromOrderBy annObCol) cursorValue)
`onNothing` throwInvalidCursor
pgValue <- liftQErr $ parsePGScalarValue columnType orderByItemValue
let unresolvedValue = flip UVParameter Nothing $ P.PGColumnValue columnType pgValue
pure $ RQL.ConnectionSplit splitKind unresolvedValue $
OrderByItemG orderType (() <$ annObCol) nullsOrder
pure $ IR.ConnectionSplit splitKind unresolvedValue $
IR.OrderByItemG orderType (() <$ annObCol) nullsOrder
where
throwInvalidCursor = parseError "the \"after\" or \"before\" cursor is invalid"
liftQErr = either (parseError . qeError) pure . runExcept
getPathFromOrderBy = \case
RQL.AOCColumn pgColInfo ->
IR.AOCColumn pgColInfo ->
let pathElement = J.Key $ getPGColTxt $ pgiColumn pgColInfo
in [pathElement]
RQL.AOCObjectRelation relInfo _ obCol ->
IR.AOCObjectRelation relInfo _ obCol ->
let pathElement = J.Key $ relNameToTxt $ riName relInfo
in pathElement : getPathFromOrderBy obCol
RQL.AOCArrayAggregation relInfo _ aggOb ->
IR.AOCArrayAggregation relInfo _ aggOb ->
let fieldName = J.Key $ relNameToTxt (riName relInfo) <> "_aggregate"
in fieldName : case aggOb of
RQL.AAOCount -> [J.Key "count"]
RQL.AAOOp t col -> [J.Key t, J.Key $ getPGColTxt $ pgiColumn col]
IR.AAOCount -> [J.Key "count"]
IR.AAOOp t col -> [J.Key t, J.Key $ getPGColTxt $ pgiColumn col]
getOrderByColumnType = \case
RQL.AOCColumn pgColInfo -> pgiType pgColInfo
RQL.AOCObjectRelation _ _ obCol -> getOrderByColumnType obCol
RQL.AOCArrayAggregation _ _ aggOb ->
IR.AOCColumn pgColInfo -> pgiType pgColInfo
IR.AOCObjectRelation _ _ obCol -> getOrderByColumnType obCol
IR.AOCArrayAggregation _ _ aggOb ->
case aggOb of
RQL.AAOCount -> PGColumnScalar PGInteger
RQL.AAOOp _ colInfo -> pgiType colInfo
IR.AAOCount -> PGColumnScalar PGInteger
IR.AAOOp _ colInfo -> pgiType colInfo
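The renaming above obscures the cursor format this code depends on: a connection cursor is base64-encoded JSON whose keys are column names (for primary-key cursors) or order-by paths. A minimal, self-contained sketch of that round trip follows; the module aliases and helper names are illustrative, not part of this codebase.

{-# LANGUAGE OverloadedStrings #-}
import Control.Monad ((<=<))
import qualified Data.Aeson as J
import qualified Data.ByteString.Base64.Lazy as B64
import qualified Data.ByteString.Lazy as BL

-- a cursor for a primary-key column "id" with value 42
exampleCursor :: BL.ByteString
exampleCursor = B64.encode $ J.encode $ J.object ["id" J..= (42 :: Int)]

-- the first step of 'parseConnectionSplit' above: base64-decode, then JSON-decode
decodeCursor :: BL.ByteString -> Either String J.Value
decodeCursor = J.eitherDecode <=< B64.decode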
-- | Aggregation fields
--
@ -785,7 +786,7 @@ tableAggregationFields
:: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
=> QualifiedTable
-> SelPermInfo 'Postgres
-> m (Parser 'Output n (RQL.AggregateFields 'Postgres))
-> m (Parser 'Output n (IR.AggregateFields 'Postgres))
tableAggregationFields table selectPermissions = do
tableGQLName <- getTableGQLName table
allColumns <- tableSelectColumns table selectPermissions
@ -808,22 +809,22 @@ tableAggregationFields table selectPermissions = do
]
let aggregateFields = count : numericAndComparable
pure $ P.selectionSet selectName (Just description) aggregateFields
<&> parsedSelectionsToFields RQL.AFExp
<&> parsedSelectionsToFields IR.AFExp
where
mkNumericAggFields :: G.Name -> [ColumnInfo 'Postgres] -> m [FieldParser n (RQL.ColFld 'Postgres)]
mkNumericAggFields :: G.Name -> [ColumnInfo 'Postgres] -> m [FieldParser n (IR.ColFld 'Postgres)]
mkNumericAggFields name
| name == $$(G.litName "sum") = traverse mkColumnAggField
| otherwise = traverse \columnInfo ->
pure $ P.selection_ (pgiName columnInfo) (pgiDescription columnInfo)
(P.nullable P.float) $> RQL.CFCol (pgiColumn columnInfo)
(P.nullable P.float) $> IR.CFCol (pgiColumn columnInfo)
mkColumnAggField :: ColumnInfo 'Postgres -> m (FieldParser n (RQL.ColFld 'Postgres))
mkColumnAggField :: ColumnInfo 'Postgres -> m (FieldParser n (IR.ColFld 'Postgres))
mkColumnAggField columnInfo = do
field <- P.column (pgiType columnInfo) (G.Nullability True)
pure $ P.selection_ (pgiName columnInfo) (pgiDescription columnInfo) field
$> RQL.CFCol (pgiColumn columnInfo)
$> IR.CFCol (pgiColumn columnInfo)
countField :: m (FieldParser n (RQL.AggregateField 'Postgres))
countField :: m (FieldParser n (IR.AggregateField 'Postgres))
countField = do
columnsEnum <- tableSelectColumnsEnum table selectPermissions
let columnsName = $$(G.litName "columns")
@ -832,25 +833,25 @@ tableAggregationFields table selectPermissions = do
distinct <- P.fieldOptional distinctName Nothing P.boolean
columns <- maybe (pure Nothing) (P.fieldOptional columnsName Nothing . P.list) columnsEnum
pure $ case columns of
Nothing -> SQL.CTStar
Nothing -> PG.CTStar
Just cols -> if Just True == distinct
then SQL.CTDistinct cols
else SQL.CTSimple cols
pure $ RQL.AFCount <$> P.selection $$(G.litName "count") Nothing args P.int
then PG.CTDistinct cols
else PG.CTSimple cols
pure $ IR.AFCount <$> P.selection $$(G.litName "count") Nothing args P.int
parseAggOperator
:: G.Name
-> G.Name
-> [FieldParser n (RQL.ColFld 'Postgres)]
-> FieldParser n (RQL.AggregateField 'Postgres)
-> [FieldParser n (IR.ColFld 'Postgres)]
-> FieldParser n (IR.AggregateField 'Postgres)
parseAggOperator operator tableGQLName columns =
let opText = G.unName operator
setName = tableGQLName <> $$(G.litName "_") <> operator <> $$(G.litName "_fields")
setDesc = Just $ G.Description $ "aggregate " <> opText <> " on columns"
subselectionParser = P.selectionSet setName setDesc columns
<&> parsedSelectionsToFields RQL.CFExp
<&> parsedSelectionsToFields IR.CFExp
in P.subselection_ operator Nothing subselectionParser
<&> (RQL.AFOp . RQL.AggregateOp opText)
<&> (IR.AFOp . IR.AggregateOp opText)
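-- For example, for table "author" and operator "sum", this generates an object
-- type named "author_sum_fields" with one field per aggregated column.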
lookupRemoteField'
:: (MonadSchema n m, MonadTableInfo r m)
@ -896,13 +897,13 @@ fieldSelection table maybePkeyColumns fieldInfo selectPermissions =
if | fieldName == $$(G.litName "id") && queryType == ET.QueryRelay -> do
pkeyColumns <- MaybeT $ pure maybePkeyColumns
pure $ P.selection_ fieldName Nothing P.identifier
$> RQL.AFNodeId table pkeyColumns
$> IR.AFNodeId table pkeyColumns
| otherwise -> do
guard $ Set.member columnName (spiCols selectPermissions)
let pathArg = jsonPathArg $ pgiType columnInfo
field <- lift $ P.column (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
pure $ P.selection fieldName (pgiDescription columnInfo) pathArg field
<&> RQL.mkAnnColumnField columnInfo
<&> IR.mkAnnColumnField columnInfo
FIRelationship relationshipInfo ->
concat . maybeToList <$> relationshipField relationshipInfo
@ -930,14 +931,14 @@ relationshipField relationshipInfo = runMaybeT do
selectionSetParser <- lift $ tableSelectionSet otherTable remotePerms
pure $ pure $ (if nullable then id else P.nonNullableField) $
P.subselection_ relFieldName desc selectionSetParser
<&> \fields -> RQL.AFObjectRelation $ RQL.AnnRelationSelectG relName colMapping $
RQL.AnnObjectSelectG fields otherTable $
RQL._tpFilter $ tablePermissionsInfo remotePerms
<&> \fields -> IR.AFObjectRelation $ IR.AnnRelationSelectG relName colMapping $
IR.AnnObjectSelectG fields otherTable $
IR._tpFilter $ tablePermissionsInfo remotePerms
ArrRel -> do
let arrayRelDesc = Just $ G.Description "An array relationship"
otherTableParser <- lift $ selectTable otherTable relFieldName arrayRelDesc remotePerms
let arrayRelField = otherTableParser <&> \selectExp -> RQL.AFArrayRelation $
RQL.ASSimple $ RQL.AnnRelationSelectG relName colMapping selectExp
let arrayRelField = otherTableParser <&> \selectExp -> IR.AFArrayRelation $
IR.ASSimple $ IR.AnnRelationSelectG relName colMapping selectExp
relAggFieldName = relFieldName <> $$(G.litName "_aggregate")
relAggDesc = Just $ G.Description "An aggregate relationship"
remoteAggField <- lift $ selectTableAggregate otherTable relAggFieldName relAggDesc remotePerms
@ -952,8 +953,8 @@ relationshipField relationshipInfo = runMaybeT do
lift $ lift $ selectTableConnection otherTable relConnectionName
relConnectionDesc pkeyColumns remotePerms
pure $ catMaybes [ Just arrayRelField
, fmap (RQL.AFArrayRelation . RQL.ASAggregate . RQL.AnnRelationSelectG relName colMapping) <$> remoteAggField
, fmap (RQL.AFArrayRelation . RQL.ASConnection . RQL.AnnRelationSelectG relName colMapping) <$> remoteConnectionField
, fmap (IR.AFArrayRelation . IR.ASAggregate . IR.AnnRelationSelectG relName colMapping) <$> remoteAggField
, fmap (IR.AFArrayRelation . IR.ASConnection . IR.AnnRelationSelectG relName colMapping) <$> remoteConnectionField
]
-- | Computed field parser
@ -973,11 +974,11 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
let fieldArgsParser = do
args <- functionArgsParser
colOp <- jsonPathArg $ PGColumnScalar scalarReturnType
pure $ RQL.AFComputedField $ RQL.CFSScalar $ RQL.ComputedFieldScalarSelect
{ RQL._cfssFunction = _cffName _cfiFunction
, RQL._cfssType = scalarReturnType
, RQL._cfssColumnOp = colOp
, RQL._cfssArguments = args
pure $ IR.AFComputedField $ IR.CFSScalar $ IR.ComputedFieldScalarSelect
{ IR._cfssFunction = _cffName _cfiFunction
, IR._cfssType = scalarReturnType
, IR._cfssColumnOp = colOp
, IR._cfssArguments = args
}
dummyParser <- lift $ P.column (PGColumnScalar scalarReturnType) (G.Nullability True)
pure $ P.selection fieldName (Just fieldDescription) fieldArgsParser dummyParser
@ -988,12 +989,12 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
let fieldArgsParser = liftA2 (,) functionArgsParser selectArgsParser
pure $ P.subselection fieldName (Just fieldDescription) fieldArgsParser selectionSetParser <&>
\((functionArgs', args), fields) ->
RQL.AFComputedField $ RQL.CFSTable RQL.JASMultipleRows $ RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromFunction (_cffName _cfiFunction) functionArgs' Nothing
, RQL._asnPerm = tablePermissionsInfo remotePerms
, RQL._asnArgs = args
, RQL._asnStrfyNum = stringifyNum
IR.AFComputedField $ IR.CFSTable IR.JASMultipleRows $ IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromFunction (_cffName _cfiFunction) functionArgs' Nothing
, IR._asnPerm = tablePermissionsInfo remotePerms
, IR._asnArgs = args
, IR._asnStrfyNum = stringifyNum
}
where
fieldDescription =
@ -1001,22 +1002,22 @@ computedField ComputedFieldInfo{..} selectPermissions = runMaybeT do
in mkDescriptionWith (_cffDescription _cfiFunction) defaultDescription
computedFieldFunctionArgs
:: ComputedFieldFunction -> m (InputFieldsParser n (RQL.FunctionArgsExpTableRow UnpreparedValue))
:: ComputedFieldFunction -> m (InputFieldsParser n (IR.FunctionArgsExpTableRow UnpreparedValue))
computedFieldFunctionArgs ComputedFieldFunction{..} =
functionArgs _cffName (IAUserProvided <$> _cffInputArgs) <&> fmap addTableAndSessionArgument
where
tableRowArgument = RQL.AETableRow Nothing
tableRowArgument = IR.AETableRow Nothing
addTableAndSessionArgument args@(RQL.FunctionArgsExp positional named) =
addTableAndSessionArgument args@(IR.FunctionArgsExp positional named) =
let withTable = case _cffTableArgument of
FTAFirst -> RQL.FunctionArgsExp (tableRowArgument : positional) named
FTANamed argName index -> RQL.insertFunctionArg argName index tableRowArgument args
sessionArgVal = RQL.AESession UVSession
FTAFirst -> IR.FunctionArgsExp (tableRowArgument : positional) named
FTANamed argName index -> IR.insertFunctionArg argName index tableRowArgument args
sessionArgVal = IR.AESession UVSession
in
case _cffSessionArgument of
Nothing -> withTable
Just (FunctionSessionArgument argName index) ->
RQL.insertFunctionArg argName index sessionArgVal withTable
IR.insertFunctionArg argName index sessionArgVal withTable
-- | Remote relationship field parsers
remoteRelationshipField
@ -1041,7 +1042,7 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
remoteFieldsArgumentsParser <-
sequenceA <$> for (Map.toList $ _rfiParamMap remoteFieldInfo) \(name, inpValDefn) -> do
parser <- lift $ inputValueDefinitionParser (_rfiSchemaIntrospect remoteFieldInfo) inpValDefn
pure $ parser `mapField` RQL.RemoteFieldArgument name
pure $ parser `mapField` IR.RemoteFieldArgument name
-- This selection set parser should be the remote node's selection set parser,
-- which comes from the fieldCall
@ -1055,7 +1056,7 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
pure $ pure $ P.unsafeRawField (P.mkDefinition fieldName Nothing fieldInfo')
`P.bindField` \G.Field{ G._fArguments = args, G._fSelectionSet = selSet } -> do
remoteArgs <- P.ifParser remoteFieldsArgumentsParser' $ P.GraphQLValue <$> args
pure $ RQL.AFRemote $ RQL.RemoteSelect
pure $ IR.AFRemote $ IR.RemoteSelect
{ _rselArgs = remoteArgs
, _rselSelection = selSet
, _rselHasuraColumns = _rfiHasuraFields remoteFieldInfo
@ -1068,7 +1069,7 @@ remoteRelationshipField remoteFieldInfo = runMaybeT do
customSQLFunctionArgs
:: (MonadSchema n m, MonadTableInfo r m)
=> FunctionInfo
-> m (InputFieldsParser n (RQL.FunctionArgsExpTableRow UnpreparedValue))
-> m (InputFieldsParser n (IR.FunctionArgsExpTableRow UnpreparedValue))
customSQLFunctionArgs FunctionInfo{..} = functionArgs fiName fiInputArgs
-- | Parses the arguments to the underlying sql function of a computed field or
@ -1086,7 +1087,7 @@ functionArgs
:: forall m n r. (MonadSchema n m, MonadTableInfo r m)
=> QualifiedFunction
-> Seq.Seq FunctionInputArgument
-> m (InputFieldsParser n (RQL.FunctionArgsExpTableRow UnpreparedValue))
-> m (InputFieldsParser n (IR.FunctionArgsExpTableRow UnpreparedValue))
functionArgs functionName (toList -> inputArgs) = do
-- First, we iterate through the original sql arguments in order, to find the
-- corresponding graphql names. At the same time, we create the input field
@ -1094,7 +1095,7 @@ functionArgs functionName (toList -> inputArgs) = do
-- mandatory arguments. Optional arguments have a default value, mandatory
-- arguments don't.
let (names, session, optional, mandatory) = mconcat $ snd $ mapAccumL splitArguments 1 inputArgs
defaultArguments = RQL.FunctionArgsExp (snd <$> session) Map.empty
defaultArguments = IR.FunctionArgsExp (snd <$> session) Map.empty
if | length session > 1 ->
-- We somehow found more than one session argument; this should never
@ -1131,21 +1132,21 @@ functionArgs functionName (toList -> inputArgs) = do
-- We also fail if we find a mandatory argument that was not
-- provided by the user.
named <- Map.fromList . catMaybes <$> traverse (namedArgument foundArguments) left
pure $ RQL.FunctionArgsExp positional named
pure $ IR.FunctionArgsExp positional named
pure $ P.field fieldName (Just fieldDesc) objectParser
where
sessionPlaceholder :: RQL.ArgumentExp UnpreparedValue
sessionPlaceholder = RQL.AEInput P.UVSession
sessionPlaceholder :: IR.ArgumentExp UnpreparedValue
sessionPlaceholder = IR.AEInput P.UVSession
splitArguments
:: Int
-> FunctionInputArgument
-> (Int, ( [Text] -- graphql names, in order
, [(Text, RQL.ArgumentExp UnpreparedValue)] -- session argument
, [m (InputFieldsParser n (Maybe (Text, RQL.ArgumentExp UnpreparedValue)))] -- optional argument
, [m (InputFieldsParser n (Maybe (Text, RQL.ArgumentExp UnpreparedValue)))] -- mandatory argument
, [(Text, IR.ArgumentExp UnpreparedValue)] -- session argument
, [m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp UnpreparedValue)))] -- optional argument
, [m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp UnpreparedValue)))] -- mandatory argument
)
)
splitArguments positionalIndex (IASessionVariables name) =
@ -1159,7 +1160,7 @@ functionArgs functionName (toList -> inputArgs) = do
then (newIndex, ([argName], [], [parseArgument arg argName], []))
else (newIndex, ([argName], [], [], [parseArgument arg argName]))
parseArgument :: FunctionArg -> Text -> m (InputFieldsParser n (Maybe (Text, RQL.ArgumentExp UnpreparedValue)))
parseArgument :: FunctionArg -> Text -> m (InputFieldsParser n (Maybe (Text, IR.ArgumentExp UnpreparedValue)))
parseArgument arg name = do
columnParser <- P.column (PGColumnScalar $ _qptName $ faType arg) (G.Nullability True)
fieldName <- textToName name
@ -1175,12 +1176,12 @@ functionArgs functionName (toList -> inputArgs) = do
-- surprises, we prefer to reject the query if a mandatory argument is
-- missing rather than filling the blanks for the user.
let argParser = P.fieldOptional fieldName Nothing columnParser
pure $ argParser `mapField` ((name,) . RQL.AEInput . mkParameter)
pure $ argParser `mapField` ((name,) . IR.AEInput . mkParameter)
namedArgument
:: HashMap Text (RQL.ArgumentExp UnpreparedValue)
:: HashMap Text (IR.ArgumentExp UnpreparedValue)
-> (Text, InputArgument FunctionArg)
-> n (Maybe (Text, RQL.ArgumentExp UnpreparedValue))
-> n (Maybe (Text, IR.ArgumentExp UnpreparedValue))
namedArgument dictionary (name, inputArgument) = case inputArgument of
IASessionVariables _ -> pure $ Just (name, sessionPlaceholder)
IAUserProvided arg -> case Map.lookup name dictionary of
@ -1192,7 +1193,7 @@ functionArgs functionName (toList -> inputArgs) = do
-- | The "path" argument for json column fields
jsonPathArg :: MonadParse n => PGColumnType -> InputFieldsParser n (Maybe RQL.ColumnOp)
jsonPathArg :: MonadParse n => PGColumnType -> InputFieldsParser n (Maybe (IR.ColumnOp 'Postgres))
jsonPathArg columnType
| isScalarColumnWhere isJSONType columnType =
P.fieldOptional fieldName description P.string `P.bindFields` fmap join . traverse toColExp
@ -1203,14 +1204,14 @@ jsonPathArg columnType
toColExp textValue = case parseJSONPath textValue of
Left err -> parseError $ T.pack $ "parse json path error: " ++ err
Right [] -> pure Nothing
Right jPaths -> pure $ Just $ RQL.ColumnOp SQL.jsonbPathOp $ SQL.SEArray $ map elToColExp jPaths
elToColExp (Key k) = SQL.SELit k
elToColExp (Index i) = SQL.SELit $ T.pack (show i)
Right jPaths -> pure $ Just $ IR.ColumnOp PG.jsonbPathOp $ PG.SEArray $ map elToColExp jPaths
elToColExp (Key k) = PG.SELit k
elToColExp (Index i) = PG.SELit $ T.pack (show i)
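-- Illustration (assuming 'PG.jsonbPathOp' is Postgres's #> operator): a "path"
-- argument that parses to [Key "a", Index 0] becomes the SQL expression
-- column #> '{a,0}'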
tablePermissionsInfo :: SelPermInfo 'Postgres -> TablePerms 'Postgres
tablePermissionsInfo selectPermissions = RQL.TablePerm
{ RQL._tpFilter = fmapAnnBoolExp partialSQLExpToUnpreparedValue $ spiFilter selectPermissions
, RQL._tpLimit = spiLimit selectPermissions
tablePermissionsInfo selectPermissions = IR.TablePerm
{ IR._tpFilter = fmapAnnBoolExp partialSQLExpToUnpreparedValue $ spiFilter selectPermissions
, IR._tpLimit = spiLimit selectPermissions
}
------------------------ Node interface from Relay ---------------------------
@ -1313,18 +1314,18 @@ nodeField = do
onNothing (Map.lookup table parseds) $
withArgsPath $ throwInvalidNodeId $ "the table " <>> ident
whereExp <- buildNodeIdBoolExp columnValues pkeyColumns
return $ RQL.AnnSelectG
{ RQL._asnFields = fields
, RQL._asnFrom = RQL.FromTable table
, RQL._asnPerm = tablePermissionsInfo perms
, RQL._asnArgs = RQL.SelectArgs
{ RQL._saWhere = Just whereExp
, RQL._saOrderBy = Nothing
, RQL._saLimit = Nothing
, RQL._saOffset = Nothing
, RQL._saDistinct = Nothing
return $ IR.AnnSelectG
{ IR._asnFields = fields
, IR._asnFrom = IR.FromTable table
, IR._asnPerm = tablePermissionsInfo perms
, IR._asnArgs = IR.SelectArgs
{ IR._saWhere = Just whereExp
, IR._saOrderBy = Nothing
, IR._saLimit = Nothing
, IR._saOffset = Nothing
, IR._saDistinct = Nothing
}
, RQL._asnStrfyNum = stringifyNum
, IR._asnStrfyNum = stringifyNum
}
where
parseNodeId :: Text -> n NodeId
@ -1335,7 +1336,7 @@ nodeField = do
buildNodeIdBoolExp
:: NESeq.NESeq J.Value
-> NESeq.NESeq (ColumnInfo 'Postgres)
-> n (RQL.AnnBoolExp 'Postgres UnpreparedValue)
-> n (IR.AnnBoolExp 'Postgres UnpreparedValue)
buildNodeIdBoolExp columnValues pkeyColumns = do
let firstPkColumn NESeq.:<|| remainingPkColumns = pkeyColumns
firstColumnValue NESeq.:<|| remainingColumns = columnValues
@ -1350,11 +1351,11 @@ nodeField = do
let allTuples = (firstPkColumn, firstColumnValue):alignedTuples
either (parseErrorWith ParseFailed . qeError) pure $ runExcept $
fmap RQL.BoolAnd $ for allTuples $ \(columnInfo, columnValue) -> do
flip onLeft (parseErrorWith ParseFailed . qeError) $ runExcept $
fmap IR.BoolAnd $ for allTuples $ \(columnInfo, columnValue) -> do
let modifyErrFn t = "value of column " <> pgiColumn columnInfo
<<> " in node id: " <> t
pgColumnType = pgiType columnInfo
pgValue <- modifyErr modifyErrFn $ parsePGScalarValue pgColumnType columnValue
let unpreparedValue = flip UVParameter Nothing $ P.PGColumnValue pgColumnType pgValue
pure $ RQL.BoolFld $ RQL.AVCol columnInfo [RQL.AEQ True unpreparedValue]
pure $ IR.BoolFld $ IR.AVCol columnInfo [IR.AEQ True unpreparedValue]
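-- For reference (an assumption -- the node id codec is defined elsewhere): a
-- Relay node id is base64-encoded JSON of the shape
-- [1, "schema", "table", <primary-key value(s)>], whose trailing values are
-- the columnValues consumed by 'buildNodeIdBoolExp' above.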
View File
@ -73,6 +73,7 @@ import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
import qualified Hasura.Logging as L
import qualified Hasura.Server.Telemetry.Counters as Telem
import qualified Hasura.Tracing as Tracing
import Hasura.Server.Init.Config (KeepAliveDelay (..))
-- | 'LQ.LiveQueryId' comes from 'Hasura.GraphQL.Execute.LiveQuery.State.addLiveQuery'. We use
-- this to track a connection's operations so we can remove them from 'LiveQueryState', and
@ -228,11 +229,12 @@ data WSServerEnv
-- , _wseQueryCache :: !E.PlanCache -- See Note [Temporarily disabling query plan caching]
, _wseServer :: !WSServer
, _wseEnableAllowlist :: !Bool
, _wseKeepAliveDelay :: !KeepAliveDelay
}
onConn :: (MonadIO m)
=> L.Logger L.Hasura -> CorsPolicy -> WS.OnConnH m WSConnData
onConn (L.Logger logger) corsPolicy wsId requestHead ipAddress = do
onConn :: (MonadIO m, MonadReader WSServerEnv m)
=> WS.OnConnH m WSConnData
onConn wsId requestHead ipAddress = do
res <- runExceptT $ do
(errType, queryType) <- checkPath
let reqHdrs = WS.requestHeaders requestHead
@ -241,9 +243,10 @@ onConn (L.Logger logger) corsPolicy wsId requestHead ipAddress = do
either reject accept res
where
keepAliveAction wsConn = liftIO $ forever $ do
sendMsg wsConn SMConnKeepAlive
sleep $ seconds 5
keepAliveAction keepAliveDelay wsConn = do
liftIO $ forever $ do
sendMsg wsConn SMConnKeepAlive
sleep $ seconds (unKeepAliveDelay keepAliveDelay)
tokenExpiryHandler wsConn = do
expTime <- liftIO $ STM.atomically $ do
@ -256,6 +259,8 @@ onConn (L.Logger logger) corsPolicy wsId requestHead ipAddress = do
sleep $ convertDuration $ TC.diffUTCTime expTime currTime
accept (hdrs, errType, queryType) = do
(L.Logger logger) <- asks _wseLogger
keepAliveDelay <- asks _wseKeepAliveDelay
logger $ mkWsInfoLog Nothing (WsConnInfo wsId Nothing Nothing) EAccepted
connData <- liftIO $ WSConnData
<$> STM.newTVarIO (CSNotInitialised hdrs ipAddress)
@ -264,9 +269,9 @@ onConn (L.Logger logger) corsPolicy wsId requestHead ipAddress = do
<*> pure queryType
let acceptRequest = WS.defaultAcceptRequest
{ WS.acceptSubprotocol = Just "graphql-ws"}
return $ Right $ WS.AcceptWith connData acceptRequest keepAliveAction tokenExpiryHandler
return $ Right $ WS.AcceptWith connData acceptRequest (keepAliveAction keepAliveDelay) tokenExpiryHandler
reject qErr = do
(L.Logger logger) <- asks _wseLogger
logger $ mkWsErrorLog Nothing (WsConnInfo wsId Nothing Nothing) (ERejected qErr)
return $ Left $ WS.RejectRequest
(H.statusCode $ qeStatus qErr)
@ -283,21 +288,24 @@ onConn (L.Logger logger) corsPolicy wsId requestHead ipAddress = do
getOrigin =
find ((==) "Origin" . fst) (WS.requestHeaders requestHead)
enforceCors origin reqHdrs = case cpConfig corsPolicy of
CCAllowAll -> return reqHdrs
CCDisabled readCookie ->
if readCookie
then return reqHdrs
else do
lift $ logger $ mkWsInfoLog Nothing (WsConnInfo wsId Nothing (Just corsNote)) EAccepted
return $ filter (\h -> fst h /= "Cookie") reqHdrs
CCAllowedOrigins ds
-- if the origin is in our cors domains, no error
| bsToTxt origin `elem` dmFqdns ds -> return reqHdrs
-- if current origin is part of wildcard domain list, no error
| inWildcardList ds (bsToTxt origin) -> return reqHdrs
-- otherwise error
| otherwise -> corsErr
enforceCors origin reqHdrs = do
(L.Logger logger) <- asks _wseLogger
corsPolicy <- asks _wseCorsPolicy
case cpConfig corsPolicy of
CCAllowAll -> return reqHdrs
CCDisabled readCookie ->
if readCookie
then return reqHdrs
else do
lift $ logger $ mkWsInfoLog Nothing (WsConnInfo wsId Nothing (Just corsNote)) EAccepted
return $ filter (\h -> fst h /= "Cookie") reqHdrs
CCAllowedOrigins ds
-- if the origin is in our cors domains, no error
| bsToTxt origin `elem` dmFqdns ds -> return reqHdrs
-- if current origin is part of wildcard domain list, no error
| inWildcardList ds (bsToTxt origin) -> return reqHdrs
-- otherwise error
| otherwise -> corsErr
filterWsHeaders hdrs = flip filter hdrs $ \(n, _) ->
n `notElem` [ "sec-websocket-key"
@ -444,7 +452,7 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
return $ ResultsFragment telemTimeIO_DT Telem.Remote (JO.toEncJSON value) []
WSServerEnv logger pgExecCtx lqMap getSchemaCache httpMgr _ sqlGenCtx {- planCache -}
_ enableAL = serverEnv
_ enableAL _keepAliveDelay = serverEnv
WSConnData userInfoR opMap errRespTy queryType = WS.getData wsConn
@ -690,14 +698,15 @@ createWSServerEnv
-> CorsPolicy
-> SQLGenCtx
-> Bool
-> KeepAliveDelay
-- -> E.PlanCache
-> m WSServerEnv
createWSServerEnv logger isPgCtx lqState getSchemaCache httpManager
corsPolicy sqlGenCtx enableAL {- planCache -} = do
corsPolicy sqlGenCtx enableAL keepAliveDelay {- planCache -} = do
wsServer <- liftIO $ STM.atomically $ WS.createWSServer logger
return $
WSServerEnv logger isPgCtx lqState getSchemaCache httpManager corsPolicy
sqlGenCtx {- planCache -} wsServer enableAL
sqlGenCtx {- planCache -} wsServer enableAL keepAliveDelay
createWSServerApp
:: ( HasVersion
@ -723,7 +732,7 @@ createWSServerApp env authMode serverEnv = \ !ipAddress !pendingConn ->
handlers =
WS.WSHandlers
-- Mask async exceptions during event processing to help maintain integrity of mutable vars:
(\rid rh ip -> mask_ $ onConn (_wseLogger serverEnv) (_wseCorsPolicy serverEnv) rid rh ip)
(\rid rh ip -> mask_ $ flip runReaderT serverEnv $ onConn rid rh ip)
(\conn bs -> mask_ $ onMessage env authMode serverEnv conn bs)
(mask_ . onClose (_wseLogger serverEnv) (_wseLiveQMap serverEnv))
View File
@ -7,6 +7,8 @@ module Hasura.Incremental.Internal.Dependency where
import Hasura.Prelude
import qualified Data.Dependent.Map as DM
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.URL.Template as UT
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.URI.Extended as N
@ -21,8 +23,8 @@ import Data.Set (Set)
import Data.Text.NonEmpty
import Data.Time.Clock
import Data.Vector (Vector)
import GHC.Generics (Generic (..), K1 (..), M1 (..), U1 (..), V1,
(:*:) (..), (:+:) (..))
import GHC.Generics ((:*:) (..), (:+:) (..), Generic (..), K1 (..),
M1 (..), U1 (..), V1)
import System.Cron.Types
import Hasura.Incremental.Select
@ -196,6 +198,10 @@ instance (Cacheable a) => Cacheable (CI a) where
unchanged _ = (==)
instance (Cacheable a) => Cacheable (Set a) where
unchanged = liftEq . unchanged
instance (Hashable k, Cacheable k, Cacheable v) => Cacheable (InsOrdHashMap k v) where
unchanged accesses l r = unchanged accesses (toHashMap l) (toHashMap r)
where
toHashMap = Map.fromList . OMap.toList
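-- Comparing via a plain 'HashMap' makes this instance insensitive to
-- insertion order: two maps holding the same key-value pairs are considered
-- unchanged even if they were built in different orders.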
instance Cacheable ()
instance (Cacheable a, Cacheable b) => Cacheable (a, b)
View File
@ -17,9 +17,9 @@ module Hasura.Prelude
, liftEitherM
-- * Efficient coercions
, coerce
, coerceSet
, findWithIndex
, mapFromL
, oMapFromL
-- * Measuring and working with moments and durations
, withElapsedTime
, startTimer
@ -46,10 +46,10 @@ import Data.Foldable as M (asum, fold, foldrM, for
traverse_)
import Data.Function as M (on, (&))
import Data.Functor as M (($>), (<&>))
import Data.Hashable as M (Hashable)
import Data.HashMap.Strict as M (HashMap)
import Data.HashMap.Strict.InsOrd as M (InsOrdHashMap)
import Data.HashSet as M (HashSet)
import Data.Hashable as M (Hashable)
import Data.List as M (find, findIndex, foldl', group,
intercalate, intersect, lookup, sort,
sortBy, sortOn, union, unionBy, (\\))
@ -79,13 +79,12 @@ import qualified Data.ByteString.Base64.Lazy as Base64
import qualified Data.ByteString.Lazy as BL
import Data.Coerce
import qualified Data.HashMap.Strict as Map
import qualified Data.Set as Set
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified GHC.Clock as Clock
import qualified Test.QuickCheck as QC
import Unsafe.Coerce
alphabet :: String
alphabet = ['a'..'z'] ++ ['A'..'Z']
@ -140,16 +139,6 @@ spanMaybeM f = go . toList
Just y -> first (y:) <$> go xs
Nothing -> pure ([], l)
-- | Efficiently coerce a set from one type to another.
--
-- This has the same safety properties as 'Set.mapMonotonic', and is equivalent
-- to @Set.mapMonotonic coerce@ but is more efficient. This is safe to use when
-- both @a@ and @b@ have automatically derived @Ord@ instances.
--
-- https://stackoverflow.com/q/57963881/176841
coerceSet :: Coercible a b=> Set.Set a -> Set.Set b
coerceSet = unsafeCoerce
findWithIndex :: (a -> Bool) -> [a] -> Maybe (a, Int)
findWithIndex p l = do
v <- find p l
@ -160,6 +149,9 @@ findWithIndex p l = do
mapFromL :: (Eq k, Hashable k) => (a -> k) -> [a] -> Map.HashMap k a
mapFromL f = Map.fromList . map (\v -> (f v, v))
oMapFromL :: (Eq k, Hashable k) => (a -> k) -> [a] -> InsOrdHashMap k a
oMapFromL f = OMap.fromList . map (\v -> (f v, v))
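-- Unlike 'mapFromL', insertion order is preserved. For illustration:
--
-- >>> OMap.toList (oMapFromL head ["alpha", "beta", "gamma"])
-- [('a',"alpha"),('b',"beta"),('g',"gamma")]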
-- | Time an IO action, returning the time with microsecond precision. The
-- result of the input action will be evaluated to WHNF.
--
View File
@ -31,22 +31,12 @@ import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Schema.Function (RawFunctionInfo (..), mkFunctionArgs)
import Hasura.RQL.DDL.Schema.Function (mkFunctionArgs)
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.Server.Utils (makeReasonMessage)
data ComputedFieldDefinition
= ComputedFieldDefinition
{ _cfdFunction :: !QualifiedFunction
, _cfdTableArgument :: !(Maybe FunctionArgName)
, _cfdSessionArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Lift, Generic)
instance NFData ComputedFieldDefinition
instance Cacheable ComputedFieldDefinition
$(deriveJSON (aesonDrop 4 snakeCase){omitNothingFields = True} ''ComputedFieldDefinition)
data AddComputedField
= AddComputedField
{ _afcTable :: !QualifiedTable
View File
@ -9,19 +9,20 @@ module Hasura.RQL.DDL.EventTrigger
-- TODO(from master): review
, delEventTriggerFromCatalog
, subTableP2
, subTableP2Setup
, mkEventTriggerInfo
, mkAllTriggersQ
, delTriggerQ
, getEventTriggerDef
, getWebhookInfoFromConf
, getHeaderInfosFromConf
, updateEventTriggerInCatalog
, replaceEventTriggersInCatalog
) where
import Hasura.Prelude
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict.Extended as Map
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Database.PG.Query as Q
@ -42,17 +43,12 @@ import Hasura.SQL.Types
data OpVar = OLD | NEW deriving (Show)
-- pgIdenTrigger constructs the name of the pg function
-- used for event triggers which are present in the hdb_views schema.
-- used for event triggers which are present in the hdb_catalog schema.
pgIdenTrigger :: Ops -> TriggerName -> Text
pgIdenTrigger op trn = pgFmtIdentifier . qualifyTriggerName op $ triggerNameToTxt trn
where
qualifyTriggerName op' trn' = "notify_hasura_" <> trn' <> "_" <> T.pack (show op')
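-- e.g. for a trigger named "user_sync", the INSERT variant is (roughly) the
-- quoted identifier "notify_hasura_user_sync_INSERT"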
getDropFuncSql :: Ops -> TriggerName -> Text
getDropFuncSql op trn = "DROP FUNCTION IF EXISTS"
<> " hdb_views." <> pgIdenTrigger op trn <> "()"
<> " CASCADE"
mkAllTriggersQ
:: (MonadTx m, HasSQLGenCtx m)
=> TriggerName
@ -121,9 +117,16 @@ mkTriggerQ trn qt allCols op (SubscribeOpSpec columns payload) = do
opToTxt = T.pack . show
delTriggerQ :: TriggerName -> Q.TxE QErr ()
delTriggerQ trn = mapM_ (\op -> Q.unitQE
defaultTxErrorHandler
(Q.fromText $ getDropFuncSql op trn) () False) [INSERT, UPDATE, DELETE]
delTriggerQ trn =
mapM_ (\op -> Q.unitQE
defaultTxErrorHandler
(Q.fromText $ getDropFuncSql op) () False) [INSERT, UPDATE, DELETE]
where
getDropFuncSql :: Ops -> T.Text
getDropFuncSql op =
"DROP FUNCTION IF EXISTS"
<> " hdb_catalog." <> pgIdenTrigger op trn <> "()"
<> " CASCADE"
addEventTriggerToCatalog
:: QualifiedTable
@ -211,13 +214,13 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re
SubCStar -> return ()
SubCArray pgcols -> forM_ pgcols (assertPGCol (_tciFieldInfoMap ti) "")
subTableP2Setup
mkEventTriggerInfo
:: QErrM m
=> Env.Environment
-> QualifiedTable
-> EventTriggerConf
-> m (EventTriggerInfo, [SchemaDependency])
subTableP2Setup env qt (EventTriggerConf name def webhook webhookFromEnv rconf mheaders) = do
mkEventTriggerInfo env qt (EventTriggerConf name def webhook webhookFromEnv rconf mheaders) = do
webhookConf <- case (webhook, webhookFromEnv) of
(Just w, Nothing) -> return $ WCValue w
(Nothing, Just wEnv) -> return $ WCEnv wEnv
@ -249,19 +252,14 @@ getTrigDefDeps qt (TriggerOpsDef mIns mUpd mDel _) =
SubCStar -> []
SubCArray pgcols -> pgcols
subTableP2
:: (MonadTx m)
=> QualifiedTable -> Bool -> EventTriggerConf -> m ()
subTableP2 qt replace etc = liftTx if replace
then updateEventTriggerInCatalog etc
else addEventTriggerToCatalog qt etc
runCreateEventTriggerQuery
:: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m)
=> CreateEventTriggerQuery -> m EncJSON
runCreateEventTriggerQuery q = do
(qt, replace, etc) <- subTableP1 q
subTableP2 qt replace etc
liftTx if replace
then updateEventTriggerInCatalog etc
else addEventTriggerToCatalog qt etc
buildSchemaCacheFor $ MOTableObj qt (MTOTrigger $ etcName etc)
return successMsg
@ -343,13 +341,6 @@ getWebhookInfoFromConf env wc = case wc of
envVal <- getEnv env we
return $ WebhookConfInfo wc envVal
getEnv :: QErrM m => Env.Environment -> Text -> m Text
getEnv env k = do
let mEnv = Env.lookupEnv env (T.unpack k)
case mEnv of
Nothing -> throw400 NotFound $ "environment variable '" <> k <> "' not set"
Just envVal -> return (T.pack envVal)
getEventTriggerDef
:: TriggerName
-> Q.TxE QErr (QualifiedTable, EventTriggerConf)
@ -370,3 +361,35 @@ updateEventTriggerInCatalog trigConf =
configuration = $1
WHERE name = $2
|] (Q.AltJ $ toJSON trigConf, etcName trigConf) True
-- | Replaces /all/ event triggers in the catalog with new ones, taking care to
-- drop SQL trigger functions and archive events for any deleted event triggers.
--
-- See Note [Diff-and-patch event triggers on replace] for more details.
replaceEventTriggersInCatalog
:: MonadTx m
=> HashMap TriggerName (QualifiedTable, EventTriggerConf)
-> m ()
replaceEventTriggersInCatalog triggerConfs = do
existingTriggers <- Map.fromListOn id <$> fetchExistingTriggers
liftTx $ for_ (align existingTriggers triggerConfs) \case
This triggerName -> delEventTriggerFromCatalog triggerName
That (tableName, triggerConf) -> addEventTriggerToCatalog tableName triggerConf
These _ (_, triggerConf) -> updateEventTriggerInCatalog triggerConf
where
fetchExistingTriggers = liftTx $ map runIdentity <$>
Q.listQE defaultTxErrorHandler
[Q.sql|SELECT name FROM hdb_catalog.event_triggers|] () True
{- Note [Diff-and-patch event triggers on replace]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When executing a replace_metadata API call, we usually just drop everything in
the catalog and recreate it from scratch, then rebuild the schema cache. This
works fine for most things, but it's a bad idea for event triggers, because
delEventTriggerFromCatalog does extra work: it deletes the SQL trigger functions
and archives all associated events.
Therefore, we have to be more careful about which event triggers we drop. We
diff the new metadata against the old metadata, and we only drop triggers that
are actually absent in the new metadata. The replaceEventTriggersInCatalog
function implements this diff-and-patch operation. -}
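To make the note concrete, here is a minimal sketch of the same diff-and-patch pattern, using 'align' from the semialign package as replaceEventTriggersInCatalog does; the types and names are illustrative, not the engine's.

{-# LANGUAGE LambdaCase #-}
import Data.Align (align)       -- semialign
import Data.These (These (..))  -- these
import qualified Data.HashMap.Strict as Map

-- decide, per trigger name, whether to drop, create, or update
plan :: Map.HashMap String a -> Map.HashMap String b -> [String]
plan old new = Map.elems $ Map.mapWithKey decide $ align old new
  where
    decide name = \case
      This _    -> "drop "   ++ name  -- present only in the old metadata
      That _    -> "create " ++ name  -- present only in the new metadata
      These _ _ -> "update " ++ name  -- present in both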
View File
@ -2,7 +2,7 @@
module Hasura.RQL.DDL.Metadata
( runReplaceMetadata
, runExportMetadata
, fetchMetadata
, fetchMetadataFromHdbTables
, runClearMetadata
, runReloadMetadata
, runDumpInternalState
@ -15,10 +15,11 @@ module Hasura.RQL.DDL.Metadata
import Hasura.Prelude
import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as HM
import qualified Data.HashMap.Strict.InsOrd as HMIns
import qualified Data.HashSet as HS
import qualified Data.HashSet.InsOrd as HSIns
import qualified Data.List as L
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import Control.Lens hiding ((.=))
@ -36,10 +37,11 @@ import qualified Hasura.RQL.DDL.Schema as Schema
import Hasura.Backends.Postgres.SQL.Types
import Hasura.EncJSON
import Hasura.RQL.DDL.ComputedField (dropComputedFieldFromCatalog)
import Hasura.RQL.DDL.EventTrigger (delEventTriggerFromCatalog, subTableP2)
import Hasura.RQL.DDL.EventTrigger (delEventTriggerFromCatalog,
replaceEventTriggersInCatalog)
import Hasura.RQL.DDL.Metadata.Types
import Hasura.RQL.DDL.Permission.Internal (dropPermFromCatalog)
import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaToCatalog, fetchRemoteSchemas,
import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaToCatalog,
removeRemoteSchemaFromCatalog)
import Hasura.RQL.DDL.ScheduledTrigger (addCronTriggerToCatalog,
deleteCronTriggerFromCatalog)
@ -49,10 +51,11 @@ import Hasura.RQL.Types
-- | Purge all user-defined metadata; metadata with is_system_defined = false
clearUserMetadata :: MonadTx m => m ()
clearUserMetadata = liftTx $ Q.catchE defaultTxErrorHandler $ do
-- Note: we don't drop event triggers here because we update them in a different
-- way; see Note [Diff-and-patch event triggers on replace] in Hasura.RQL.DDL.EventTrigger.
Q.unitQ "DELETE FROM hdb_catalog.hdb_function WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_permission WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_relationship WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.event_triggers" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_computed_field" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_remote_relationship" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_table WHERE is_system_defined <> 'true'" () False
@ -69,89 +72,16 @@ runClearMetadata
=> ClearMetadata -> m EncJSON
runClearMetadata _ = do
clearUserMetadata
replaceEventTriggersInCatalog mempty
buildSchemaCacheStrict
return successMsg
applyQP1
:: (QErrM m)
=> ReplaceMetadata -> m ()
applyQP1 (ReplaceMetadata _ tables functionsMeta schemas
collections
allowlist _ actions
cronTriggers) = do
withPathK "tables" $ do
checkMultipleDecls "tables" $ map _tmTable tables
-- process each table
void $ indexedForM tables $ \table -> withTableName (table ^. tmTable) $ do
let allRels = map Relationship.rdName (table ^. tmObjectRelationships) <>
map Relationship.rdName (table ^. tmArrayRelationships)
insPerms = map Permission.pdRole $ table ^. tmInsertPermissions
selPerms = map Permission.pdRole $ table ^. tmSelectPermissions
updPerms = map Permission.pdRole $ table ^. tmUpdatePermissions
delPerms = map Permission.pdRole $ table ^. tmDeletePermissions
eventTriggers = map etcName $ table ^. tmEventTriggers
computedFields = map _cfmName $ table ^. tmComputedFields
remoteRelationships = map _rrmName $ table ^. tmRemoteRelationships
checkMultipleDecls "relationships" allRels
checkMultipleDecls "insert permissions" insPerms
checkMultipleDecls "select permissions" selPerms
checkMultipleDecls "update permissions" updPerms
checkMultipleDecls "delete permissions" delPerms
checkMultipleDecls "event triggers" eventTriggers
checkMultipleDecls "computed fields" computedFields
checkMultipleDecls "remote relationships" remoteRelationships
withPathK "functions" $
case functionsMeta of
FMVersion1 qualifiedFunctions ->
checkMultipleDecls "functions" qualifiedFunctions
FMVersion2 functionsV2 ->
checkMultipleDecls "functions" $ map Schema._tfv2Function functionsV2
withPathK "remote_schemas" $
checkMultipleDecls "remote schemas" $ map _arsqName schemas
withPathK "query_collections" $
checkMultipleDecls "query collections" $ map Collection._ccName collections
withPathK "allowlist" $
checkMultipleDecls "allow list" $ map Collection._crCollection allowlist
withPathK "actions" $
checkMultipleDecls "actions" $ map _amName actions
withPathK "cron_triggers" $
checkMultipleDecls "cron triggers" $ map ctName cronTriggers
where
withTableName qt = withPathK (qualifiedObjectToText qt)
checkMultipleDecls t l = do
let dups = getDups l
unless (null dups) $
throw400 AlreadyExists $ "multiple declarations exist for the following " <> t <> " : "
<> T.pack (show dups)
getDups l =
l L.\\ HS.toList (HS.fromList l)
applyQP2 :: (CacheRWM m, MonadTx m, HasSystemDefined m) => ReplaceMetadata -> m EncJSON
applyQP2 replaceMetadata = do
clearUserMetadata
saveMetadata replaceMetadata
buildSchemaCacheStrict
pure successMsg
saveMetadata :: (MonadTx m, HasSystemDefined m) => ReplaceMetadata -> m ()
saveMetadata (ReplaceMetadata _ tables functionsMeta
saveMetadata :: (MonadTx m, HasSystemDefined m) => Metadata -> m ()
saveMetadata (Metadata tables functions
schemas collections allowlist customTypes actions cronTriggers) = do
withPathK "tables" $ do
indexedForM_ tables $ \TableMeta{..} -> do
indexedForM_ tables $ \TableMetadata{..} -> do
-- Save table
saveTableToCatalog _tmTable _tmIsEnum _tmConfiguration
@ -166,14 +96,14 @@ saveMetadata (ReplaceMetadata _ tables functionsMeta
-- Computed Fields
withPathK "computed_fields" $
indexedForM_ _tmComputedFields $
\(ComputedFieldMeta name definition comment) ->
\(ComputedFieldMetadata name definition comment) ->
ComputedField.addComputedFieldToCatalog $
ComputedField.AddComputedField _tmTable name definition comment
-- Remote Relationships
withPathK "remote_relationships" $
indexedForM_ _tmRemoteRelationships $
\(RemoteRelationshipMeta name def) -> do
\(RemoteRelationshipMetadata name def) -> do
let RemoteRelationshipDef rs hf rf = def
liftTx $ RemoteRelationship.persistRemoteRelationship $
RemoteRelationship name _tmTable hf rs rf
@ -184,16 +114,14 @@ saveMetadata (ReplaceMetadata _ tables functionsMeta
withPathK "update_permissions" $ processPerms _tmTable _tmUpdatePermissions
withPathK "delete_permissions" $ processPerms _tmTable _tmDeletePermissions
-- Event triggers
withPathK "event_triggers" $
indexedForM_ _tmEventTriggers $ \etc -> subTableP2 _tmTable False etc
-- Event triggers
let allEventTriggers = HMIns.elems tables & map \table ->
(_tmTable table,) <$> HMIns.toHashMap (_tmEventTriggers table)
replaceEventTriggersInCatalog $ HM.unions allEventTriggers
-- sql functions
withPathK "functions" $ case functionsMeta of
FMVersion1 qualifiedFunctions -> indexedForM_ qualifiedFunctions $
\qf -> Schema.saveFunctionToCatalog qf Schema.emptyFunctionConfig
FMVersion2 functionsV2 -> indexedForM_ functionsV2 $
\(Schema.TrackFunctionV2 function config) -> Schema.saveFunctionToCatalog function config
withPathK "functions" $ indexedForM_ functions $
\(FunctionMetadata function config) -> Schema.saveFunctionToCatalog function config
-- query collections
systemDefined <- askSystemDefined
@ -238,13 +166,15 @@ runReplaceMetadata
, CacheRWM m
, HasSystemDefined m
)
=> ReplaceMetadata -> m EncJSON
runReplaceMetadata q = do
applyQP1 q
applyQP2 q
=> Metadata -> m EncJSON
runReplaceMetadata metadata = do
clearUserMetadata
saveMetadata metadata
buildSchemaCacheStrict
pure successMsg
fetchMetadata :: Q.TxE QErr ReplaceMetadata
fetchMetadata = do
fetchMetadataFromHdbTables :: MonadTx m => m Metadata
fetchMetadataFromHdbTables = liftTx do
tables <- Q.catchE defaultTxErrorHandler fetchTables
let tableMetaMap = HMIns.fromList . flip map tables $
\(schema, name, isEnum, maybeConfig) ->
@ -277,63 +207,55 @@ fetchMetadata = do
-- Fetch all remote relationships
remoteRelationships <- Q.catchE defaultTxErrorHandler fetchRemoteRelationships
let (_, postRelMap) = flip runState tableMetaMap $ do
modMetaMap tmObjectRelationships objRelDefs
modMetaMap tmArrayRelationships arrRelDefs
modMetaMap tmInsertPermissions insPermDefs
modMetaMap tmSelectPermissions selPermDefs
modMetaMap tmUpdatePermissions updPermDefs
modMetaMap tmDeletePermissions delPermDefs
modMetaMap tmEventTriggers triggerMetaDefs
modMetaMap tmComputedFields computedFields
modMetaMap tmRemoteRelationships remoteRelationships
let (_, fullTableMetaMap) = flip runState tableMetaMap $ do
modMetaMap tmObjectRelationships _rdName objRelDefs
modMetaMap tmArrayRelationships _rdName arrRelDefs
modMetaMap tmInsertPermissions _pdRole insPermDefs
modMetaMap tmSelectPermissions _pdRole selPermDefs
modMetaMap tmUpdatePermissions _pdRole updPermDefs
modMetaMap tmDeletePermissions _pdRole delPermDefs
modMetaMap tmEventTriggers etcName triggerMetaDefs
modMetaMap tmComputedFields _cfmName computedFields
modMetaMap tmRemoteRelationships _rrmName remoteRelationships
-- fetch all functions
functions <- FMVersion2 <$> Q.catchE defaultTxErrorHandler fetchFunctions
functions <- Q.catchE defaultTxErrorHandler fetchFunctions
-- fetch all remote schemas
remoteSchemas <- fetchRemoteSchemas
remoteSchemas <- oMapFromL _arsqName <$> fetchRemoteSchemas
-- fetch all collections
collections <- fetchCollections
collections <- oMapFromL _ccName <$> fetchCollections
-- fetch allow list
allowlist <- map Collection.CollectionReq <$> fetchAllowlists
allowlist <- HSIns.fromList . map CollectionReq <$> fetchAllowlists
customTypes <- fetchCustomTypes
-- -- fetch actions
actions <- fetchActions
-- fetch actions
actions <- oMapFromL _amName <$> fetchActions
cronTriggers <- fetchCronTriggers
return $ ReplaceMetadata currentMetadataVersion
(HMIns.elems postRelMap)
functions
remoteSchemas
collections
allowlist
customTypes
actions
cronTriggers
pure $ Metadata fullTableMetaMap functions remoteSchemas collections
allowlist customTypes actions cronTriggers
where
modMetaMap l xs = do
modMetaMap l f xs = do
st <- get
put $ foldr (\(qt, dfn) b -> b & at qt._Just.l %~ (:) dfn) st xs
put $ foldl' (\b (qt, dfn) -> b & at qt._Just.l %~ HMIns.insert (f dfn) dfn) st xs
mkPermDefs pt = mapM permRowToDef . filter (\pr -> pr ^. _4 == pt)
permRowToDef (sn, tn, rn, _, Q.AltJ pDef, mComment) = do
perm <- decodeValue pDef
return (QualifiedObject sn tn, Permission.PermDef rn perm mComment)
return (QualifiedObject sn tn, PermDef rn perm mComment)
mkRelDefs rt = mapM relRowToDef . filter (\rr -> rr ^. _4 == rt)
relRowToDef (sn, tn, rn, _, Q.AltJ rDef, mComment) = do
using <- decodeValue rDef
return (QualifiedObject sn tn, Relationship.RelDef rn using mComment)
return (QualifiedObject sn tn, RelDef rn using mComment)
mkTriggerMetaDefs = mapM trigRowToDef
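The fetchers here key each list by name via oMapFromL before assembling the final Metadata. A minimal sketch of that helper, assuming it lives in the project's prelude and is defined along these lines (the definition site and duplicate-key behaviour are assumptions):

import qualified Data.HashMap.Strict.InsOrd as OM
import Data.Hashable (Hashable)

-- Build an insertion-ordered map from a list, keyed by a projection;
-- on duplicate keys the later element should win, as with fromList.
oMapFromL :: (Eq k, Hashable k) => (a -> k) -> [a] -> OM.InsOrdHashMap k a
oMapFromL f = OM.fromList . map (\x -> (f x, x))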
@ -379,8 +301,21 @@ fetchMetadata = do
WHERE is_system_defined = 'false'
ORDER BY function_schema ASC, function_name ASC
|] () False
pure $ flip map l $ \(sn, fn, Q.AltJ config) ->
Schema.TrackFunctionV2 (QualifiedObject sn fn) config
pure $ oMapFromL _fmFunction $
flip map l $ \(sn, fn, Q.AltJ config) ->
FunctionMetadata (QualifiedObject sn fn) config
fetchRemoteSchemas =
map fromRow <$> Q.listQE defaultTxErrorHandler
[Q.sql|
SELECT name, definition, comment
FROM hdb_catalog.remote_schemas
ORDER BY name ASC
|] () True
where
fromRow (name, Q.AltJ def, comment) =
AddRemoteSchemaQuery name def comment
fetchCollections =
map fromRow <$> Q.listQE defaultTxErrorHandler [Q.sql|
@ -391,7 +326,7 @@ fetchMetadata = do
|] () False
where
fromRow (name, Q.AltJ defn, mComment) =
Collection.CreateCollection name defn mComment
CreateCollection name defn mComment
fetchAllowlists = map runIdentity <$>
Q.listQE defaultTxErrorHandler [Q.sql|
@ -408,11 +343,11 @@ fetchMetadata = do
|] () False
pure $ flip map r $ \(schema, table, name, Q.AltJ definition, comment) ->
( QualifiedObject schema table
, ComputedFieldMeta name definition comment
, ComputedFieldMetadata name definition comment
)
fetchCronTriggers =
map uncurryCronTrigger
(oMapFromL ctName . map uncurryCronTrigger)
<$> Q.listQE defaultTxErrorHandler
[Q.sql|
SELECT ct.name, ct.webhook_conf, ct.cron_schedule, ct.payload,
@ -440,6 +375,7 @@ fetchMetadata = do
Q.rawQE defaultTxErrorHandler [Q.sql|
select coalesce((select custom_types::json from hdb_catalog.hdb_custom_types), '{}'::json)
|] [] False
fetchActions =
Q.getAltJ . runIdentity . Q.getRow <$> Q.rawQE defaultTxErrorHandler [Q.sql|
select
@ -482,14 +418,14 @@ fetchMetadata = do
|] () False
pure $ flip map r $ \(schema, table, name, Q.AltJ definition) ->
( QualifiedObject schema table
, RemoteRelationshipMeta name definition
, RemoteRelationshipMetadata name definition
)
runExportMetadata
:: (QErrM m, MonadTx m)
=> ExportMetadata -> m EncJSON
runExportMetadata _ =
AO.toEncJSON . replaceMetadataToOrdJSON <$> liftTx fetchMetadata
AO.toEncJSON . metadataToOrdJSON <$> fetchMetadataFromHdbTables
runReloadMetadata :: (QErrM m, CacheRWM m) => ReloadMetadata -> m EncJSON
runReloadMetadata (ReloadMetadata reloadRemoteSchemas) = do

View File

@ -1,11 +1,19 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | This module generates a random 'Metadata' object, using a number of
-- 'Arbitrary' instances. This is used by the QuickCheck-based testing suite.
-- This module is not used by the graphql-engine library itself, and we may wish
-- to relocate it, for instance to Hasura.Generator.
module Hasura.RQL.DDL.Metadata.Generator
(genReplaceMetadata)
(genMetadata)
where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict.InsOrd as OM
import qualified Data.HashSet.InsOrd as SetIns
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Language.GraphQL.Draft.Parser as G
@ -22,23 +30,16 @@ import Test.QuickCheck.Instances.Semigroup ()
import Test.QuickCheck.Instances.Time ()
import Test.QuickCheck.Instances.UnorderedContainers ()
import qualified Hasura.RQL.DDL.ComputedField as ComputedField
import qualified Hasura.RQL.DDL.Permission as Permission
import qualified Hasura.RQL.DDL.Permission.Internal as Permission
import qualified Hasura.RQL.DDL.QueryCollection as Collection
import qualified Hasura.RQL.DDL.Relationship as Relationship
import qualified Hasura.RQL.DDL.Schema as Schema
import Hasura.Backends.Postgres.SQL.Types
import Hasura.GraphQL.Utils (simpleGraphQLQuery)
import Hasura.RQL.DDL.Headers
import Hasura.RQL.DDL.Metadata.Types
import Hasura.RQL.Types
genReplaceMetadata :: Gen ReplaceMetadata
genReplaceMetadata = do
genMetadata :: Gen Metadata
genMetadata = do
version <- arbitrary
ReplaceMetadata version
Metadata
<$> arbitrary
<*> genFunctionsMetadata version
<*> arbitrary
@ -48,10 +49,16 @@ genReplaceMetadata = do
<*> arbitrary
<*> arbitrary
where
genFunctionsMetadata :: MetadataVersion -> Gen FunctionsMetadata
genFunctionsMetadata :: MetadataVersion -> Gen Functions
genFunctionsMetadata = \case
MVVersion1 -> FMVersion1 <$> arbitrary
MVVersion2 -> FMVersion2 <$> arbitrary
MVVersion1 -> OM.fromList . map (\qf -> (qf, FunctionMetadata qf emptyFunctionConfig)) <$> arbitrary
MVVersion2 -> arbitrary
instance (Arbitrary k, Eq k, Hashable k, Arbitrary v) => Arbitrary (InsOrdHashMap k v) where
arbitrary = OM.fromList <$> arbitrary
instance (Arbitrary a, Eq a, Hashable a) => Arbitrary (SetIns.InsOrdHashSet a) where
arbitrary = SetIns.fromList <$> arbitrary
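These orphan instances let QuickCheck populate the insertion-ordered containers Metadata now carries. One quick way to eyeball generator output (a sketch; assumes this module is loaded in GHCi):

import Test.QuickCheck (generate)

-- Draw one random Metadata value from the generator above.
sampleMetadata :: IO Metadata
sampleMetadata = generate genMetadata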
instance Arbitrary G.Name where
arbitrary = G.unsafeMkName . T.pack <$> listOf1 (elements ['a'..'z'])
@ -59,6 +66,9 @@ instance Arbitrary G.Name where
instance Arbitrary MetadataVersion where
arbitrary = genericArbitrary
instance Arbitrary FunctionMetadata where
arbitrary = genericArbitrary
instance Arbitrary TableCustomRootFields where
arbitrary = uniqueRootFields
where
@ -71,25 +81,25 @@ instance Arbitrary TableCustomRootFields where
instance Arbitrary TableConfig where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (Relationship.RelUsing a) where
instance (Arbitrary a) => Arbitrary (RelUsing a) where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (Relationship.RelDef a) where
instance (Arbitrary a) => Arbitrary (RelDef a) where
arbitrary = genericArbitrary
instance Arbitrary Relationship.RelManualConfig where
instance Arbitrary RelManualConfig where
arbitrary = genericArbitrary
instance Arbitrary Relationship.ArrRelUsingFKeyOn where
instance Arbitrary ArrRelUsingFKeyOn where
arbitrary = genericArbitrary
instance (Arbitrary a) => Arbitrary (Permission.PermDef a) where
instance (Arbitrary a) => Arbitrary (PermDef a) where
arbitrary = genericArbitrary
instance Arbitrary ComputedField.ComputedFieldDefinition where
instance Arbitrary ComputedFieldDefinition where
arbitrary = genericArbitrary
instance Arbitrary ComputedFieldMeta where
instance Arbitrary ComputedFieldMetadata where
arbitrary = genericArbitrary
instance Arbitrary Scientific where
@ -112,28 +122,28 @@ instance Arbitrary J.Value where
instance Arbitrary ColExp where
arbitrary = genericArbitrary
instance Arbitrary (GExists b ColExp) where
instance Arbitrary (GExists 'Postgres ColExp) where
arbitrary = genericArbitrary
instance Arbitrary (GBoolExp b ColExp) where
instance Arbitrary (GBoolExp 'Postgres ColExp) where
arbitrary = genericArbitrary
instance Arbitrary (BoolExp b) where
instance Arbitrary (BoolExp 'Postgres) where
arbitrary = genericArbitrary
instance Arbitrary Permission.PermColSpec where
instance Arbitrary PermColSpec where
arbitrary = genericArbitrary
instance Arbitrary (Permission.InsPerm b) where
instance Arbitrary (InsPerm 'Postgres) where
arbitrary = genericArbitrary
instance Arbitrary (Permission.SelPerm b) where
instance Arbitrary (SelPerm 'Postgres) where
arbitrary = genericArbitrary
instance Arbitrary (Permission.UpdPerm b) where
instance Arbitrary (UpdPerm 'Postgres) where
arbitrary = genericArbitrary
instance Arbitrary (Permission.DelPerm b) where
instance Arbitrary (DelPerm 'Postgres) where
arbitrary = genericArbitrary
instance Arbitrary SubscribeColumns where
@ -157,13 +167,13 @@ instance Arbitrary HeaderConf where
instance Arbitrary EventTriggerConf where
arbitrary = genericArbitrary
instance Arbitrary TableMeta where
instance Arbitrary TableMetadata where
arbitrary = genericArbitrary
instance Arbitrary Schema.FunctionConfig where
instance Arbitrary FunctionConfig where
arbitrary = genericArbitrary
instance Arbitrary Schema.TrackFunctionV2 where
instance Arbitrary TrackFunctionV2 where
arbitrary = genericArbitrary
instance Arbitrary QualifiedTable where
@ -185,23 +195,23 @@ instance Arbitrary AddRemoteSchemaQuery where
-- FIXME: The GraphQL AST provides a Hedgehog 'Gen', which lacks an
-- 'Arbitrary' instance. For the time being, a single query is generated every time.
instance Arbitrary Collection.GQLQueryWithText where
arbitrary = pure $ Collection.GQLQueryWithText ( simpleGraphQLQuery
, Collection.GQLQuery simpleQuery
)
instance Arbitrary GQLQueryWithText where
arbitrary = pure $ GQLQueryWithText ( simpleGraphQLQuery
, GQLQuery simpleQuery
)
where
simpleQuery = $(either (fail . T.unpack) TH.lift $ G.parseExecutableDoc simpleGraphQLQuery)
instance Arbitrary Collection.ListedQuery where
instance Arbitrary ListedQuery where
arbitrary = genericArbitrary
instance Arbitrary Collection.CollectionDef where
instance Arbitrary CollectionDef where
arbitrary = genericArbitrary
instance Arbitrary Collection.CreateCollection where
instance Arbitrary CreateCollection where
arbitrary = genericArbitrary
instance Arbitrary Collection.CollectionReq where
instance Arbitrary CollectionReq where
arbitrary = genericArbitrary
instance Arbitrary G.Description where
@ -307,13 +317,13 @@ deriving instance Arbitrary RemoteFields
instance Arbitrary RemoteRelationshipDef where
arbitrary = genericArbitrary
instance Arbitrary RemoteRelationshipMeta where
instance Arbitrary RemoteRelationshipMetadata where
arbitrary = genericArbitrary
instance Arbitrary CronTriggerMetadata where
arbitrary = genericArbitrary
instance Arbitrary WebhookConf where
instance Arbitrary UrlConf where
arbitrary = genericArbitrary
instance Arbitrary STRetryConf where
@ -326,7 +336,7 @@ instance Arbitrary CronSchedule where
arbitrary = elements sampleCronSchedules
sampleCronSchedules :: [CronSchedule]
sampleCronSchedules = rights $ map Cr.parseCronSchedule $
sampleCronSchedules = rights $ map Cr.parseCronSchedule
[ "* * * * *"
-- every minute
, "5 * * * *"

View File

@ -4,27 +4,6 @@
module Hasura.RQL.DDL.Metadata.Types
( currentMetadataVersion
, MetadataVersion(..)
, TableMeta(..)
, tmTable
, tmIsEnum
, tmConfiguration
, tmObjectRelationships
, tmArrayRelationships
, tmComputedFields
, tmRemoteRelationships
, tmInsertPermissions
, tmSelectPermissions
, tmUpdatePermissions
, tmDeletePermissions
, tmEventTriggers
, mkTableMeta
, ReplaceMetadata(..)
, replaceMetadataToOrdJSON
, ActionMetadata(..)
, ActionPermissionMetadata(..)
, ComputedFieldMeta(..)
, RemoteRelationshipMeta(..)
, FunctionsMetadata(..)
, ExportMetadata(..)
, ClearMetadata(..)
, ReloadMetadata(..)
@ -35,137 +14,13 @@ module Hasura.RQL.DDL.Metadata.Types
import Hasura.Prelude
import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens hiding (set, (.=))
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Language.Haskell.TH.Syntax (Lift)
import qualified Hasura.RQL.DDL.ComputedField as ComputedField
import qualified Hasura.RQL.DDL.Permission as Permission
import qualified Hasura.RQL.DDL.QueryCollection as Collection
import qualified Hasura.RQL.DDL.Relationship as Relationship
import qualified Hasura.RQL.DDL.Schema as Schema
import qualified Hasura.RQL.Types.RemoteRelationship as RemoteRelationship
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.Types
data MetadataVersion
= MVVersion1
| MVVersion2
deriving (Show, Eq, Lift, Generic)
instance ToJSON MetadataVersion where
toJSON MVVersion1 = toJSON @Int 1
toJSON MVVersion2 = toJSON @Int 2
instance FromJSON MetadataVersion where
parseJSON v = do
version :: Int <- parseJSON v
case version of
1 -> pure MVVersion1
2 -> pure MVVersion2
i -> fail $ "expected 1 or 2, encountered " ++ show i
currentMetadataVersion :: MetadataVersion
currentMetadataVersion = MVVersion2
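The version tag round-trips as a bare integer, and anything other than 1 or 2 is rejected. A small behaviour sketch of the parser above (assumes this module plus aeson in scope):

{-# LANGUAGE OverloadedStrings, TypeApplications #-}
import Data.Aeson (decode)

versionDemo :: IO ()
versionDemo = do
  print $ decode @MetadataVersion "1"  -- Just MVVersion1
  print $ decode @MetadataVersion "2"  -- Just MVVersion2
  print $ decode @MetadataVersion "3"  -- Nothing ("expected 1 or 2")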
data ComputedFieldMeta
= ComputedFieldMeta
{ _cfmName :: !ComputedFieldName
, _cfmDefinition :: !ComputedField.ComputedFieldDefinition
, _cfmComment :: !(Maybe Text)
} deriving (Show, Eq, Lift, Generic)
$(deriveJSON (aesonDrop 4 snakeCase) ''ComputedFieldMeta)
data RemoteRelationshipMeta
= RemoteRelationshipMeta
{ _rrmName :: !RemoteRelationshipName
, _rrmDefinition :: !RemoteRelationship.RemoteRelationshipDef
} deriving (Show, Eq, Lift, Generic)
$(deriveJSON (aesonDrop 4 snakeCase) ''RemoteRelationshipMeta)
data TableMeta
= TableMeta
{ _tmTable :: !QualifiedTable
, _tmIsEnum :: !Bool
, _tmConfiguration :: !TableConfig
, _tmObjectRelationships :: ![Relationship.ObjRelDef]
, _tmArrayRelationships :: ![Relationship.ArrRelDef]
, _tmComputedFields :: ![ComputedFieldMeta]
, _tmRemoteRelationships :: ![RemoteRelationshipMeta]
, _tmInsertPermissions :: ![Permission.InsPermDef 'Postgres]
, _tmSelectPermissions :: ![Permission.SelPermDef 'Postgres]
, _tmUpdatePermissions :: ![Permission.UpdPermDef 'Postgres]
, _tmDeletePermissions :: ![Permission.DelPermDef 'Postgres]
, _tmEventTriggers :: ![EventTriggerConf]
} deriving (Show, Eq, Lift, Generic)
$(makeLenses ''TableMeta)
mkTableMeta :: QualifiedTable -> Bool -> TableConfig -> TableMeta
mkTableMeta qt isEnum config =
TableMeta qt isEnum config [] [] [] [] [] [] [] [] []
instance FromJSON TableMeta where
parseJSON (Object o) = do
unless (null unexpectedKeys) $
fail $ "unexpected keys when parsing TableMetadata : "
<> show (HS.toList unexpectedKeys)
TableMeta
<$> o .: tableKey
<*> o .:? isEnumKey .!= False
<*> o .:? configKey .!= emptyTableConfig
<*> o .:? orKey .!= []
<*> o .:? arKey .!= []
<*> o .:? cfKey .!= []
<*> o .:? rrKey .!= []
<*> o .:? ipKey .!= []
<*> o .:? spKey .!= []
<*> o .:? upKey .!= []
<*> o .:? dpKey .!= []
<*> o .:? etKey .!= []
where
tableKey = "table"
isEnumKey = "is_enum"
configKey = "configuration"
orKey = "object_relationships"
arKey = "array_relationships"
ipKey = "insert_permissions"
spKey = "select_permissions"
upKey = "update_permissions"
dpKey = "delete_permissions"
etKey = "event_triggers"
cfKey = "computed_fields"
rrKey = "remote_relationships"
unexpectedKeys =
HS.fromList (HM.keys o) `HS.difference` expectedKeySet
expectedKeySet =
HS.fromList [ tableKey, isEnumKey, configKey, orKey
, arKey , ipKey, spKey, upKey, dpKey, etKey
, cfKey, rrKey
]
parseJSON _ =
fail "expecting an Object for TableMetadata"
data FunctionsMetadata
= FMVersion1 ![QualifiedFunction]
| FMVersion2 ![Schema.TrackFunctionV2]
deriving (Show, Eq, Lift, Generic)
instance ToJSON FunctionsMetadata where
toJSON (FMVersion1 qualifiedFunctions) = toJSON qualifiedFunctions
toJSON (FMVersion2 functionsV2) = toJSON functionsV2
data ClearMetadata
= ClearMetadata
@ -175,37 +30,6 @@ $(deriveToJSON defaultOptions ''ClearMetadata)
instance FromJSON ClearMetadata where
parseJSON _ = return ClearMetadata
data ReplaceMetadata
= ReplaceMetadata
{ aqVersion :: !MetadataVersion
, aqTables :: ![TableMeta]
, aqFunctions :: !FunctionsMetadata
, aqRemoteSchemas :: ![AddRemoteSchemaQuery]
, aqQueryCollections :: ![Collection.CreateCollection]
, aqAllowlist :: ![Collection.CollectionReq]
, aqCustomTypes :: !CustomTypes
, aqActions :: ![ActionMetadata]
, aqCronTriggers :: ![CronTriggerMetadata]
} deriving (Show, Eq)
instance FromJSON ReplaceMetadata where
parseJSON = withObject "Object" $ \o -> do
version <- o .:? "version" .!= MVVersion1
ReplaceMetadata version
<$> o .: "tables"
<*> (o .:? "functions" >>= parseFunctions version)
<*> o .:? "remote_schemas" .!= []
<*> o .:? "query_collections" .!= []
<*> o .:? "allowlist" .!= []
<*> o .:? "custom_types" .!= emptyCustomTypes
<*> o .:? "actions" .!= []
<*> o .:? "cron_triggers" .!= []
where
parseFunctions version maybeValue =
case version of
MVVersion1 -> FMVersion1 <$> maybe (pure []) parseJSON maybeValue
MVVersion2 -> FMVersion2 <$> maybe (pure []) parseJSON maybeValue
data ExportMetadata
= ExportMetadata
deriving (Show, Eq, Lift)
@ -214,15 +38,16 @@ $(deriveToJSON defaultOptions ''ExportMetadata)
instance FromJSON ExportMetadata where
parseJSON _ = return ExportMetadata
newtype ReloadMetadata
data ReloadMetadata
= ReloadMetadata
{ _rmReloadRemoteSchemas :: Bool}
deriving (Show, Eq, Lift)
{ _rmReloadRemoteSchemas :: !Bool
} deriving (Show, Eq, Lift)
$(deriveToJSON (aesonDrop 3 snakeCase) ''ReloadMetadata)
instance FromJSON ReloadMetadata where
parseJSON = \case
Object o -> ReloadMetadata <$> o .:? "reload_remote_schemas" .!= False
Object o -> ReloadMetadata
<$> o .:? "reload_remote_schemas" .!= False
_ -> pure $ ReloadMetadata False
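The parser is deliberately lenient: the key is optional, and any non-object payload falls back to the default. A behaviour sketch (assumes this module plus aeson in scope):

{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (decode)

reloadDemo :: IO ()
reloadDemo = mapM_ print
  ([ decode "{\"reload_remote_schemas\": true}"  -- Just (ReloadMetadata True)
   , decode "{}"                                 -- Just (ReloadMetadata False)
   , decode "null"                               -- Just (ReloadMetadata False)
   ] :: [Maybe ReloadMetadata])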
data DumpInternalState
@ -248,332 +73,3 @@ $(deriveToJSON defaultOptions ''DropInconsistentMetadata)
instance FromJSON DropInconsistentMetadata where
parseJSON _ = return DropInconsistentMetadata
instance ToJSON ReplaceMetadata where
toJSON = AO.fromOrdered . replaceMetadataToOrdJSON
-- | Encode 'ReplaceMetadata' to JSON with deterministic ordering. Ordering of object keys and array
-- elements should remain consistent across versions of graphql-engine if possible!
--
-- Note: While modifying any part of the code below, make sure the encoded JSON of each type is
-- parsable via its 'FromJSON' instance.
replaceMetadataToOrdJSON :: ReplaceMetadata -> AO.Value
replaceMetadataToOrdJSON ( ReplaceMetadata
version
tables
functions
remoteSchemas
queryCollections
allowlist
customTypes
actions
cronTriggers
) = AO.object $ [versionPair, tablesPair] <>
catMaybes [ functionsPair
, remoteSchemasPair
, queryCollectionsPair
, allowlistPair
, actionsPair
, customTypesPair
, cronTriggersPair
]
where
versionPair = ("version", AO.toOrdered version)
tablesPair = ("tables", AO.array $ map tableMetaToOrdJSON tables)
functionsPair = ("functions",) <$> functionsMetadataToOrdJSON functions
remoteSchemasPair = listToMaybeOrdPair "remote_schemas" remoteSchemaQToOrdJSON remoteSchemas
queryCollectionsPair = listToMaybeOrdPair "query_collections" createCollectionToOrdJSON queryCollections
allowlistPair = listToMaybeOrdPair "allowlist" AO.toOrdered allowlist
customTypesPair = if customTypes == emptyCustomTypes then Nothing
else Just ("custom_types", customTypesToOrdJSON customTypes)
actionsPair = listToMaybeOrdPair "actions" actionMetadataToOrdJSON actions
cronTriggersPair = listToMaybeOrdPair "cron_triggers" crontriggerQToOrdJSON cronTriggers
tableMetaToOrdJSON :: TableMeta -> AO.Value
tableMetaToOrdJSON ( TableMeta
table
isEnum
config
objectRelationships
arrayRelationships
computedFields
remoteRelationships
insertPermissions
selectPermissions
updatePermissions
deletePermissions
eventTriggers
) = AO.object $ [("table", AO.toOrdered table)]
<> catMaybes [ isEnumPair
, configPair
, objectRelationshipsPair
, arrayRelationshipsPair
, computedFieldsPair
, remoteRelationshipsPair
, insertPermissionsPair
, selectPermissionsPair
, updatePermissionsPair
, deletePermissionsPair
, eventTriggersPair
]
where
isEnumPair = if isEnum then Just ("is_enum", AO.toOrdered isEnum) else Nothing
configPair = if config == emptyTableConfig then Nothing
else Just ("configuration" , AO.toOrdered config)
objectRelationshipsPair = listToMaybeOrdPair "object_relationships"
relDefToOrdJSON objectRelationships
arrayRelationshipsPair = listToMaybeOrdPair "array_relationships"
relDefToOrdJSON arrayRelationships
computedFieldsPair = listToMaybeOrdPair "computed_fields"
computedFieldMetaToOrdJSON computedFields
remoteRelationshipsPair = listToMaybeOrdPair "remote_relationships"
AO.toOrdered remoteRelationships
insertPermissionsPair = listToMaybeOrdPair "insert_permissions"
insPermDefToOrdJSON insertPermissions
selectPermissionsPair = listToMaybeOrdPair "select_permissions"
selPermDefToOrdJSON selectPermissions
updatePermissionsPair = listToMaybeOrdPair "update_permissions"
updPermDefToOrdJSON updatePermissions
deletePermissionsPair = listToMaybeOrdPair "delete_permissions"
delPermDefToOrdJSON deletePermissions
eventTriggersPair = listToMaybeOrdPair "event_triggers"
eventTriggerConfToOrdJSON eventTriggers
relDefToOrdJSON :: (ToJSON a) => Relationship.RelDef a -> AO.Value
relDefToOrdJSON (Relationship.RelDef name using comment) =
AO.object $ [ ("name", AO.toOrdered name)
, ("using", AO.toOrdered using)
] <> catMaybes [maybeCommentToMaybeOrdPair comment]
computedFieldMetaToOrdJSON :: ComputedFieldMeta -> AO.Value
computedFieldMetaToOrdJSON (ComputedFieldMeta name definition comment) =
AO.object $ [ ("name", AO.toOrdered name)
, ("definition", AO.toOrdered definition)
] <> catMaybes [maybeCommentToMaybeOrdPair comment]
insPermDefToOrdJSON :: Permission.InsPermDef 'Postgres -> AO.Value
insPermDefToOrdJSON = permDefToOrdJSON insPermToOrdJSON
where
insPermToOrdJSON (Permission.InsPerm check set columns mBackendOnly) =
let columnsPair = ("columns",) . AO.toOrdered <$> columns
backendOnlyPair = ("backend_only",) . AO.toOrdered <$> mBackendOnly
in AO.object $ [("check", AO.toOrdered check)]
<> catMaybes [maybeSetToMaybeOrdPair set, columnsPair, backendOnlyPair]
selPermDefToOrdJSON :: Permission.SelPermDef 'Postgres -> AO.Value
selPermDefToOrdJSON = permDefToOrdJSON selPermToOrdJSON
where
selPermToOrdJSON (Permission.SelPerm columns fltr limit allowAgg computedFieldsPerm) =
AO.object $ catMaybes [ columnsPair
, computedFieldsPermPair
, filterPair
, limitPair
, allowAggPair
]
where
columnsPair = Just ("columns", AO.toOrdered columns)
computedFieldsPermPair = listToMaybeOrdPair "computed_fields" AO.toOrdered computedFieldsPerm
filterPair = Just ("filter", AO.toOrdered fltr)
limitPair = maybeAnyToMaybeOrdPair "limit" AO.toOrdered limit
allowAggPair = if allowAgg
then Just ("allow_aggregations", AO.toOrdered allowAgg)
else Nothing
updPermDefToOrdJSON :: Permission.UpdPermDef 'Postgres -> AO.Value
updPermDefToOrdJSON = permDefToOrdJSON updPermToOrdJSON
where
updPermToOrdJSON (Permission.UpdPerm columns set fltr check) =
AO.object $ [ ("columns", AO.toOrdered columns)
, ("filter", AO.toOrdered fltr)
, ("check", AO.toOrdered check)
] <> catMaybes [maybeSetToMaybeOrdPair set]
delPermDefToOrdJSON :: Permission.DelPermDef 'Postgres -> AO.Value
delPermDefToOrdJSON = permDefToOrdJSON AO.toOrdered
permDefToOrdJSON :: (a -> AO.Value) -> Permission.PermDef a -> AO.Value
permDefToOrdJSON permToOrdJSON (Permission.PermDef role permission comment) =
AO.object $ [ ("role", AO.toOrdered role)
, ("permission", permToOrdJSON permission)
] <> catMaybes [maybeCommentToMaybeOrdPair comment]
eventTriggerConfToOrdJSON :: EventTriggerConf -> AO.Value
eventTriggerConfToOrdJSON (EventTriggerConf name definition webhook webhookFromEnv retryConf headers) =
AO.object $ [ ("name", AO.toOrdered name)
, ("definition", AO.toOrdered definition)
, ("retry_conf", AO.toOrdered retryConf)
] <> catMaybes [ maybeAnyToMaybeOrdPair "webhook" AO.toOrdered webhook
, maybeAnyToMaybeOrdPair "webhook_from_env" AO.toOrdered webhookFromEnv
, headers >>= listToMaybeOrdPair "headers" AO.toOrdered
]
functionsMetadataToOrdJSON :: FunctionsMetadata -> Maybe AO.Value
functionsMetadataToOrdJSON fm =
let withList _ [] = Nothing
withList f list = Just $ f list
functionV2ToOrdJSON (Schema.TrackFunctionV2 function config) =
AO.object $ [("function", AO.toOrdered function)]
<> if config == Schema.emptyFunctionConfig then []
else pure ("configuration", AO.toOrdered config)
in case fm of
FMVersion1 functionsV1 -> withList AO.toOrdered functionsV1
FMVersion2 functionsV2 -> withList (AO.array . map functionV2ToOrdJSON) functionsV2
remoteSchemaQToOrdJSON :: AddRemoteSchemaQuery -> AO.Value
remoteSchemaQToOrdJSON (AddRemoteSchemaQuery name definition comment) =
AO.object $ [ ("name", AO.toOrdered name)
, ("definition", remoteSchemaDefToOrdJSON definition)
] <> catMaybes [maybeCommentToMaybeOrdPair comment]
where
remoteSchemaDefToOrdJSON :: RemoteSchemaDef -> AO.Value
remoteSchemaDefToOrdJSON (RemoteSchemaDef url urlFromEnv headers frwrdClientHdrs timeout) =
AO.object $ catMaybes [ maybeToPair "url" url
, maybeToPair "url_from_env" urlFromEnv
, maybeToPair "timeout_seconds" timeout
, headers >>= listToMaybeOrdPair "headers" AO.toOrdered
] <> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs]
where
maybeToPair n = maybeAnyToMaybeOrdPair n AO.toOrdered
createCollectionToOrdJSON :: Collection.CreateCollection -> AO.Value
createCollectionToOrdJSON (Collection.CreateCollection name definition comment) =
AO.object $ [ ("name", AO.toOrdered name)
, ("definition", AO.toOrdered definition)
] <> catMaybes [maybeCommentToMaybeOrdPair comment]
crontriggerQToOrdJSON :: CronTriggerMetadata -> AO.Value
crontriggerQToOrdJSON
(CronTriggerMetadata name webhook schedule payload retryConf headers includeInMetadata comment) =
AO.object $
[ ("name", AO.toOrdered name)
, ("webhook", AO.toOrdered webhook)
, ("schedule", AO.toOrdered schedule)
, ("include_in_metadata", AO.toOrdered includeInMetadata)
]
<> catMaybes
[ maybeAnyToMaybeOrdPair "payload" AO.toOrdered payload
, maybeAnyToMaybeOrdPair "retry_conf" AO.toOrdered (maybeRetryConfiguration retryConf)
, maybeAnyToMaybeOrdPair "headers" AO.toOrdered (maybeHeader headers)
, maybeAnyToMaybeOrdPair "comment" AO.toOrdered comment]
where
maybeRetryConfiguration retryConfig
| retryConfig == defaultSTRetryConf = Nothing
| otherwise = Just retryConfig
maybeHeader headerConfig
| null headerConfig = Nothing
| otherwise = Just headerConfig
customTypesToOrdJSON :: CustomTypes -> AO.Value
customTypesToOrdJSON (CustomTypes inpObjs objs scalars enums) =
AO.object . catMaybes $ [ listToMaybeOrdPair "input_objects" inputObjectToOrdJSON =<< inpObjs
, listToMaybeOrdPair "objects" objectTypeToOrdJSON =<< objs
, listToMaybeOrdPair "scalars" scalarTypeToOrdJSON =<< scalars
, listToMaybeOrdPair "enums" enumTypeToOrdJSON =<< enums
]
where
inputObjectToOrdJSON :: InputObjectTypeDefinition -> AO.Value
inputObjectToOrdJSON (InputObjectTypeDefinition tyName descM fields) =
AO.object $ [ ("name", AO.toOrdered tyName)
, ("fields", AO.array $ map fieldDefinitionToOrdJSON $ toList fields)
]
<> catMaybes [maybeDescriptionToMaybeOrdPair descM]
where
fieldDefinitionToOrdJSON :: InputObjectFieldDefinition -> AO.Value
fieldDefinitionToOrdJSON (InputObjectFieldDefinition fieldName fieldDescM ty) =
AO.object $ [ ("name", AO.toOrdered fieldName)
, ("type", AO.toOrdered ty)
]
<> catMaybes [maybeDescriptionToMaybeOrdPair fieldDescM]
objectTypeToOrdJSON :: ObjectType -> AO.Value
objectTypeToOrdJSON (ObjectTypeDefinition tyName descM fields rels) =
AO.object $ [ ("name", AO.toOrdered tyName)
, ("fields", AO.array $ map fieldDefinitionToOrdJSON $ toList fields)
]
<> catMaybes [ maybeDescriptionToMaybeOrdPair descM
, maybeAnyToMaybeOrdPair "relationships" AO.toOrdered rels
]
where
fieldDefinitionToOrdJSON :: ObjectFieldDefinition GraphQLType -> AO.Value
fieldDefinitionToOrdJSON (ObjectFieldDefinition fieldName argsValM fieldDescM ty) =
AO.object $ [ ("name", AO.toOrdered fieldName)
, ("type", AO.toOrdered ty)
]
<> catMaybes [ ("arguments", ) . AO.toOrdered <$> argsValM
, maybeDescriptionToMaybeOrdPair fieldDescM
]
scalarTypeToOrdJSON :: ScalarTypeDefinition -> AO.Value
scalarTypeToOrdJSON (ScalarTypeDefinition tyName descM) =
AO.object $ [("name", AO.toOrdered tyName)]
<> catMaybes [maybeDescriptionToMaybeOrdPair descM]
enumTypeToOrdJSON :: EnumTypeDefinition -> AO.Value
enumTypeToOrdJSON (EnumTypeDefinition tyName descM values) =
AO.object $ [ ("name", AO.toOrdered tyName)
, ("values", AO.toOrdered values)
]
<> catMaybes [maybeDescriptionToMaybeOrdPair descM]
actionMetadataToOrdJSON :: ActionMetadata -> AO.Value
actionMetadataToOrdJSON (ActionMetadata name comment definition permissions) =
AO.object $ [ ("name", AO.toOrdered name)
, ("definition", actionDefinitionToOrdJSON definition)
]
<> catMaybes [ maybeCommentToMaybeOrdPair comment
, listToMaybeOrdPair "permissions" permToOrdJSON permissions
]
where
argDefinitionToOrdJSON :: ArgumentDefinition GraphQLType -> AO.Value
argDefinitionToOrdJSON (ArgumentDefinition argName ty descM) =
AO.object $ [ ("name", AO.toOrdered argName)
, ("type", AO.toOrdered ty)
]
<> catMaybes [maybeAnyToMaybeOrdPair "description" AO.toOrdered descM]
actionDefinitionToOrdJSON :: ActionDefinitionInput -> AO.Value
actionDefinitionToOrdJSON (ActionDefinition args outputType actionType
headers frwrdClientHdrs timeout handler) =
let typeAndKind = case actionType of
ActionQuery -> [("type", AO.toOrdered ("query" :: String))]
ActionMutation kind -> [ ("type", AO.toOrdered ("mutation" :: String))
, ("kind", AO.toOrdered kind)]
in
AO.object $ [ ("handler", AO.toOrdered handler)
, ("output_type", AO.toOrdered outputType)
]
<> [("forward_client_headers", AO.toOrdered frwrdClientHdrs) | frwrdClientHdrs]
<> catMaybes [ listToMaybeOrdPair "headers" AO.toOrdered headers
, listToMaybeOrdPair "arguments" argDefinitionToOrdJSON args]
<> typeAndKind
<> bool [("timeout",AO.toOrdered timeout)] mempty (timeout == defaultActionTimeoutSecs)
permToOrdJSON :: ActionPermissionMetadata -> AO.Value
permToOrdJSON (ActionPermissionMetadata role permComment) =
AO.object $ [("role", AO.toOrdered role)] <> catMaybes [maybeCommentToMaybeOrdPair permComment]
-- Utility functions
listToMaybeOrdPair :: Text -> (a -> AO.Value) -> [a] -> Maybe (Text, AO.Value)
listToMaybeOrdPair name f = \case
[] -> Nothing
list -> Just $ (name,) $ AO.array $ map f list
maybeSetToMaybeOrdPair :: Maybe (ColumnValues Value) -> Maybe (Text, AO.Value)
maybeSetToMaybeOrdPair set = set >>= \colVals -> if colVals == HM.empty then Nothing
else Just ("set", AO.toOrdered colVals)
maybeDescriptionToMaybeOrdPair :: Maybe G.Description -> Maybe (Text, AO.Value)
maybeDescriptionToMaybeOrdPair = maybeAnyToMaybeOrdPair "description" AO.toOrdered
maybeCommentToMaybeOrdPair :: Maybe Text -> Maybe (Text, AO.Value)
maybeCommentToMaybeOrdPair = maybeAnyToMaybeOrdPair "comment" AO.toOrdered
maybeAnyToMaybeOrdPair :: Text -> (a -> AO.Value) -> Maybe a -> Maybe (Text, AO.Value)
maybeAnyToMaybeOrdPair name f = fmap ((name,) . f)
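The point of these Maybe-returning helpers is that empty or defaulted sections disappear from the exported JSON rather than serialising as empty arrays, keeping exports minimal and diff-friendly. A small sketch using the helpers above:

-- Builds {"version": 2, "tables": ["author"]}; the empty allowlist
-- contributes Nothing and is dropped by catMaybes.
demoPairs :: AO.Value
demoPairs = AO.object $ catMaybes
  [ Just ("version", AO.toOrdered (2 :: Int))
  , listToMaybeOrdPair "allowlist" AO.toOrdered ([] :: [Text])
  , listToMaybeOrdPair "tables" AO.toOrdered (["author"] :: [Text])
  ]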

View File

@ -50,7 +50,6 @@ import Language.Haskell.TH.Syntax (Lift)
import Hasura.Backends.Postgres.SQL.Types
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DML.Internal hiding (askPermInfo)
import Hasura.RQL.Types
@ -87,21 +86,6 @@ TRUE TRUE (OR NOT-SET) FALSE
TRUE TRUE (OR NOT-SET) TRUE Mutation is shown
-}
-- Insert permission
data InsPerm (b :: Backend)
= InsPerm
{ ipCheck :: !(BoolExp b)
, ipSet :: !(Maybe (ColumnValues Value))
, ipColumns :: !(Maybe PermColSpec)
, ipBackendOnly :: !(Maybe Bool) -- see Note [Backend only permissions]
} deriving (Show, Eq, Lift, Generic)
instance Cacheable (InsPerm 'Postgres)
instance FromJSON (InsPerm 'Postgres) where
parseJSON = genericParseJSON $ aesonDrop 2 snakeCase
instance ToJSON (InsPerm 'Postgres) where
toJSON = genericToJSON (aesonDrop 2 snakeCase) {omitNothingFields=True}
type InsPermDef b = PermDef (InsPerm b)
type CreateInsPerm b = CreatePerm (InsPerm b)
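Given the aesonDrop 2 snakeCase options above, an insert permission is accepted in roughly this shape (a hedged example; table, column, and session-variable names invented):

-- {"check":        {"author_id": {"_eq": "X-Hasura-User-Id"}},
--  "set":          {"author_id": "X-Hasura-User-Id"},
--  "columns":      ["title", "body"],
--  "backend_only": false}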
procSetObj
@ -126,6 +110,64 @@ procSetObj tn fieldInfoMap mObj = do
getDepReason = bool DRSessionVariable DROnType . isStaticValue
class (ToJSON a) => IsPerm a where
permAccessor
:: PermAccessor 'Postgres (PermInfo a)
buildPermInfo
:: (QErrM m, TableCoreInfoRM m)
=> QualifiedTable
-> FieldInfoMap (FieldInfo 'Postgres)
-> PermDef a
-> m (WithDeps (PermInfo a))
getPermAcc1
:: PermDef a -> PermAccessor 'Postgres (PermInfo a)
getPermAcc1 _ = permAccessor
getPermAcc2
:: DropPerm a -> PermAccessor 'Postgres (PermInfo a)
getPermAcc2 _ = permAccessor
addPermP2 :: (IsPerm a, MonadTx m, HasSystemDefined m) => QualifiedTable -> PermDef a -> m ()
addPermP2 tn pd = do
let pt = permAccToType $ getPermAcc1 pd
systemDefined <- askSystemDefined
liftTx $ savePermToCatalog pt tn pd systemDefined
runCreatePerm
:: (UserInfoM m, CacheRWM m, IsPerm a, MonadTx m, HasSystemDefined m)
=> CreatePerm a -> m EncJSON
runCreatePerm (WithTable tn pd) = do
addPermP2 tn pd
let pt = permAccToType $ getPermAcc1 pd
buildSchemaCacheFor $ MOTableObj tn (MTOPerm (_pdRole pd) pt)
pure successMsg
dropPermP1
:: (QErrM m, CacheRM m, IsPerm a)
=> DropPerm a -> m (PermInfo a)
dropPermP1 dp@(DropPerm tn rn) = do
tabInfo <- askTabInfo tn
askPermInfo tabInfo rn $ getPermAcc2 dp
dropPermP2 :: forall a m. (MonadTx m, IsPerm a) => DropPerm a -> m ()
dropPermP2 dp@(DropPerm tn rn) =
liftTx $ dropPermFromCatalog tn rn pt
where
pa = getPermAcc2 dp
pt = permAccToType pa
runDropPerm
:: (IsPerm a, UserInfoM m, CacheRWM m, MonadTx m)
=> DropPerm a -> m EncJSON
runDropPerm defn = do
dropPermP1 defn
dropPermP2 defn
withNewInconsistentObjsCheck buildSchemaCache
return successMsg
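IsPerm is the single extension point for permission kinds: an instance only names its accessor and its info builder, and runCreatePerm/runDropPerm supply the catalog and cache plumbing. A purely illustrative instance (FooPerm, PAFoo, and buildFooPermInfo are made-up names):

instance IsPerm (FooPerm 'Postgres) where
  permAccessor  = PAFoo
  buildPermInfo = buildFooPermInfo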
buildInsPermInfo
:: (QErrM m, TableCoreInfoRM m)
=> QualifiedTable
@ -158,28 +200,6 @@ instance IsPerm (InsPerm 'Postgres) where
permAccessor = PAInsert
buildPermInfo = buildInsPermInfo
-- Select constraint
data SelPerm (b :: Backend)
= SelPerm
{ spColumns :: !PermColSpec -- ^ Allowed columns
, spFilter :: !(BoolExp b) -- ^ Filter expression
, spLimit :: !(Maybe Int) -- ^ Limit value
, spAllowAggregations :: !Bool -- ^ Allow aggregation
, spComputedFields :: ![ComputedFieldName] -- ^ Allowed computed fields
} deriving (Show, Eq, Lift, Generic)
instance Cacheable (SelPerm 'Postgres)
instance ToJSON (SelPerm 'Postgres) where
toJSON = genericToJSON (aesonDrop 2 snakeCase) {omitNothingFields=True}
instance FromJSON (SelPerm 'Postgres) where
parseJSON = withObject "SelPerm" $ \o ->
SelPerm
<$> o .: "columns"
<*> o .: "filter"
<*> o .:? "limit"
<*> o .:? "allow_aggregations" .!= False
<*> o .:? "computed_fields" .!= []
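Thanks to the defaults in this parser, the smallest accepted select permission needs only the two required keys (illustrative):

-- {"columns": "*", "filter": {}}
-- 'limit' defaults to Nothing, 'allow_aggregations' to false,
-- 'computed_fields' to [].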
buildSelPermInfo
:: (QErrM m, TableCoreInfoRM m)
=> QualifiedTable
@ -223,7 +243,6 @@ buildSelPermInfo tn fieldInfoMap sp = withPathK "permission" $ do
computedFields = spComputedFields sp
autoInferredErr = "permissions for relationships are automatically inferred"
type SelPermDef b = PermDef (SelPerm b)
type CreateSelPerm b = CreatePerm (SelPerm b)
-- TODO see TODO for PermInfo above.
@ -234,28 +253,8 @@ instance IsPerm (SelPerm 'Postgres) where
buildPermInfo tn fieldInfoMap (PermDef _ a _) =
buildSelPermInfo tn fieldInfoMap a
-- Update constraint
data UpdPerm b
= UpdPerm
{ ucColumns :: !PermColSpec -- Allowed columns
, ucSet :: !(Maybe (ColumnValues Value)) -- Preset columns
, ucFilter :: !(BoolExp b) -- Filter expression (applied before update)
, ucCheck :: !(Maybe (BoolExp b))
-- ^ Check expression, which must be true after update.
-- This is optional because we don't want to break the v1 API
-- but Nothing should be equivalent to the expression which always
-- returns true.
} deriving (Show, Eq, Lift, Generic)
instance Cacheable (UpdPerm 'Postgres)
instance FromJSON (UpdPerm 'Postgres) where
parseJSON = genericParseJSON $ aesonDrop 2 snakeCase
instance ToJSON (UpdPerm 'Postgres) where
toJSON = genericToJSON (aesonDrop 2 snakeCase) {omitNothingFields=True}
type UpdPermDef b = PermDef (UpdPerm b)
type CreateUpdPerm b = CreatePerm (UpdPerm b)
buildUpdPermInfo
:: (QErrM m, TableCoreInfoRM m)
=> QualifiedTable
@ -294,17 +293,6 @@ instance IsPerm (UpdPerm 'Postgres) where
buildPermInfo tn fieldInfoMap (PermDef _ a _) =
buildUpdPermInfo tn fieldInfoMap a
-- Delete permission
data DelPerm (b :: Backend)
= DelPerm { dcFilter :: !(BoolExp b) }
deriving (Show, Eq, Lift, Generic)
instance Cacheable (DelPerm 'Postgres)
instance FromJSON (DelPerm 'Postgres) where
parseJSON = genericParseJSON $ aesonDrop 2 snakeCase
instance ToJSON (DelPerm 'Postgres) where
toJSON = genericToJSON (aesonDrop 2 snakeCase) {omitNothingFields=True}
type DelPermDef b = PermDef (DelPerm b)
type CreateDelPerm b = CreatePerm (DelPerm b)
buildDelPermInfo

View File

@ -21,27 +21,11 @@ import Language.Haskell.TH.Syntax (Lift)
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.Session
data PermColSpec
= PCStar
| PCCols ![PGCol]
deriving (Show, Eq, Lift, Generic)
instance Cacheable PermColSpec
instance FromJSON PermColSpec where
parseJSON (String "*") = return PCStar
parseJSON x = PCCols <$> parseJSON x
instance ToJSON PermColSpec where
toJSON (PCCols cols) = toJSON cols
toJSON PCStar = "*"
convColSpec :: FieldInfoMap (FieldInfo 'Postgres) -> PermColSpec -> [PGCol]
convColSpec _ (PCCols cols) = cols
convColSpec cim PCStar = map pgiColumn $ getCols cim
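PermColSpec serialises either as the literal "*" or as a plain list of columns, and convColSpec expands the star against the table's fields. A round-trip sketch (assumes this module in scope and an IsString instance for PGCol):

{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (decode, toJSON)

colSpecDemo :: IO ()
colSpecDemo = do
  print $ toJSON PCStar                        -- String "*"
  print $ toJSON (PCCols ["id", "name"])       -- Array ["id","name"]
  print (decode "\"*\"" :: Maybe PermColSpec)  -- Just PCStar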
@ -134,25 +118,6 @@ dropPermFromCatalog (QualifiedObject sn tn) rn pt =
type CreatePerm a = WithTable (PermDef a)
data PermDef a =
PermDef
{ pdRole :: !RoleName
, pdPermission :: !a
, pdComment :: !(Maybe Text)
} deriving (Show, Eq, Lift, Generic)
instance (Cacheable a) => Cacheable (PermDef a)
$(deriveFromJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''PermDef)
instance (ToJSON a) => ToJSON (PermDef a) where
toJSON = object . toAesonPairs
instance (ToJSON a) => ToAesonPairs (PermDef a) where
toAesonPairs (PermDef rn perm comment) =
[ "role" .= rn
, "permission" .= perm
, "comment" .= comment
]
data CreatePermP1Res a
= CreatePermP1Res
{ cprInfo :: !a
@ -236,61 +201,3 @@ data DropPerm a
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''DropPerm)
type family PermInfo a = r | r -> a
class (ToJSON a) => IsPerm a where
permAccessor
:: PermAccessor 'Postgres (PermInfo a)
buildPermInfo
:: (QErrM m, TableCoreInfoRM m)
=> QualifiedTable
-> FieldInfoMap (FieldInfo 'Postgres)
-> PermDef a
-> m (WithDeps (PermInfo a))
getPermAcc1
:: PermDef a -> PermAccessor 'Postgres (PermInfo a)
getPermAcc1 _ = permAccessor
getPermAcc2
:: DropPerm a -> PermAccessor 'Postgres (PermInfo a)
getPermAcc2 _ = permAccessor
addPermP2 :: (IsPerm a, MonadTx m, HasSystemDefined m) => QualifiedTable -> PermDef a -> m ()
addPermP2 tn pd = do
let pt = permAccToType $ getPermAcc1 pd
systemDefined <- askSystemDefined
liftTx $ savePermToCatalog pt tn pd systemDefined
runCreatePerm
:: (UserInfoM m, CacheRWM m, IsPerm a, MonadTx m, HasSystemDefined m)
=> CreatePerm a -> m EncJSON
runCreatePerm (WithTable tn pd) = do
addPermP2 tn pd
let pt = permAccToType $ getPermAcc1 pd
buildSchemaCacheFor $ MOTableObj tn (MTOPerm (pdRole pd) pt)
pure successMsg
dropPermP1
:: (QErrM m, CacheRM m, IsPerm a)
=> DropPerm a -> m (PermInfo a)
dropPermP1 dp@(DropPerm tn rn) = do
tabInfo <- askTabInfo tn
askPermInfo tabInfo rn $ getPermAcc2 dp
dropPermP2 :: forall a m. (MonadTx m, IsPerm a) => DropPerm a -> m ()
dropPermP2 dp@(DropPerm tn rn) =
liftTx $ dropPermFromCatalog tn rn pt
where
pa = getPermAcc2 dp
pt = permAccToType pa
runDropPerm
:: (IsPerm a, UserInfoM m, CacheRWM m, MonadTx m)
=> DropPerm a -> m EncJSON
runDropPerm defn = do
dropPermP1 defn
dropPermP2 defn
withNewInconsistentObjsCheck buildSchemaCache
return successMsg

View File

@ -8,7 +8,6 @@ module Hasura.RQL.DDL.Relationship
, delRelFromCatalog
, runSetRelComment
, module Hasura.RQL.DDL.Relationship.Types
)
where
@ -26,7 +25,6 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.EncJSON
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Permission (purgePerm)
import Hasura.RQL.DDL.Relationship.Types
import Hasura.RQL.Types
runCreateRelationship
@ -34,7 +32,7 @@ runCreateRelationship
=> RelType -> WithTable (RelDef a) -> m EncJSON
runCreateRelationship relType (WithTable tableName relDef) = do
insertRelationshipToCatalog tableName relType relDef
buildSchemaCacheFor $ MOTableObj tableName (MTORel (rdName relDef) relType)
buildSchemaCacheFor $ MOTableObj tableName (MTORel (_rdName relDef) relType)
pure successMsg
insertRelationshipToCatalog
@ -68,7 +66,7 @@ runDropRel (DropRel qt rn cascade) = do
_ <- askRelType (_tciFieldInfoMap tabInfo) rn ""
sc <- askSchemaCache
let depObjs = getDependentObjs sc (SOTableObj qt $ TORel rn)
when (depObjs /= [] && not (or cascade)) $ reportDeps depObjs
when (depObjs /= [] && not cascade) $ reportDeps depObjs
pure depObjs
delRelFromCatalog

View File

@ -6,7 +6,6 @@ import Data.Text.Extended
import Hasura.Backends.Postgres.SQL.Types
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Relationship.Types
import Hasura.RQL.DDL.Schema (renameRelInCatalog)
import Hasura.RQL.Types

View File

@ -53,8 +53,7 @@ import Hasura.RQL.DDL.Schema.Catalog
import Hasura.RQL.DDL.Schema.Diff
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Utils (clearHdbViews)
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (fmFunction, tmTable)
import Hasura.RQL.Types.Catalog
import Hasura.Server.Version (HasVersion)
@ -333,7 +332,7 @@ buildSchemaCacheRule env = proc (catalogMetadata, invalidationKeys) -> do
(| withRecordInconsistency (
(| modifyErrA (do
etc <- bindErrorA -< decodeValue configuration
(info, dependencies) <- bindErrorA -< subTableP2Setup env qt etc
(info, dependencies) <- bindErrorA -< mkEventTriggerInfo env qt etc
let tableColumns = M.mapMaybe (^? _FIColumn) (_tciFieldInfoMap tableInfo)
recreateViewIfNeeded -< (qt, tableColumns, trn, etcDefinition etc)
recordDependencies -< (metadataObject, schemaObjectId, dependencies)
@ -409,8 +408,11 @@ buildSchemaCacheRule env = proc (catalogMetadata, invalidationKeys) -> do
-- if not, incorporates them into the schema cache.
withMetadataCheck :: (MonadTx m, CacheRWM m, HasSQLGenCtx m) => Bool -> m a -> m a
withMetadataCheck cascade action = do
-- Drop hdb_views so no interference is caused to the sql query
liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
-- Drop event triggers so they do not interfere with the SQL query
preActionTables <- scTables <$> askSchemaCache
forM_ (M.elems preActionTables) $ \tableInfo -> do
let eventTriggers = _tiEventTriggerInfoMap tableInfo
forM_ (M.keys eventTriggers) (liftTx . delTriggerQ)
-- Get the metadata before the sql query, everything, need to filter this
oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
@ -465,7 +467,7 @@ withMetadataCheck cascade action = do
buildSchemaCache
postSc <- askSchemaCache
-- Recreate event triggers in hdb_views
-- Recreate event triggers in hdb_catalog
forM_ (M.elems $ scTables postSc) $ \(TableInfo coreInfo _ eventTriggers) -> do
let table = _tciName coreInfo
columns = getCols $ _tciFieldInfoMap coreInfo
@ -485,9 +487,9 @@ withMetadataCheck cascade action = do
sc <- askSchemaCache
for_ alteredTables $ \(oldQtn, tableDiff) -> do
ti <- case M.lookup oldQtn $ scTables sc of
Just ti -> return ti
Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
ti <- onNothing
(M.lookup oldQtn $ scTables sc)
(throw500 $ "old table metadata not found in cache : " <>> oldQtn)
processTableChanges (_tiCoreInfo ti) tableDiff
where
SchemaDiff droppedTables alteredTables = schemaDiff

View File

@ -33,7 +33,8 @@ import Data.Aeson.TH
import Data.List.Extended (duplicates)
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (ConstraintName, fmFunction,
tmComputedFields, tmTable)
import Hasura.RQL.Types.Catalog
data FunctionMeta
@ -82,7 +83,7 @@ data ComputedFieldDiff
, _cfdOverloaded :: [(ComputedFieldName, QualifiedFunction)]
} deriving (Show, Eq)
data TableDiff (b :: Backend)
data TableDiff (b :: BackendType)
= TableDiff
{ _tdNewName :: !(Maybe QualifiedTable)
, _tdDroppedCols :: ![Column b]
@ -166,7 +167,7 @@ getTableChangeDeps tn tableDiff = do
TableDiff _ droppedCols _ _ droppedFKeyConstraints computedFieldDiff _ _ = tableDiff
droppedComputedFieldDeps = map (SOTableObj tn . TOComputedField) $ _cfdDropped computedFieldDiff
data SchemaDiff (b :: Backend)
data SchemaDiff (b :: BackendType)
= SchemaDiff
{ _sdDroppedTables :: ![QualifiedTable]
, _sdAlteredTables :: ![(QualifiedTable, TableDiff b)]

View File

@ -14,8 +14,6 @@ import qualified Database.PG.Query as Q
import Control.Lens
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Text.Extended
import Language.Haskell.TH.Syntax (Lift)
@ -23,29 +21,10 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.Backends.Postgres.SQL.Types
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.RQL.Types
import Hasura.Server.Utils (englishList, makeReasonMessage)
data RawFunctionInfo
= RawFunctionInfo
{ rfiHasVariadic :: !Bool
, rfiFunctionType :: !FunctionType
, rfiReturnTypeSchema :: !SchemaName
, rfiReturnTypeName :: !PGScalarType
, rfiReturnTypeType :: !PGTypeKind
, rfiReturnsSet :: !Bool
, rfiInputArgTypes :: ![QualifiedPGType]
, rfiInputArgNames :: ![FunctionArgName]
, rfiDefaultArgs :: !Int
, rfiReturnsTable :: !Bool
, rfiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq, Generic)
instance NFData RawFunctionInfo
instance Cacheable RawFunctionInfo
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFunctionInfo)
mkFunctionArgs :: Int -> [QualifiedPGType] -> [FunctionArgName] -> [FunctionArg]
mkFunctionArgs defArgsNo tys argNames =
bool withNames withNoNames $ null argNames
@ -185,17 +164,6 @@ newtype TrackFunction
{ tfName :: QualifiedFunction}
deriving (Show, Eq, FromJSON, ToJSON, Lift)
data FunctionConfig
= FunctionConfig
{ _fcSessionArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Generic, Lift)
instance NFData FunctionConfig
instance Cacheable FunctionConfig
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields = True} ''FunctionConfig)
emptyFunctionConfig :: FunctionConfig
emptyFunctionConfig = FunctionConfig Nothing
-- | Track function, Phase 1:
-- Validate function tracking operation. Fails if function is already being
-- tracked, or if a table with the same name is being tracked.
@ -244,19 +212,6 @@ runTrackFunc (TrackFunction qf)= do
trackFunctionP1 qf
trackFunctionP2 qf emptyFunctionConfig
data TrackFunctionV2
= TrackFunctionV2
{ _tfv2Function :: !QualifiedFunction
, _tfv2Configuration :: !FunctionConfig
} deriving (Show, Eq, Lift, Generic)
$(deriveToJSON (aesonDrop 5 snakeCase) ''TrackFunctionV2)
instance FromJSON TrackFunctionV2 where
parseJSON = withObject "Object" $ \o ->
TrackFunctionV2
<$> o .: "function"
<*> o .:? "configuration" .!= emptyFunctionConfig
runTrackFunctionV2
:: ( QErrM m, CacheRWM m, HasSystemDefined m
, MonadTx m

View File

@ -28,7 +28,6 @@ import qualified Hasura.RQL.DDL.RemoteRelationship as RR
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.DDL.Permission
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Relationship.Types
import Hasura.RQL.DDL.Schema.Catalog
import Hasura.RQL.Types
import Hasura.Session
@ -389,7 +388,7 @@ updateColInRemoteRelationship remoteRelationshipName renameCol = do
) $ fieldCalls
liftTx $ RR.updateRemoteRelInCatalog (RemoteRelationship remoteRelationshipName qt modifiedHasuraFlds remoteSchemaName (RemoteFields modifiedFieldCalls))
where
parseGraphQLName txt = maybe (throw400 ParseFailed $ errMsg) pure $ G.mkName txt
parseGraphQLName txt = onNothing (G.mkName txt) $ throw400 ParseFailed $ errMsg
where
errMsg = txt <> " is not a valid GraphQL name"

View File

@ -55,7 +55,7 @@ import Hasura.RQL.DDL.Schema.Catalog
import Hasura.RQL.DDL.Schema.Diff
import Hasura.RQL.DDL.Schema.Enum
import Hasura.RQL.DDL.Schema.Rename
import Hasura.RQL.Types
import Hasura.RQL.Types hiding (fmFunction)
import Hasura.RQL.Types.Catalog
import Hasura.Server.Utils

View File

@ -1,20 +0,0 @@
module Hasura.RQL.DDL.Utils
( clearHdbViews
) where
import qualified Database.PG.Query as Q
import Hasura.Prelude
clearHdbViews :: Q.Tx ()
clearHdbViews = Q.multiQ (Q.fromText clearHdbViewsFunc)
clearHdbViewsFunc :: Text
clearHdbViewsFunc =
"DO $$ DECLARE \
\ r RECORD; \
\ BEGIN \
\ FOR r IN (SELECT routine_name FROM information_schema.routines WHERE specific_schema = 'hdb_views' ORDER BY routine_name) LOOP \
\ EXECUTE 'DROP FUNCTION hdb_views.' || quote_ident(r.routine_name) || '() CASCADE'; \
\ END LOOP; \
\ END $$; "

View File

@ -21,6 +21,8 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.BoolExp
import Hasura.EncJSON
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types
import Hasura.SQL.Types

View File

@ -25,6 +25,7 @@ import Hasura.Backends.Postgres.Execute.Mutation
import Hasura.Backends.Postgres.Translate.Returning
import Hasura.EncJSON
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Types
import Hasura.RQL.IR.Delete
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)

View File

@ -18,10 +18,10 @@ import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.Connection
import Hasura.Backends.Postgres.Execute.Mutation
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.Insert
import Hasura.Backends.Postgres.Translate.Returning
import Hasura.EncJSON
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Types
import Hasura.RQL.IR.Insert
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
@ -61,6 +61,71 @@ convObj prepFn defInsVals setInsVals fieldInfoMap insObj = do
<> " for role " <>> roleName
validateInpCols :: (MonadError QErr m) => [PGCol] -> [PGCol] -> m ()
validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol ->
unless (inpCol `elem` updColsPerm) $ throw400 ValidationFailed $
"column " <> inpCol <<> " is not updatable"
buildConflictClause
:: (UserInfoM m, QErrM m)
=> SessVarBldr 'Postgres m
-> TableInfo 'Postgres
-> [PGCol]
-> OnConflict
-> m (ConflictClauseP1 'Postgres S.SQLExp)
buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act) =
case (mTCol, mTCons, act) of
(Nothing, Nothing, CAIgnore) -> return $ CP1DoNothing Nothing
(Just col, Nothing, CAIgnore) -> do
validateCols col
return $ CP1DoNothing $ Just $ CTColumn $ getPGCols col
(Nothing, Just cons, CAIgnore) -> do
validateConstraint cons
return $ CP1DoNothing $ Just $ CTConstraint cons
(Nothing, Nothing, CAUpdate) -> throw400 UnexpectedPayload
"Expecting 'constraint' or 'constraint_on' when the 'action' is 'update'"
(Just col, Nothing, CAUpdate) -> do
validateCols col
(updFltr, preSet) <- getUpdPerm
resolvedUpdFltr <- convAnnBoolExpPartialSQL sessVarBldr updFltr
resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) preSet
return $ CP1Update (CTColumn $ getPGCols col) inpCols resolvedPreSet resolvedUpdFltr
(Nothing, Just cons, CAUpdate) -> do
validateConstraint cons
(updFltr, preSet) <- getUpdPerm
resolvedUpdFltr <- convAnnBoolExpPartialSQL sessVarBldr updFltr
resolvedPreSet <- mapM (convPartialSQLExp sessVarBldr) preSet
return $ CP1Update (CTConstraint cons) inpCols resolvedPreSet resolvedUpdFltr
(Just _, Just _, _) -> throw400 UnexpectedPayload
"'constraint' and 'constraint_on' cannot be set at a time"
where
coreInfo = _tiCoreInfo tableInfo
fieldInfoMap = _tciFieldInfoMap coreInfo
-- toSQLBool = toSQLBoolExp (S.mkQual $ _tciName coreInfo)
validateCols c = do
let targetcols = getPGCols c
void $ withPathK "constraint_on" $ indexedForM targetcols $
\pgCol -> askPGType fieldInfoMap pgCol ""
validateConstraint c = do
let tableConsNames = maybe [] toList $
fmap _cName <$> tciUniqueOrPrimaryKeyConstraints coreInfo
withPathK "constraint" $
unless (c `elem` tableConsNames) $
throw400 Unexpected $ "constraint " <> getConstraintTxt c
<<> " for table " <> _tciName coreInfo
<<> " does not exist"
getUpdPerm = do
upi <- askUpdPermInfo tableInfo
let updFiltr = upiFilter upi
preSet = upiSet upi
updCols = HS.toList $ upiCols upi
validateInpCols inpCols updCols
return (updFiltr, preSet)
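The case analysis above admits exactly these 'on_conflict' combinations (payloads illustrative, constraint names invented):

-- {"action": "ignore"}                              -> CP1DoNothing Nothing
-- {"constraint_on": ["id"], "action": "ignore"}     -> CP1DoNothing (Just (CTColumn ...))
-- {"constraint_on": ["id"], "action": "update"}     -> CP1Update (CTColumn ...) ...
-- {"constraint": "users_pkey", "action": "update"}  -> CP1Update (CTConstraint ...) ...
-- {"action": "update"} alone                        -> rejected: needs a constraint
-- both "constraint" and "constraint_on"             -> rejected: mutually exclusive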
convInsertQuery
:: (UserInfoM m, QErrM m, CacheRM m)
=> (Value -> m [InsObj])

View File

@ -158,7 +158,7 @@ fetchRelTabInfo refTabName =
-- Internal error
modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName
type SessVarBldr b m = PGType (ScalarType b) -> SessionVariable -> m S.SQLExp
type SessVarBldr b m = PGType (ScalarType b) -> SessionVariable -> m (SQLExp b)
fetchRelDet
:: (UserInfoM m, QErrM m, CacheRM m)
@ -211,7 +211,7 @@ convPartialSQLExp
:: (Applicative f)
=> SessVarBldr backend f
-> PartialSQLExp backend
-> f S.SQLExp
-> f (SQLExp backend)
convPartialSQLExp f = \case
PSESQLExp sqlExp -> pure sqlExp
PSESessVar colTy sessionVariable -> f colTy sessionVariable
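With the generalised signature, a session-variable builder yields the backend's SQLExp. The simplest useful builder, e.g. for tests, resolves every variable to a constant literal (a sketch; relies on S.SELit, Postgres's string-literal constructor):

{-# LANGUAGE OverloadedStrings #-}

-- Resolve every session variable to the same constant string.
constSessVarBldr :: Applicative f => SessVarBldr 'Postgres f
constSessVarBldr _colTy _sessVar = pure $ S.SELit "session-value"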

View File

@ -23,6 +23,8 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.Select
import Hasura.EncJSON
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Types
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.IR.Select
import Hasura.RQL.Types
import Hasura.SQL.Types
@ -35,7 +37,7 @@ type SelectQExt b = SelectG (ExtCol b) (BoolExp b) Int
-- it is specific to this module; however the generalization work was
-- already done, and there's no particular reason to force this to be
-- specific.
data ExtCol (b :: Backend)
data ExtCol (b :: BackendType)
= ECSimple !(Column b)
| ECRel !RelName !(Maybe RelName) !(SelectQExt b)
deriving instance Lift (ExtCol 'Postgres)

View File

@ -1,14 +1,7 @@
module Hasura.RQL.Types.DML
( BoolExp(..)
, ColExp(..)
, DMLQuery(..)
, OrderType(..)
, NullsOrder(..)
module Hasura.RQL.DML.Types
( OrderByExp(..)
, OrderByExp(..)
, OrderByItemG(..)
, OrderByItem
, OrderByCol(..)
, DMLQuery(..)
, SelectG(..)
, selectGToPairs
@ -41,54 +34,57 @@ module Hasura.RQL.Types.DML
import Hasura.Prelude
import qualified Data.Attoparsec.Text as AT
import qualified Data.Attoparsec.Text as Atto
import qualified Data.Attoparsec.Types as AttoT
import qualified Data.HashMap.Strict as M
import qualified Data.Text as T
import Control.Lens.TH
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Incremental (Cacheable)
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Common hiding (ConstraintName)
import Hasura.SQL.Backend
data ColExp
= ColExp
{ ceCol :: !FieldName
, ceVal :: !Value
} deriving (Show, Eq, Lift, Data, Generic)
instance NFData ColExp
instance Cacheable ColExp
newtype OrderByExp
= OrderByExp { getOrderByItems :: [OrderByItem 'Postgres] }
deriving (Show, Eq, Lift, ToJSON)
newtype BoolExp (b :: Backend)
= BoolExp { unBoolExp :: GBoolExp b ColExp }
deriving (Show, Eq, Lift, Generic, NFData, Cacheable)
$(makeWrapped ''BoolExp)
instance ToJSON (BoolExp 'Postgres) where
toJSON (BoolExp gBoolExp) =
gBoolExpToJSON f gBoolExp
instance FromJSON OrderByExp where
parseJSON = \case
String s -> OrderByExp . pure <$> parseString s
Object o -> OrderByExp . pure <$> parseObject o
Array a -> OrderByExp <$> for (toList a) \case
String s -> parseString s
Object o -> parseObject o
_ -> fail "expecting an object or string for order by"
_ -> fail "Expecting : array/string/object"
where
f (ColExp k v) =
(getFieldNameTxt k, v)
parseString s = AT.parseOnly orderByParser s `onLeft`
const (fail "string format for 'order_by' entry : {+/-}column Eg : +posted")
parseObject o =
OrderByItemG
<$> o .:? "type"
<*> o .: "column"
<*> o .:? "nulls"
orderByParser =
OrderByItemG
<$> orderTypeParser
<*> orderColumnParser
<*> pure Nothing
orderTypeParser = choice
[ "+" *> pure (Just PG.OTAsc)
, "-" *> pure (Just PG.OTDesc)
, pure Nothing
]
orderColumnParser = AT.takeText >>= orderByColFromTxt
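Taken together, the parsers above keep accepting every historical 'order_by' form (inputs illustrative):

-- "+posted"                                ascending on column posted
-- "-posted"                                descending
-- "posted"                                 no explicit order type
-- {"column": "posted", "type": "desc", "nulls": "first"}
-- ["+posted", {"column": "author.name"}]   mixed array form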
instance FromJSON (BoolExp 'Postgres) where
parseJSON =
fmap BoolExp . parseGBoolExp f
where
f (k, v) = ColExp (FieldName k) <$> parseJSON v
data DMLQuery a
= DMLQuery !QualifiedTable a
@ -102,130 +98,6 @@ instance (FromJSON a) => FromJSON (DMLQuery a) where
parseJSON _ =
fail "Expected an object for query"
newtype OrderType
= OrderType { unOrderType :: S.OrderType }
deriving (Show, Eq, Lift, Generic)
instance Hashable OrderType
instance FromJSON OrderType where
parseJSON =
fmap OrderType . f
where f = $(mkParseJSON
defaultOptions{constructorTagModifier = snakeCase . drop 2}
''S.OrderType)
newtype NullsOrder
= NullsOrder { unNullsOrder :: S.NullsOrder }
deriving (Show, Eq, Lift, Generic)
instance Hashable NullsOrder
instance FromJSON NullsOrder where
parseJSON =
fmap NullsOrder . f
where f = $(mkParseJSON
defaultOptions{constructorTagModifier = snakeCase . drop 1}
''S.NullsOrder)
instance ToJSON OrderType where
toJSON =
f . unOrderType
where f = $(mkToJSON
defaultOptions{constructorTagModifier = snakeCase . drop 2}
''S.OrderType)
instance ToJSON NullsOrder where
toJSON =
f . unNullsOrder
where f = $(mkToJSON
defaultOptions{constructorTagModifier = snakeCase . drop 1}
''S.NullsOrder)
data OrderByCol
= OCPG !FieldName
| OCRel !FieldName !OrderByCol
deriving (Show, Eq, Lift)
orderByColToTxt :: OrderByCol -> Text
orderByColToTxt = \case
OCPG pgCol -> getFieldNameTxt pgCol
OCRel rel obCol -> getFieldNameTxt rel <> "." <> orderByColToTxt obCol
instance ToJSON OrderByCol where
toJSON = toJSON . orderByColToTxt
orderByColFromToks
:: (MonadFail m)
=> [Text] -> m OrderByCol
orderByColFromToks toks = do
when (any T.null toks) $ fail "col/rel cannot be empty"
case toks of
[] -> fail "failed to parse an OrderByCol: found empty cols"
x:xs -> return $ go (FieldName x) xs
where
go fld = \case
[] -> OCPG fld
x:xs -> OCRel fld $ go (FieldName x) xs
orderByColFromTxt
:: (MonadFail m)
=> Text -> m OrderByCol
orderByColFromTxt =
orderByColFromToks . T.split (=='.')
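orderByColFromTxt splits on dots and nests relationship hops left to right; a behaviour sketch with m instantiated to Maybe:

-- orderByColFromTxt "posted"      == Just (OCPG (FieldName "posted"))
-- orderByColFromTxt "author.name" == Just (OCRel (FieldName "author")
--                                            (OCPG (FieldName "name")))
-- orderByColFromTxt "a..b"        == Nothing  -- empty segment rejected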
instance FromJSON OrderByCol where
parseJSON = \case
(String t) -> orderByColFromToks $ T.split (=='.') t
v -> parseJSON v >>= orderByColFromToks
data OrderByItemG a
= OrderByItemG
{ obiType :: !(Maybe OrderType)
, obiColumn :: !a
, obiNulls :: !(Maybe NullsOrder)
} deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Generic)
instance (Hashable a) => Hashable (OrderByItemG a)
type OrderByItem = OrderByItemG OrderByCol
$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''OrderByItemG)
-- Can be either a string or an object
instance FromJSON OrderByItem where
parseJSON (String t) =
case Atto.parseOnly orderByParser t of
Right r -> return r
Left _ ->
fail "string format for 'order_by' entry : {+/-}column Eg : +posted"
parseJSON (Object o) =
OrderByItemG
<$> o .:? "type"
<*> o .: "column"
<*> o .:? "nulls"
parseJSON _ = fail "expecting an object or string for order by"
newtype OrderByExp
= OrderByExp { getOrderByItems :: [OrderByItem] }
deriving (Show, Eq, ToJSON, Lift)
instance FromJSON OrderByExp where
parseJSON v@(String _) =
OrderByExp . (:[]) <$> parseJSON v
parseJSON v@(Array _) =
OrderByExp <$> parseJSON v
parseJSON v@(Object _) =
OrderByExp . (:[]) <$> parseJSON v
parseJSON _ =
fail "Expecting : array/string/object"
orderByParser :: AttoT.Parser Text OrderByItem
orderByParser =
OrderByItemG <$> otP <*> colP <*> return Nothing
where
otP = ("+" *> return (Just $ OrderType S.OTAsc))
<|> ("-" *> return (Just $ OrderType S.OTDesc))
<|> return Nothing
colP = Atto.takeText >>= orderByColFromTxt
data SelectG a b c
= SelectG
@ -265,7 +137,7 @@ parseWildcard =
fromList = foldr1 (\_ x -> StarDot x)
-- Columns in RQL
data SelCol b
data SelCol (b :: BackendType)
= SCStar !Wildcard
| SCExtSimple !(Column b)
| SCExtRel !RelName !(Maybe RelName) !(SelectQ b)

View File

@ -22,6 +22,8 @@ import Hasura.Backends.Postgres.SQL.Types
import Hasura.Backends.Postgres.Translate.Returning
import Hasura.EncJSON
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.Update
import Hasura.RQL.Instances ()
import Hasura.RQL.Types

View File

@ -1,6 +1,8 @@
{-# LANGUAGE UndecidableInstances #-}
module Hasura.RQL.IR.BoolExp
( GBoolExp(..)
( BoolExp(..)
, ColExp(..)
, GBoolExp(..)
, gBoolExpTrue
, gBoolExpToJSON
, parseGBoolExp
@ -42,8 +44,6 @@ import Hasura.Prelude
import qualified Data.Aeson.Types as J
import qualified Data.HashMap.Strict as M
import qualified Hasura.Backends.Postgres.SQL.DML as S
import Control.Lens.Plated
import Control.Lens.TH
import Data.Aeson
@ -54,7 +54,8 @@ import Data.Typeable
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import Hasura.Backends.Postgres.SQL.Types
import qualified Hasura.Backends.Postgres.SQL.Types as PG
import Hasura.Incremental (Cacheable)
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
@ -63,15 +64,28 @@ import Hasura.SQL.Types
import Hasura.Session
data GExists (b :: Backend) a
data ColExp
= ColExp
{ ceCol :: !FieldName
, ceVal :: !Value
} deriving (Show, Eq, Lift, Data, Generic)
instance NFData ColExp
instance Cacheable ColExp
data GExists (b :: BackendType) a
= GExists
{ _geTable :: !QualifiedTable
{ _geTable :: !(TableName b)
, _geWhere :: !(GBoolExp b a)
} deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Data, Generic)
instance (NFData a) => NFData (GExists b a)
instance (Data a, Typeable b) => Plated (GExists b a)
instance (Cacheable a) => Cacheable (GExists b a)
instance (Hashable a) => Hashable (GExists b a)
} deriving (Functor, Foldable, Traversable, Generic)
deriving instance (Backend b, Show a) => Show (GExists b a)
deriving instance (Backend b, Eq a) => Eq (GExists b a)
deriving instance (Backend b, Lift a) => Lift (GExists b a)
deriving instance (Backend b, Typeable a, Data a) => Data (GExists b a)
instance (Backend b, NFData a) => NFData (GExists b a)
instance (Backend b, Data a) => Plated (GExists b a)
instance (Backend b, Cacheable a) => Cacheable (GExists b a)
instance (Backend b, Hashable a) => Hashable (GExists b a)
gExistsToJSON :: (a -> (Text, Value)) -> GExists 'Postgres a -> Value
gExistsToJSON f (GExists qt wh) =
@ -88,17 +102,18 @@ parseGExists f = \case
GExists qt <$> parseGBoolExp f wh
_ -> fail "expecting an Object for _exists expression"
data GBoolExp (b :: Backend) a
data GBoolExp (b :: BackendType) a
= BoolAnd ![GBoolExp b a]
| BoolOr ![GBoolExp b a]
| BoolNot !(GBoolExp b a)
| BoolExists !(GExists b a)
| BoolFld !a
deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Data, Generic)
instance (NFData a) => NFData (GBoolExp b a)
instance (Data a, Typeable b) => Plated (GBoolExp b a)
instance (Cacheable a) => Cacheable (GBoolExp b a)
instance (Hashable a) => Hashable (GBoolExp b a)
instance (Backend b, NFData a) => NFData (GBoolExp b a)
instance (Backend b, Data a) => Plated (GBoolExp b a)
instance (Backend b, Cacheable a) => Cacheable (GBoolExp b a)
instance (Backend b, Hashable a) => Hashable (GBoolExp b a)
gBoolExpTrue :: GBoolExp b a
gBoolExpTrue = BoolAnd []
@ -122,7 +137,6 @@ gBoolExpToJSON f be = case be of
BoolExists bExists -> "_exists" .= gExistsToJSON f bExists
BoolFld a -> f a
parseGBoolExp
:: ((Text, Value) -> J.Parser a) -> Value -> J.Parser (GBoolExp 'Postgres a)
parseGBoolExp f = \case
@ -143,6 +157,27 @@ parseGBoolExp f = \case
parseGBoolExpL v =
parseJSON v >>= mapM (parseGBoolExp f)
newtype BoolExp (b :: BackendType)
= BoolExp { unBoolExp :: GBoolExp b ColExp }
deriving (Show, Eq, Lift, Generic, NFData, Cacheable)
$(makeWrapped ''BoolExp)
instance ToJSON (BoolExp 'Postgres) where
toJSON (BoolExp gBoolExp) =
gBoolExpToJSON f gBoolExp
where
f (ColExp k v) =
(getFieldNameTxt k, v)
instance FromJSON (BoolExp 'Postgres) where
parseJSON =
fmap BoolExp . parseGBoolExp f
where
f (k, v) = ColExp (FieldName k) <$> parseJSON v
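-- A hedged illustration of the wire format round-tripped by the two instances
-- above: the _and/_or/_not/_exists keys recurse into 'GBoolExp', while any
-- other key becomes a 'ColExp' whose value is kept as raw JSON (the operator
-- syntax below is illustrative):
--
--   { "_and": [ { "published": { "$eq": true } }
--             , { "_not": { "author_id": { "$eq": 1 } } } ] }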
data DWithinGeomOp a =
DWithinGeomOp
{ dwgeomDistance :: !a
@ -186,7 +221,7 @@ $(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsGeomminNband)
type CastExp b a = M.HashMap (ScalarType b) [OpExpG b a]
data OpExpG (b :: Backend) a
data OpExpG (b :: BackendType) a
= ACast !(CastExp b a)
| AEQ !Bool !a
@ -243,10 +278,10 @@ deriving instance (Eq a) => Eq (OpExpG 'Postgres a)
instance (NFData a) => NFData (OpExpG 'Postgres a)
instance (Cacheable a) => Cacheable (OpExpG 'Postgres a)
instance (Hashable a) => Hashable (OpExpG 'Postgres a)
type family XAILIKE (b :: Backend) where
type family XAILIKE (b :: BackendType) where
XAILIKE 'Postgres = ()
XAILIKE 'MySQL = Void
type family XANILIKE (b :: Backend) where
type family XANILIKE (b :: BackendType) where
XANILIKE 'Postgres = ()
XANILIKE 'MySQL = Void
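-- The () / Void pattern above acts as a per-backend feature flag: a
-- constructor carrying an XAILIKE or XANILIKE field can be built for
-- 'Postgres (the evidence is the trivial ()), but is unrepresentable for
-- 'MySQL, since no value of type Void exists.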
@ -316,7 +351,7 @@ opExpToJPair f = \case
where
opExpsToJSON = object . map (opExpToJPair f)
data AnnBoolExpFld (b :: Backend) a
data AnnBoolExpFld (b :: BackendType) a
= AVCol !(ColumnInfo b) ![OpExpG 'Postgres a]
| AVRel !RelInfo !(AnnBoolExp b a)
deriving (Functor, Foldable, Traversable, Generic)
@ -354,8 +389,8 @@ andAnnBoolExps :: AnnBoolExp backend a -> AnnBoolExp backend a -> AnnBoolExp bac
andAnnBoolExps l r =
BoolAnd [l, r]
type AnnBoolExpFldSQL b = AnnBoolExpFld b S.SQLExp
type AnnBoolExpSQL b = AnnBoolExp b S.SQLExp
type AnnBoolExpFldSQL b = AnnBoolExpFld b (SQLExp b)
type AnnBoolExpSQL b = AnnBoolExp b (SQLExp b)
type AnnBoolExpFldPartialSQL b = AnnBoolExpFld b (PartialSQLExp b)
type AnnBoolExpPartialSQL b = AnnBoolExp b (PartialSQLExp b)
@ -364,16 +399,16 @@ type PreSetColsG b v = M.HashMap (Column b) v
type PreSetColsPartial b = M.HashMap (Column b) (PartialSQLExp b)
-- doesn't resolve the session variable
data PartialSQLExp (b :: Backend)
= PSESessVar !(PGType (ScalarType b)) !SessionVariable
| PSESQLExp !S.SQLExp
data PartialSQLExp (b :: BackendType)
= PSESessVar !(PG.PGType (ScalarType b)) !SessionVariable
| PSESQLExp !(SQLExp b)
deriving (Generic)
deriving instance Eq (PartialSQLExp 'Postgres)
deriving instance (Typeable backend, Data (ScalarType backend)) => Data (PartialSQLExp backend)
deriving instance Data (PartialSQLExp 'Postgres)
instance NFData (PartialSQLExp 'Postgres)
instance Cacheable (PartialSQLExp 'Postgres)
mkTypedSessionVar :: PGType PGColumnType -> SessionVariable -> PartialSQLExp 'Postgres
mkTypedSessionVar :: PG.PGType PGColumnType -> SessionVariable -> PartialSQLExp 'Postgres
mkTypedSessionVar columnType =
PSESessVar (unsafePGColumnToRepresentation <$> columnType)
@ -387,7 +422,7 @@ instance ToJSON (AnnBoolExpPartialSQL 'Postgres) where
where
f annFld = case annFld of
AVCol pci opExps ->
( getPGColTxt $ pgiColumn pci
( PG.getPGColTxt $ pgiColumn pci
, toJSON (pci, map opExpSToJSON opExps)
)
AVRel ri relBoolExp ->

View File

@ -2,24 +2,22 @@ module Hasura.RQL.IR.Delete where
import Hasura.Prelude
import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.Returning
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
data AnnDelG (b :: Backend) v
data AnnDelG (b :: BackendType) v
= AnnDel
{ dqp1Table :: !QualifiedTable
{ dqp1Table :: !(TableName b)
, dqp1Where :: !(AnnBoolExp b v, AnnBoolExp b v)
, dqp1Output :: !(MutationOutputG b v)
, dqp1AllCols :: ![ColumnInfo b]
}
type AnnDel b = AnnDelG b S.SQLExp
type AnnDel b = AnnDelG b (SQLExp b)
traverseAnnDel
:: (Applicative f)

View File

@ -1,11 +1,7 @@
module Hasura.RQL.IR.Insert where
import Hasura.Prelude
import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.Returning
import Hasura.RQL.Types.Column
@ -13,24 +9,73 @@ import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
data ConflictTarget
= CTColumn ![PGCol]
| CTConstraint !ConstraintName
deriving (Show, Eq)
data AnnInsert (b :: BackendType) v
= AnnInsert
{ _aiFieldName :: !Text
, _aiIsSingle :: Bool
, _aiData :: AnnMultiInsert b v
}
data ConflictClauseP1 (b :: Backend) v
= CP1DoNothing !(Maybe ConflictTarget)
| CP1Update !ConflictTarget ![Column b] !(PreSetColsG b v) (AnnBoolExp b v)
data AnnIns (b :: BackendType) a v
= AnnIns
{ _aiInsObj :: !a
, _aiTableName :: !(TableName b)
, _aiConflictClause :: !(Maybe (ConflictClauseP1 b v))
, _aiCheckCond :: !(AnnBoolExp b v, Maybe (AnnBoolExp b v))
, _aiTableCols :: ![ColumnInfo b]
, _aiDefVals :: !(PreSetColsG b v)
}
type SingleObjIns b v = AnnIns b (AnnInsObj b v) v
type MultiObjIns b v = AnnIns b [AnnInsObj b v] v
data RelIns a
= RelIns
{ _riAnnIns :: !a
, _riRelInfo :: !RelInfo
} deriving (Show, Eq)
type ObjRelIns b v = RelIns (SingleObjIns b v)
type ArrRelIns b v = RelIns (MultiObjIns b v)
data AnnInsObj (b :: BackendType) v
= AnnInsObj
{ _aioColumns :: ![(Column b, v)]
, _aioObjRels :: ![ObjRelIns b v]
, _aioArrRels :: ![ArrRelIns b v]
}
type AnnSingleInsert b v = (SingleObjIns b v, MutationOutputG b v)
type AnnMultiInsert b v = (MultiObjIns b v, MutationOutputG b v)
instance Semigroup (AnnInsObj backend v) where
(AnnInsObj col1 obj1 rel1) <> (AnnInsObj col2 obj2 rel2) =
AnnInsObj (col1 <> col2) (obj1 <> obj2) (rel1 <> rel2)
instance Monoid (AnnInsObj backend v) where
mempty = AnnInsObj [] [] []
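-- The instances above merge insert objects field-wise, so nested insert
-- parsers can combine partial results with (<>); as a small sketch,
--   AnnInsObj cols objs arrs <> mempty == AnnInsObj cols objs arrs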
data ConflictTarget (b :: BackendType)
= CTColumn ![Column b]
| CTConstraint !(ConstraintName b)
deriving instance Backend b => Show (ConflictTarget b)
deriving instance Backend b => Eq (ConflictTarget b)
data ConflictClauseP1 (b :: BackendType) v
= CP1DoNothing !(Maybe (ConflictTarget b))
| CP1Update !(ConflictTarget b) ![Column b] !(PreSetColsG b v) (AnnBoolExp b v)
deriving (Functor, Foldable, Traversable)
data InsertQueryP1 (b :: Backend)
data InsertQueryP1 (b :: BackendType)
= InsertQueryP1
{ iqp1Table :: !QualifiedTable
{ iqp1Table :: !(TableName b)
, iqp1Cols :: ![Column b]
, iqp1Tuples :: ![[S.SQLExp]]
, iqp1Conflict :: !(Maybe (ConflictClauseP1 b S.SQLExp))
, iqp1Tuples :: ![[SQLExp b]]
, iqp1Conflict :: !(Maybe (ConflictClauseP1 b (SQLExp b)))
, iqp1CheckCond :: !(AnnBoolExpSQL b, Maybe (AnnBoolExpSQL b))
, iqp1Output :: !(MutationOutput b)
, iqp1AllCols :: ![ColumnInfo b]

View File

@ -0,0 +1,82 @@
module Hasura.RQL.IR.OrderBy
( OrderByCol(..)
, OrderByItemG(..)
, OrderByItem
-- used by RQL.DML.Types
, orderByColFromTxt
) where
import Hasura.Prelude
import qualified Data.Text as T
import Data.Aeson
import Data.Aeson.Casing
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
-- order by col
data OrderByCol
= OCPG !FieldName
| OCRel !FieldName !OrderByCol
deriving (Show, Eq, Lift)
instance FromJSON OrderByCol where
parseJSON = \case
(String t) -> orderByColFromToks $ T.split (=='.') t
v -> parseJSON v >>= orderByColFromToks
instance ToJSON OrderByCol where
toJSON = toJSON . orderByColToTxt
orderByColToTxt :: OrderByCol -> Text
orderByColToTxt = \case
OCPG pgCol -> getFieldNameTxt pgCol
OCRel rel obCol -> getFieldNameTxt rel <> "." <> orderByColToTxt obCol
orderByColFromToks
:: (MonadFail m)
=> [Text] -> m OrderByCol
orderByColFromToks toks = do
when (any T.null toks) $ fail "col/rel cannot be empty"
case toks of
[] -> fail "failed to parse an OrderByCol: found empty cols"
x:xs -> return $ go (FieldName x) xs
where
go fld = \case
[] -> OCPG fld
x:xs -> OCRel fld $ go (FieldName x) xs
orderByColFromTxt
:: (MonadFail m)
=> Text -> m OrderByCol
orderByColFromTxt =
orderByColFromToks . T.split (=='.')
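-- A doctest-style sketch of the dotted-path parsing above (modulo the
-- exact derived Show output):
--
-- >>> orderByColFromTxt "author.name" :: Maybe OrderByCol
-- Just (OCRel "author" (OCPG "name"))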
-- order by item
data OrderByItemG (b :: BackendType) a
= OrderByItemG
{ obiType :: !(Maybe (BasicOrderType b))
, obiColumn :: !a
, obiNulls :: !(Maybe (NullsOrderType b))
} deriving (Functor, Foldable, Traversable, Generic)
deriving instance (Backend b, Show a) => Show (OrderByItemG b a)
deriving instance (Backend b, Eq a) => Eq (OrderByItemG b a)
deriving instance (Backend b, Lift a) => Lift (OrderByItemG b a)
instance (Backend b, Hashable a) => Hashable (OrderByItemG b a)
type OrderByItem b = OrderByItemG b OrderByCol
instance (Backend b, FromJSON a) => FromJSON (OrderByItemG b a) where
parseJSON = genericParseJSON (aesonPrefix snakeCase){omitNothingFields=True}
instance (Backend b, ToJSON a) => ToJSON (OrderByItemG b a) where
toJSON = genericToJSON (aesonPrefix snakeCase){omitNothingFields=True}
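-- With the Postgres order types (see the 'Backend' instance in
-- Hasura.RQL.Types.Common), an item therefore round-trips through JSON as,
-- e.g., { "type": "asc", "column": "posted", "nulls": "last" }, with
-- "type" and "nulls" omitted when they are Nothing.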

View File

@ -12,7 +12,7 @@ import Hasura.RQL.Types
-- | A 'RemoteJoin' represents the context of remote relationship to be extracted from 'AnnFieldG's.
data RemoteJoin (b :: Backend)
data RemoteJoin (b :: BackendType)
= RemoteJoin
{ _rjName :: !FieldName -- ^ The remote join field name.
, _rjArgs :: ![RemoteFieldArgument] -- ^ User-provided arguments with variables.

View File

@ -2,32 +2,31 @@ module Hasura.RQL.IR.Returning where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict.InsOrd as OMap
import Hasura.EncJSON
import Hasura.RQL.IR.Select
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
data MutFldG (b :: Backend) v
data MutFldG (b :: BackendType) v
= MCount
| MExp !Text
| MRet !(AnnFieldsG b v)
type MutFld b = MutFldG b S.SQLExp
type MutFld b = MutFldG b (SQLExp b)
type MutFldsG b v = Fields (MutFldG b v)
data MutationOutputG (b :: Backend) v
data MutationOutputG (b :: BackendType) v
= MOutMultirowFields !(MutFldsG b v)
| MOutSinglerowObject !(AnnFieldsG b v)
type MutationOutput b = MutationOutputG b S.SQLExp
type MutationOutput b = MutationOutputG b (SQLExp b)
type MutFlds b = MutFldsG b S.SQLExp
type MutFlds b = MutFldsG b (SQLExp b)
buildEmptyMutResp :: MutationOutput backend -> EncJSON
buildEmptyMutResp = \case

View File

@ -13,14 +13,14 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Control.Lens.TH (makeLenses, makePrisms)
import qualified Hasura.Backends.Postgres.SQL.DML as S
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.Backends.Postgres.SQL.Types as PG
import Hasura.Backends.Postgres.SQL.Types
import Hasura.GraphQL.Parser.Schema
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.DML
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.RemoteRelationship
import Hasura.RQL.Types.RemoteSchema
@ -33,14 +33,14 @@ data JsonAggSelect
deriving (Show, Eq, Generic)
instance Hashable JsonAggSelect
data AnnAggregateOrderBy (b :: Backend)
data AnnAggregateOrderBy (b :: BackendType)
= AAOCount
| AAOOp !Text !(ColumnInfo b)
deriving (Generic)
deriving instance Eq (AnnAggregateOrderBy 'Postgres)
instance Hashable (AnnAggregateOrderBy 'Postgres)
data AnnOrderByElementG (b :: Backend) v
data AnnOrderByElementG (b :: BackendType) v
= AOCColumn !(ColumnInfo b)
| AOCObjectRelation !RelInfo !v !(AnnOrderByElementG b v)
| AOCArrayAggregation !RelInfo !v !(AnnAggregateOrderBy b)
@ -64,7 +64,7 @@ traverseAnnOrderByElement f = \case
<$> traverseAnnBoolExp f annBoolExp
<*> pure annAggOb
type AnnOrderByItemG b v = OrderByItemG (AnnOrderByElement b v)
type AnnOrderByItemG b v = OrderByItemG b (AnnOrderByElement b v)
traverseAnnOrderByItem
:: (Applicative f)
@ -72,12 +72,12 @@ traverseAnnOrderByItem
traverseAnnOrderByItem f =
traverse (traverseAnnOrderByElement f)
type AnnOrderByItem b = AnnOrderByItemG b S.SQLExp
type AnnOrderByItem b = AnnOrderByItemG b (SQLExp b)
type OrderByItemExp b =
OrderByItemG (AnnOrderByElement b S.SQLExp, (S.Alias, S.SQLExp))
OrderByItemG b (AnnOrderByElement b (SQLExp b), (PG.Alias, (SQLExp b)))
data AnnRelationSelectG (b :: Backend) a
data AnnRelationSelectG (b :: BackendType) a
= AnnRelationSelectG
{ aarRelationshipName :: !RelName -- Relationship name
, aarColumnMapping :: !(HashMap (Column b) (Column b)) -- Column of left table to join with
@ -87,16 +87,16 @@ data AnnRelationSelectG (b :: Backend) a
type ArrayRelationSelectG b v = AnnRelationSelectG b (AnnSimpleSelG b v)
type ArrayAggregateSelectG b v = AnnRelationSelectG b (AnnAggregateSelectG b v)
type ArrayConnectionSelect b v = AnnRelationSelectG b (ConnectionSelect b v)
type ArrayAggregateSelect b = ArrayAggregateSelectG b S.SQLExp
type ArrayAggregateSelect b = ArrayAggregateSelectG b (SQLExp b)
data AnnObjectSelectG (b :: Backend) v
data AnnObjectSelectG (b :: BackendType) v
= AnnObjectSelectG
{ _aosFields :: !(AnnFieldsG b v)
, _aosTableFrom :: !QualifiedTable
, _aosTableFrom :: !(TableName b)
, _aosTableFilter :: !(AnnBoolExp b v)
}
type AnnObjectSelect b = AnnObjectSelectG b S.SQLExp
type AnnObjectSelect b = AnnObjectSelectG b (SQLExp b)
traverseAnnObjectSelect
:: (Applicative f)
@ -109,18 +109,20 @@ traverseAnnObjectSelect f (AnnObjectSelectG fields fromTable permissionFilter) =
<*> traverseAnnBoolExp f permissionFilter
type ObjectRelationSelectG b v = AnnRelationSelectG b (AnnObjectSelectG b v)
type ObjectRelationSelect b = ObjectRelationSelectG b S.SQLExp
type ObjectRelationSelect b = ObjectRelationSelectG b (SQLExp b)
data ComputedFieldScalarSelect v
data ComputedFieldScalarSelect (b :: BackendType) v
= ComputedFieldScalarSelect
{ _cfssFunction :: !QualifiedFunction
{ _cfssFunction :: !PG.QualifiedFunction
, _cfssArguments :: !(FunctionArgsExpTableRow v)
, _cfssType :: !PGScalarType
, _cfssColumnOp :: !(Maybe ColumnOp)
} deriving (Show, Eq, Functor, Foldable, Traversable)
, _cfssType :: !PG.PGScalarType
, _cfssColumnOp :: !(Maybe (ColumnOp b))
} deriving (Functor, Foldable, Traversable)
deriving instance Show v => Show (ComputedFieldScalarSelect 'Postgres v)
deriving instance Eq v => Eq (ComputedFieldScalarSelect 'Postgres v)
data ComputedFieldSelect (b :: Backend) v
= CFSScalar !(ComputedFieldScalarSelect v)
data ComputedFieldSelect (b :: BackendType) v
= CFSScalar !(ComputedFieldScalarSelect b v)
| CFSTable !JsonAggSelect !(AnnSimpleSelG b v)
traverseComputedFieldSelect
@ -133,7 +135,7 @@ traverseComputedFieldSelect fv = \case
type Fields a = [(FieldName, a)]
data ArraySelectG (b :: Backend) v
data ArraySelectG (b :: BackendType) v
= ASSimple !(ArrayRelationSelectG b v)
| ASAggregate !(ArrayAggregateSelectG b v)
| ASConnection !(ArrayConnectionSelect b v)
@ -151,24 +153,26 @@ traverseArraySelect f = \case
ASConnection relConnection ->
ASConnection <$> traverse (traverseConnectionSelect f) relConnection
type ArraySelect b = ArraySelectG b S.SQLExp
type ArraySelect b = ArraySelectG b (SQLExp b)
type ArraySelectFieldsG b v = Fields (ArraySelectG b v)
data ColumnOp
data ColumnOp (b :: BackendType)
= ColumnOp
{ _colOp :: S.SQLOp
, _colExp :: S.SQLExp
} deriving (Show, Eq)
{ _colOp :: PG.SQLOp
, _colExp :: (SQLExp b)
}
deriving instance Show (ColumnOp 'Postgres)
deriving instance Eq (ColumnOp 'Postgres)
data AnnColumnField (b :: Backend)
data AnnColumnField (b :: BackendType)
= AnnColumnField
{ _acfInfo :: !(ColumnInfo b)
, _acfAsText :: !Bool
  -- ^ If this field is 'True', columns are explicitly cast to @text@ when fetched, which avoids
  -- an issue that occurs because we don't currently have proper support for array types. See
-- https://github.com/hasura/graphql-engine/pull/3198 for more details.
, _acfOp :: !(Maybe ColumnOp)
, _acfOp :: !(Maybe (ColumnOp b))
}
data RemoteFieldArgument
@ -177,7 +181,7 @@ data RemoteFieldArgument
, _rfaValue :: !(InputValue Variable)
} deriving (Eq,Show)
data RemoteSelect (b :: Backend)
data RemoteSelect (b :: BackendType)
= RemoteSelect
{ _rselArgs :: ![RemoteFieldArgument]
, _rselSelection :: !(G.SelectionSet G.NoFragments Variable)
@ -186,16 +190,16 @@ data RemoteSelect (b :: Backend)
, _rselRemoteSchema :: !RemoteSchemaInfo
}
data AnnFieldG (b :: Backend) v
data AnnFieldG (b :: BackendType) v
= AFColumn !(AnnColumnField b)
| AFObjectRelation !(ObjectRelationSelectG b v)
| AFArrayRelation !(ArraySelectG b v)
| AFComputedField !(ComputedFieldSelect b v)
| AFRemote !(RemoteSelect b)
| AFNodeId !QualifiedTable !(PrimaryKeyColumns b)
| AFNodeId !(TableName b) !(PrimaryKeyColumns b)
| AFExpression !Text
mkAnnColumnField :: ColumnInfo backend -> Maybe ColumnOp -> AnnFieldG backend v
mkAnnColumnField :: ColumnInfo backend -> Maybe (ColumnOp backend) -> AnnFieldG backend v
mkAnnColumnField ci colOpM =
AFColumn $ AnnColumnField ci False colOpM
@ -215,14 +219,14 @@ traverseAnnField f = \case
AFNodeId qt pKeys -> pure $ AFNodeId qt pKeys
AFExpression t -> AFExpression <$> pure t
type AnnField b = AnnFieldG b S.SQLExp
type AnnField b = AnnFieldG b (SQLExp b)
data SelectArgsG (b :: Backend) v
data SelectArgsG (b :: BackendType) v
= SelectArgs
{ _saWhere :: !(Maybe (AnnBoolExp b v))
, _saOrderBy :: !(Maybe (NE.NonEmpty (AnnOrderByItemG b v)))
, _saLimit :: !(Maybe Int)
, _saOffset :: !(Maybe S.SQLExp)
, _saOffset :: !(Maybe (SQLExp b))
, _saDistinct :: !(Maybe (NE.NonEmpty (Column b)))
} deriving (Generic)
deriving instance Eq v => Eq (SelectArgsG 'Postgres v)
@ -240,12 +244,12 @@ traverseSelectArgs f (SelectArgs wh ordBy lmt ofst distCols) =
<*> pure ofst
<*> pure distCols
type SelectArgs b = SelectArgsG b S.SQLExp
type SelectArgs b = SelectArgsG b (SQLExp b)
noSelectArgs :: SelectArgsG backend v
noSelectArgs = SelectArgs Nothing Nothing Nothing Nothing Nothing
data ColFld (b :: Backend)
data ColFld (b :: BackendType)
= CFCol !(Column b)
| CFExp !Text
{-
@ -255,14 +259,14 @@ deriving instance Show (Column b) => Show (ColFld b)
type ColumnFields b = Fields (ColFld b)
data AggregateOp (b :: Backend)
data AggregateOp (b :: BackendType)
= AggregateOp
{ _aoOp :: !Text
, _aoFields :: !(ColumnFields b)
}
data AggregateField (b :: Backend)
= AFCount !S.CountType
data AggregateField (b :: BackendType)
= AFCount !PG.CountType
| AFOp !(AggregateOp b)
| AFExp !Text
@ -274,9 +278,9 @@ traverseAnnFields
=> (a -> f b) -> AnnFieldsG backend a -> f (AnnFieldsG backend b)
traverseAnnFields f = traverse (traverse (traverseAnnField f))
type AnnFields b = AnnFieldsG b S.SQLExp
type AnnFields b = AnnFieldsG b (SQLExp b)
data TableAggregateFieldG (b :: Backend) v
data TableAggregateFieldG (b :: BackendType) v
= TAFAgg !(AggregateFields b)
| TAFNodes !(AnnFieldsG b v)
| TAFExp !Text
@ -290,7 +294,7 @@ data PageInfoField
deriving (Show, Eq)
type PageInfoFields = Fields PageInfoField
data EdgeField (b :: Backend) v
data EdgeField (b :: BackendType) v
= EdgeTypename !Text
| EdgeCursor
| EdgeNode !(AnnFieldsG b v)
@ -304,7 +308,7 @@ traverseEdgeField f = \case
EdgeCursor -> pure EdgeCursor
EdgeNode fields -> EdgeNode <$> traverseAnnFields f fields
data ConnectionField (b :: Backend) v
data ConnectionField (b :: BackendType) v
= ConnectionTypename !Text
| ConnectionPageInfo !PageInfoFields
| ConnectionEdges !(EdgeFields b v)
@ -327,12 +331,12 @@ traverseTableAggregateField f = \case
TAFNodes annFlds -> TAFNodes <$> traverseAnnFields f annFlds
TAFExp t -> pure $ TAFExp t
type TableAggregateField b = TableAggregateFieldG b S.SQLExp
type TableAggregateField b = TableAggregateFieldG b (SQLExp b)
type TableAggregateFieldsG b v = Fields (TableAggregateFieldG b v)
type TableAggregateFields b = TableAggregateFieldsG b S.SQLExp
type TableAggregateFields b = TableAggregateFieldsG b (SQLExp b)
data ArgumentExp a
= AETableRow !(Maybe Identifier) -- ^ table row accessor
= AETableRow !(Maybe PG.Identifier) -- ^ table row accessor
| AESession !a -- ^ JSON/JSONB hasura session variable object
| AEInput !a
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
@ -340,19 +344,19 @@ instance (Hashable v) => Hashable (ArgumentExp v)
type FunctionArgsExpTableRow v = FunctionArgsExpG (ArgumentExp v)
data SelectFromG (b :: Backend) v
= FromTable !QualifiedTable
| FromIdentifier !Identifier
| FromFunction !QualifiedFunction
data SelectFromG (b :: BackendType) v
= FromTable !(TableName b)
| FromIdentifier !PG.Identifier
| FromFunction !PG.QualifiedFunction
!(FunctionArgsExpTableRow v)
-- a definition list
!(Maybe [(Column b, ScalarType b)])
deriving (Functor, Foldable, Traversable, Generic)
instance (Hashable v) => Hashable (SelectFromG 'Postgres v)
type SelectFrom b = SelectFromG b S.SQLExp
type SelectFrom b = SelectFromG b (SQLExp b)
data TablePermG (b :: Backend) v
data TablePermG (b :: BackendType) v
= TablePerm
{ _tpFilter :: !(AnnBoolExp b v)
, _tpLimit :: !(Maybe Int)
@ -373,9 +377,9 @@ noTablePermissions :: TablePermG backend v
noTablePermissions =
TablePerm annBoolExpTrue Nothing
type TablePerm b = TablePermG b S.SQLExp
type TablePerm b = TablePermG b (SQLExp b)
data AnnSelectG (b :: Backend) a v
data AnnSelectG (b :: BackendType) a v
= AnnSelectG
{ _asnFields :: !a
, _asnFrom :: !(SelectFromG b v)
@ -409,11 +413,11 @@ traverseAnnSelect f1 f2 (AnnSelectG flds tabFrom perm args strfyNum) =
<*> traverseSelectArgs f2 args
<*> pure strfyNum
type AnnSimpleSelG b v = AnnSelectG b (AnnFieldsG b v) v
type AnnSimpleSel b = AnnSimpleSelG b S.SQLExp
type AnnSimpleSelG b v = AnnSelectG b (AnnFieldsG b v) v
type AnnSimpleSel b = AnnSimpleSelG b (SQLExp b)
type AnnAggregateSelectG b v = AnnSelectG b (TableAggregateFieldsG b v) v
type AnnAggregateSelect b = AnnAggregateSelectG b S.SQLExp
type AnnAggregateSelect b = AnnAggregateSelectG b (SQLExp b)
data ConnectionSlice
= SliceFirst !Int
@ -427,11 +431,11 @@ data ConnectionSplitKind
deriving (Show, Eq, Generic)
instance Hashable ConnectionSplitKind
data ConnectionSplit (b :: Backend) v
data ConnectionSplit (b :: BackendType) v
= ConnectionSplit
{ _csKind :: !ConnectionSplitKind
, _csValue :: !v
, _csOrderBy :: !(OrderByItemG (AnnOrderByElementG b ()))
, _csOrderBy :: !(OrderByItemG b (AnnOrderByElementG b ()))
} deriving (Functor, Generic, Foldable, Traversable)
instance (Hashable v) => Hashable (ConnectionSplit 'Postgres v)
@ -441,7 +445,7 @@ traverseConnectionSplit
traverseConnectionSplit f (ConnectionSplit k v ob) =
ConnectionSplit k <$> f v <*> pure ob
data ConnectionSelect (b :: Backend) v
data ConnectionSelect (b :: BackendType) v
= ConnectionSelect
{ _csPrimaryKeyColumns :: !(PrimaryKeyColumns b)
, _csSplit :: !(Maybe (NE.NonEmpty (ConnectionSplit b v)))
@ -469,7 +473,7 @@ instance (Hashable a) => Hashable (FunctionArgsExpG a)
emptyFunctionArgsExp :: FunctionArgsExpG a
emptyFunctionArgsExp = FunctionArgsExp [] HM.empty
type FunctionArgExp = FunctionArgsExpG S.SQLExp
type FunctionArgExp b = FunctionArgsExpG (SQLExp b)
-- | If argument positional index is less than or equal to length of
-- 'positional' arguments then insert the value in 'positional' arguments,
-- else insert the value in the 'named' arguments map.
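-- A hypothetical worked example (argument names invented for illustration):
-- given FunctionArgsExp [a1, a2] mempty, an insertion at positional index
-- 2 or lower lands in the positional list, while index 3 or higher falls
-- through to the named-argument map under its argument name.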
@ -490,29 +494,31 @@ insertFunctionArg argName idx value (FunctionArgsExp positional named) =
data SourcePrefixes
= SourcePrefixes
{ _pfThis :: !Identifier -- ^ Current source prefix
, _pfBase :: !Identifier
{ _pfThis :: !PG.Identifier -- ^ Current source prefix
, _pfBase :: !PG.Identifier
-- ^ Base table source row identifier to generate
-- the table's column identifiers for computed field
-- function input parameters
} deriving (Show, Eq, Generic)
instance Hashable SourcePrefixes
data SelectSource
data SelectSource (b :: BackendType)
= SelectSource
{ _ssPrefix :: !Identifier
, _ssFrom :: !S.FromItem
, _ssDistinct :: !(Maybe S.DistinctExpr)
, _ssWhere :: !S.BoolExp
, _ssOrderBy :: !(Maybe S.OrderByExp)
{ _ssPrefix :: !PG.Identifier
, _ssFrom :: !PG.FromItem
, _ssDistinct :: !(Maybe PG.DistinctExpr)
, _ssWhere :: !PG.BoolExp
, _ssOrderBy :: !(Maybe PG.OrderByExp)
, _ssLimit :: !(Maybe Int)
, _ssOffset :: !(Maybe S.SQLExp)
} deriving (Show, Eq, Generic)
instance Hashable SelectSource
, _ssOffset :: !(Maybe (SQLExp b))
} deriving (Generic)
instance Hashable (SelectSource 'Postgres)
deriving instance Show (SelectSource 'Postgres)
deriving instance Eq (SelectSource 'Postgres)
data SelectNode (b :: Backend)
data SelectNode (b :: BackendType)
= SelectNode
{ _snExtractors :: !(HM.HashMap S.Alias S.SQLExp)
{ _snExtractors :: !(HM.HashMap PG.Alias (SQLExp b))
, _snJoinTree :: !(JoinTree b)
}
@ -522,17 +528,17 @@ instance Semigroup (SelectNode 'Postgres) where
data ObjectSelectSource
= ObjectSelectSource
{ _ossPrefix :: !Identifier
, _ossFrom :: !S.FromItem
, _ossWhere :: !S.BoolExp
{ _ossPrefix :: !PG.Identifier
, _ossFrom :: !PG.FromItem
, _ossWhere :: !PG.BoolExp
} deriving (Show, Eq, Generic)
instance Hashable ObjectSelectSource
objectSelectSourceToSelectSource :: ObjectSelectSource -> SelectSource
objectSelectSourceToSelectSource :: ObjectSelectSource -> (SelectSource backend)
objectSelectSourceToSelectSource ObjectSelectSource{..} =
SelectSource _ossPrefix _ossFrom Nothing _ossWhere Nothing Nothing Nothing
data ObjectRelationSource (b :: Backend)
data ObjectRelationSource (b :: BackendType)
= ObjectRelationSource
{ _orsRelationshipName :: !RelName
, _orsRelationMapping :: !(HM.HashMap (Column b) (Column b))
@ -541,18 +547,18 @@ data ObjectRelationSource (b :: Backend)
instance Hashable (ObjectRelationSource 'Postgres)
deriving instance Eq (Column b) => Eq (ObjectRelationSource b)
data ArrayRelationSource (b :: Backend)
data ArrayRelationSource (b :: BackendType)
= ArrayRelationSource
{ _arsAlias :: !S.Alias
{ _arsAlias :: !PG.Alias
, _arsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _arsSelectSource :: !SelectSource
, _arsSelectSource :: !(SelectSource b)
} deriving (Generic)
instance Hashable (ArrayRelationSource 'Postgres)
deriving instance Eq (Column b) => Eq (ArrayRelationSource b)
deriving instance Eq (ArrayRelationSource 'Postgres)
data ArraySelectNode (b :: Backend)
data ArraySelectNode (b :: BackendType)
= ArraySelectNode
{ _asnTopExtractors :: ![S.Extractor]
{ _asnTopExtractors :: ![PG.Extractor]
, _asnSelectNode :: !(SelectNode b)
}
@ -560,32 +566,34 @@ instance Semigroup (ArraySelectNode 'Postgres) where
ArraySelectNode lTopExtrs lSelNode <> ArraySelectNode rTopExtrs rSelNode =
ArraySelectNode (lTopExtrs <> rTopExtrs) (lSelNode <> rSelNode)
data ComputedFieldTableSetSource
data ComputedFieldTableSetSource (b :: BackendType)
= ComputedFieldTableSetSource
{ _cftssFieldName :: !FieldName
, _cftssSelectType :: !JsonAggSelect
, _cftssSelectSource :: !SelectSource
} deriving (Show, Eq, Generic)
instance Hashable ComputedFieldTableSetSource
data ArrayConnectionSource (b :: Backend)
= ArrayConnectionSource
{ _acsAlias :: !S.Alias
, _acsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _acsSplitFilter :: !(Maybe S.BoolExp)
, _acsSlice :: !(Maybe ConnectionSlice)
, _acsSource :: !SelectSource
, _cftssSelectSource :: !(SelectSource b)
} deriving (Generic)
deriving instance Eq (Column b) => Eq (ArrayConnectionSource b)
instance Hashable (ComputedFieldTableSetSource 'Postgres)
deriving instance Show (ComputedFieldTableSetSource 'Postgres)
deriving instance Eq (ComputedFieldTableSetSource 'Postgres)
data ArrayConnectionSource (b :: BackendType)
= ArrayConnectionSource
{ _acsAlias :: !PG.Alias
, _acsRelationMapping :: !(HM.HashMap (Column b) (Column b))
, _acsSplitFilter :: !(Maybe PG.BoolExp)
, _acsSlice :: !(Maybe ConnectionSlice)
, _acsSource :: !(SelectSource b)
} deriving (Generic)
deriving instance Eq (ArrayConnectionSource 'Postgres)
instance Hashable (ArrayConnectionSource 'Postgres)
data JoinTree (b :: Backend)
data JoinTree (b :: BackendType)
= JoinTree
{ _jtObjectRelations :: !(HM.HashMap (ObjectRelationSource b) (SelectNode b))
, _jtArrayRelations :: !(HM.HashMap (ArrayRelationSource b) (ArraySelectNode b))
, _jtArrayConnections :: !(HM.HashMap (ArrayConnectionSource b) (ArraySelectNode b))
, _jtComputedFieldTableSets :: !(HM.HashMap ComputedFieldTableSetSource (SelectNode b))
, _jtComputedFieldTableSets :: !(HM.HashMap (ComputedFieldTableSetSource b) (SelectNode b))
}
instance Semigroup (JoinTree 'Postgres) where

View File

@ -3,9 +3,6 @@ module Hasura.RQL.IR.Update where
import Hasura.Prelude
import qualified Hasura.Backends.Postgres.SQL.DML as S
import Hasura.Backends.Postgres.SQL.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.Returning
import Hasura.RQL.Types.Column
@ -13,9 +10,9 @@ import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
data AnnUpdG (b :: Backend) v
data AnnUpdG (b :: BackendType) v
= AnnUpd
{ uqp1Table :: !QualifiedTable
{ uqp1Table :: !(TableName b)
, uqp1OpExps :: ![(Column b, UpdOpExpG v)]
, uqp1Where :: !(AnnBoolExp b v, AnnBoolExp b v)
, uqp1Check :: !(AnnBoolExp b v)
@ -26,7 +23,7 @@ data AnnUpdG (b :: Backend) v
, uqp1AllCols :: ![ColumnInfo b]
}
type AnnUpd b = AnnUpdG b S.SQLExp
type AnnUpd b = AnnUpdG b (SQLExp b)
data UpdOpExpG v = UpdSet !v
| UpdInc !v

View File

@ -54,21 +54,21 @@ import Hasura.RQL.Types.Column as R
import Hasura.RQL.Types.Common as R
import Hasura.RQL.Types.ComputedField as R
import Hasura.RQL.Types.CustomTypes as R
import Hasura.RQL.Types.DML as R
import Hasura.RQL.Types.Error as R
import Hasura.RQL.Types.EventTrigger as R
import Hasura.RQL.Types.Function as R
import Hasura.RQL.Types.Metadata as R
import Hasura.RQL.Types.Permission as R
import Hasura.RQL.Types.QueryCollection as R
import Hasura.RQL.Types.Relationship as R
import Hasura.RQL.Types.RemoteRelationship as R
import Hasura.RQL.Types.RemoteSchema as R
import Hasura.RQL.Types.ScheduledTrigger as R
import Hasura.RQL.Types.SchemaCache as R
import Hasura.RQL.Types.SchemaCache.Build as R
import Hasura.RQL.Types.Table as R
import Hasura.SQL.Backend as R
import Hasura.Session
import Hasura.SQL.Backend as R
import Hasura.Tracing (TraceT)
data QCtx

View File

@ -184,7 +184,7 @@ getActionOutputFields :: AnnotatedObjectType backend -> ActionOutputFields
getActionOutputFields =
Map.fromList . map ( (unObjectFieldName . _ofdName) &&& (fst . _ofdType)) . toList . _otdFields
data ActionInfo (b :: Backend)
data ActionInfo (b :: BackendType)
= ActionInfo
{ _aiName :: !ActionName
, _aiOutputObject :: !(AnnotatedObjectType b)
@ -236,7 +236,7 @@ data ActionPermissionMetadata
instance NFData ActionPermissionMetadata
instance Cacheable ActionPermissionMetadata
$(J.deriveFromJSON
$(J.deriveJSON
(J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True}
''ActionPermissionMetadata)
@ -248,6 +248,7 @@ data ActionMetadata
, _amDefinition :: !ActionDefinitionInput
, _amPermissions :: ![ActionPermissionMetadata]
} deriving (Show, Eq, Lift, Generic)
$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) ''ActionMetadata)
instance NFData ActionMetadata
instance Cacheable ActionMetadata
@ -261,7 +262,7 @@ instance J.FromJSON ActionMetadata where
----------------- Resolve Types ----------------
data AnnActionExecution (b :: Backend) v
data AnnActionExecution (b :: BackendType) v
= AnnActionExecution
{ _aaeName :: !ActionName
, _aaeOutputType :: !GraphQLType -- ^ output type
@ -283,7 +284,7 @@ data AnnActionMutationAsync
, _aamaPayload :: !J.Value -- ^ jsonified input arguments
} deriving (Show, Eq)
data AsyncActionQueryFieldG (b :: Backend) v
data AsyncActionQueryFieldG (b :: BackendType) v
= AsyncTypename !Text
| AsyncOutput !(AnnFieldsG b v)
| AsyncId
@ -292,7 +293,7 @@ data AsyncActionQueryFieldG (b :: Backend) v
type AsyncActionQueryFieldsG b v = Fields (AsyncActionQueryFieldG b v)
data AnnActionAsyncQuery (b :: Backend) v
data AnnActionAsyncQuery (b :: BackendType) v
= AnnActionAsyncQuery
{ _aaaqName :: !ActionName
, _aaaqActionId :: !v

View File

@ -28,12 +28,12 @@ import System.Cron.Types (CronSchedule (..))
import Hasura.Backends.Postgres.SQL.Types
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.Types.Action
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.CustomTypes
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.Permission
import Hasura.RQL.Types.QueryCollection
import Hasura.RQL.Types.RemoteRelationship
@ -146,7 +146,7 @@ instance NFData CatalogFunction
instance Cacheable CatalogFunction
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
data CatalogCustomTypes (b :: Backend)
data CatalogCustomTypes (b :: BackendType)
= CatalogCustomTypes
{ _cctCustomTypes :: !CustomTypes
, _cctPgScalars :: !(HashSet (ScalarType b))

View File

@ -94,7 +94,7 @@ instance ToTxt PGColumnType where
PGColumnScalar scalar -> toTxt scalar
PGColumnEnumReference (EnumReference tableName _) -> toTxt tableName
type family ColumnType (b :: Backend) where
type family ColumnType (b :: BackendType) where
ColumnType 'Postgres = PGColumnType
ColumnType 'MySQL = Void -- TODO
@ -147,7 +147,7 @@ parseTxtEncodedPGValue colTy val =
-- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of
-- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the
-- 'pcirReferences' field and other table data to eventually resolve the type to a 'PGColumnType'.
data RawColumnInfo (b :: Backend)
data RawColumnInfo (b :: BackendType)
= RawColumnInfo
{ prciName :: !(Column b)
, prciPosition :: !Int
@ -168,7 +168,7 @@ instance FromJSON (RawColumnInfo 'Postgres) where
-- | “Resolved” column info, produced from a 'RawColumnInfo' value that has been combined with
-- other schema information to produce a 'PGColumnType'.
data ColumnInfo (b :: Backend)
data ColumnInfo (b :: BackendType)
= ColumnInfo
{ pgiColumn :: !(Column b)
, pgiName :: !G.Name

View File

@ -7,7 +7,8 @@ module Hasura.RQL.Types.Common
, RelInfo(..)
, ScalarType
, Column
, SQLExp
, Backend (..)
, FieldName(..)
, fromPGCol
@ -48,6 +49,10 @@ module Hasura.RQL.Types.Common
, unsafeNonNegativeInt
, Timeout(..)
, defaultActionTimeoutSecs
, UrlConf(..)
, resolveUrlConf
, getEnv
) where
import Hasura.Prelude
@ -66,14 +71,18 @@ import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Bifunctor (bimap)
import Data.Kind (Type)
import Data.Scientific (toBoundedInteger)
import Data.Text.Extended
import Data.Text.NonEmpty
import Data.Typeable
import Data.URL.Template
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import Hasura.Backends.Postgres.SQL.Types
import qualified Hasura.Backends.Postgres.SQL.DML as PG
import qualified Hasura.Backends.Postgres.SQL.Types as PG
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.Headers ()
@ -81,14 +90,64 @@ import Hasura.RQL.Types.Error
import Hasura.SQL.Backend
type family ScalarType (b :: Backend) where
ScalarType 'Postgres = PGScalarType
type family ScalarType (b :: BackendType) where
ScalarType 'Postgres = PG.PGScalarType
type family ColumnType (b :: Backend) where
ColumnType 'Postgres = PGType
type family ColumnType (b :: BackendType) where
ColumnType 'Postgres = PG.PGType
type family Column (b :: Backend) where
Column 'Postgres = PGCol
type family SQLExp (b :: BackendType) where
SQLExp 'Postgres = PG.SQLExp
-- | Mapping from abstract types to concrete backend representation
--
-- The RQL IR, used as the output of GraphQL parsers and of the RQL parsers, is
-- backend-agnostic: it uses an abstract representation of the structure of a
-- query, and delegates to the backends the task of choosing an appropriate
-- concrete representation.
--
-- Additionally, grouping all those types under one typeclass rather than having
-- dedicated type families allows us to list all typeclass requirements explicitly,
-- which simplifies the instance declarations of all IR types.
class
( Show (TableName b)
, Show (ConstraintName b)
, Show (Column b)
, Show (BasicOrderType b)
, Show (NullsOrderType b)
, Eq (TableName b)
, Eq (ConstraintName b)
, Eq (Column b)
, Eq (BasicOrderType b)
, Eq (NullsOrderType b)
, Lift (TableName b)
, Lift (BasicOrderType b)
, Lift (NullsOrderType b)
, Cacheable (TableName b)
, Data (TableName b)
, Hashable (BasicOrderType b)
, Hashable (NullsOrderType b)
, Hashable (TableName b)
, NFData (TableName b)
, FromJSON (BasicOrderType b)
, FromJSON (NullsOrderType b)
, ToJSON (BasicOrderType b)
, ToJSON (NullsOrderType b)
, Typeable b
) => Backend (b :: BackendType) where
type TableName b :: Type
type ConstraintName b :: Type
type BasicOrderType b :: Type
type NullsOrderType b :: Type
type Column b :: Type
instance Backend 'Postgres where
type TableName 'Postgres = PG.QualifiedTable
type ConstraintName 'Postgres = PG.ConstraintName
type BasicOrderType 'Postgres = PG.OrderType
type NullsOrderType 'Postgres = PG.NullsOrder
type Column 'Postgres = PG.PGCol
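-- A hypothetical sketch (not part of this commit) of what a further backend
-- instance would look like; the types on the right-hand side are invented
-- placeholders, and each choice must satisfy the superclass constraints
-- listed above:
--
--   instance Backend 'MySQL where
--     type TableName      'MySQL = MySQLTableName
--     type ConstraintName 'MySQL = MySQLConstraintName
--     type BasicOrderType 'MySQL = MySQLOrderType
--     type NullsOrderType 'MySQL = MySQLNullsOrder
--     type Column         'MySQL = MySQLColumnName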
adminText :: NonEmptyText
@ -99,10 +158,10 @@ rootText = mkNonEmptyTextUnsafe "root"
newtype RelName
= RelName { getRelTxt :: NonEmptyText }
deriving (Show, Eq, Hashable, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift, Generic, Arbitrary, NFData, Cacheable)
deriving (Show, Eq, Hashable, FromJSON, ToJSON, ToJSONKey, Q.ToPrepArg, Q.FromCol, Lift, Generic, Arbitrary, NFData, Cacheable)
instance IsIdentifier RelName where
toIdentifier rn = Identifier $ relNameToTxt rn
instance PG.IsIdentifier RelName where
toIdentifier rn = PG.Identifier $ relNameToTxt rn
instance ToTxt RelName where
toTxt = relNameToTxt
@ -143,8 +202,8 @@ data RelInfo
= RelInfo
{ riName :: !RelName
, riType :: !RelType
, riMapping :: !(HashMap PGCol PGCol)
, riRTable :: !QualifiedTable
, riMapping :: !(HashMap PG.PGCol PG.PGCol)
, riRTable :: !PG.QualifiedTable
, riIsManual :: !Bool
, riIsNullable :: !Bool
} deriving (Show, Eq, Generic)
@ -161,14 +220,14 @@ newtype FieldName
, Semigroup
)
instance IsIdentifier FieldName where
toIdentifier (FieldName f) = Identifier f
instance PG.IsIdentifier FieldName where
toIdentifier (FieldName f) = PG.Identifier f
instance ToTxt FieldName where
toTxt (FieldName c) = c
fromPGCol :: PGCol -> FieldName
fromPGCol c = FieldName $ getPGColTxt c
fromPGCol :: PG.PGCol -> FieldName
fromPGCol c = FieldName $ PG.getPGColTxt c
fromRel :: RelName -> FieldName
fromRel = FieldName . relNameToTxt
@ -178,7 +237,7 @@ class ToAesonPairs a where
data WithTable a
= WithTable
{ wtName :: !QualifiedTable
{ wtName :: !PG.QualifiedTable
, wtInfo :: !a
} deriving (Show, Eq, Lift)
@ -192,7 +251,7 @@ instance (ToAesonPairs a) => ToJSON (WithTable a) where
toJSON (WithTable tn rel) =
object $ ("table" .= tn):toAesonPairs rel
type ColumnValues a = HM.HashMap PGCol a
type ColumnValues a = HM.HashMap PG.PGCol a
data MutateResp a
= MutateResp
@ -202,7 +261,7 @@ data MutateResp a
$(deriveJSON (aesonDrop 3 snakeCase) ''MutateResp)
type ColMapping = HM.HashMap PGCol PGCol
type ColMapping = HM.HashMap PG.PGCol PG.PGCol
-- | Postgres OIDs. <https://www.postgresql.org/docs/12/datatype-oid.html>
newtype OID = OID { unOID :: Int }
@ -210,7 +269,7 @@ newtype OID = OID { unOID :: Int }
data Constraint
= Constraint
{ _cName :: !ConstraintName
{ _cName :: !PG.ConstraintName
, _cOid :: !OID
} deriving (Show, Eq, Generic)
instance NFData Constraint
@ -231,7 +290,7 @@ $(deriveJSON (aesonDrop 3 snakeCase) ''PrimaryKey)
data ForeignKey
= ForeignKey
{ _fkConstraint :: !Constraint
, _fkForeignTable :: !QualifiedTable
, _fkForeignTable :: !PG.QualifiedTable
, _fkColumnMapping :: !ColMapping
} deriving (Show, Eq, Generic)
instance NFData ForeignKey
@ -257,7 +316,7 @@ class EquatableGType a where
type EqProps a
getEqProps :: a -> EqProps a
type CustomColumnNames = HM.HashMap PGCol G.Name
type CustomColumnNames = HM.HashMap PG.PGCol G.Name
newtype SystemDefined = SystemDefined { unSystemDefined :: Bool }
deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg, NFData, Cacheable)
@ -282,12 +341,8 @@ unsafeNonNegativeInt = NonNegativeInt
instance FromJSON NonNegativeInt where
parseJSON = withScientific "NonNegativeInt" $ \t -> do
case t >= 0 of
True -> NonNegativeInt <$> maybeInt (toBoundedInteger t)
True -> maybe (fail "integer passed is out of bounds") (pure . NonNegativeInt) $ toBoundedInteger t
False -> fail "negative value not allowed"
where
maybeInt x = case x of
Just v -> return v
Nothing -> fail "integer passed is out of bounds"
newtype NonNegativeDiffTime = NonNegativeDiffTime { unNonNegativeDiffTime :: DiffTime }
deriving (Show, Eq,ToJSON,Generic, NFData, Cacheable, Num)
@ -351,3 +406,35 @@ instance Arbitrary Timeout where
defaultActionTimeoutSecs :: Timeout
defaultActionTimeoutSecs = Timeout 30
data UrlConf
= UrlValue !InputWebhook
| UrlFromEnv !T.Text
deriving (Show, Eq, Generic, Lift)
instance NFData UrlConf
instance Cacheable UrlConf
instance ToJSON UrlConf where
toJSON (UrlValue w) = toJSON w
toJSON (UrlFromEnv wEnv) = object ["from_env" .= wEnv ]
instance FromJSON UrlConf where
parseJSON (Object o) = UrlFromEnv <$> o .: "from_env"
parseJSON t@(String _) =
case (fromJSON t) of
Error s -> fail s
Success a -> pure $ UrlValue a
parseJSON _ = fail "one of string or object must be provided for url/webhook"
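-- The two JSON shapes accepted above, mirrored by the 'ToJSON' instance:
--   "https://example.com/webhook"   -- parsed as UrlValue
--   { "from_env": "WEBHOOK_URL" }   -- parsed as UrlFromEnv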
resolveUrlConf
:: MonadError QErr m => Env.Environment -> UrlConf -> m Text
resolveUrlConf env = \case
UrlValue v -> unResolvedWebhook <$> resolveWebhook env v
UrlFromEnv envVar -> getEnv env envVar
getEnv :: QErrM m => Env.Environment -> T.Text -> m T.Text
getEnv env k = do
let mEnv = Env.lookupEnv env (T.unpack k)
case mEnv of
Nothing -> throw400 NotFound $ "environment variable '" <> k <> "' not set"
Just envVal -> return (T.pack envVal)

View File

@ -28,7 +28,7 @@ import Hasura.SQL.Backend
newtype ComputedFieldName =
ComputedFieldName { unComputedFieldName :: NonEmptyText}
deriving (Show, Eq, NFData, Lift, FromJSON, ToJSON, Q.ToPrepArg, ToTxt, Hashable, Q.FromCol, Generic, Arbitrary, Cacheable)
deriving (Show, Eq, NFData, Lift, FromJSON, ToJSON, ToJSONKey, Q.ToPrepArg, ToTxt, Hashable, Q.FromCol, Generic, Arbitrary, Cacheable)
computedFieldNameToText :: ComputedFieldName -> Text
computedFieldNameToText = unNonEmptyText . unComputedFieldName
@ -36,6 +36,16 @@ computedFieldNameToText = unNonEmptyText . unComputedFieldName
fromComputedField :: ComputedFieldName -> FieldName
fromComputedField = FieldName . computedFieldNameToText
data ComputedFieldDefinition
= ComputedFieldDefinition
{ _cfdFunction :: !QualifiedFunction
, _cfdTableArgument :: !(Maybe FunctionArgName)
, _cfdSessionArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Lift, Generic)
instance NFData ComputedFieldDefinition
instance Cacheable ComputedFieldDefinition
$(deriveJSON (aesonDrop 4 snakeCase){omitNothingFields = True} ''ComputedFieldDefinition)
-- | The function table argument is either the very first argument or the named
-- argument with an index. The index is 0 if the named argument is the first.
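-- For a hypothetical function f(tbl mytable, lim integer) the table argument
-- is simply the first argument, while for f(lim integer, tbl mytable) it is
-- the named argument "tbl" with index 1.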
data FunctionTableArgument
@ -62,7 +72,7 @@ instance Cacheable FunctionSessionArgument
instance ToJSON FunctionSessionArgument where
toJSON (FunctionSessionArgument argName _) = toJSON argName
data ComputedFieldReturn (b :: Backend)
data ComputedFieldReturn (b :: BackendType)
= CFRScalar !(ScalarType b)
| CFRSetofTable !QualifiedTable
deriving (Generic)
@ -91,7 +101,7 @@ data ComputedFieldFunction
instance Cacheable ComputedFieldFunction
$(deriveToJSON (aesonDrop 4 snakeCase) ''ComputedFieldFunction)
data ComputedFieldInfo (b :: Backend)
data ComputedFieldInfo (b :: BackendType)
= ComputedFieldInfo
{ _cfiName :: !ComputedFieldName
, _cfiFunction :: !ComputedFieldFunction

View File

@ -267,7 +267,7 @@ type AnnotatedObjectType b =
type AnnotatedObjects b = Map.HashMap G.Name (AnnotatedObjectType b)
data AnnotatedCustomTypes (b :: Backend)
data AnnotatedCustomTypes (b :: BackendType)
= AnnotatedCustomTypes
{ _actNonObjects :: !NonObjectTypeMap
, _actObjects :: !(AnnotatedObjects b)

View File

@ -107,6 +107,7 @@ data RetryConf
, rcTimeoutSec :: !(Maybe Int)
} deriving (Show, Eq, Generic, Lift)
instance NFData RetryConf
instance Cacheable RetryConf
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RetryConf)
data EventHeaderInfo
@ -222,7 +223,7 @@ data EventTriggerConf
, etcRetryConf :: !RetryConf
, etcHeaders :: !(Maybe [HeaderConf])
} deriving (Show, Eq, Lift, Generic)
instance Cacheable EventTriggerConf
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''EventTriggerConf)
newtype RedeliverEventQuery

View File

@ -78,3 +78,51 @@ $(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionInfo)
getInputArgs :: FunctionInfo -> Seq.Seq FunctionArg
getInputArgs =
Seq.fromList . mapMaybe (^? _IAUserProvided) . toList . fiInputArgs
type FunctionCache = HashMap QualifiedFunction FunctionInfo -- info of all functions
-- Types related to metadata requests
data FunctionConfig
= FunctionConfig
{ _fcSessionArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Generic, Lift)
instance NFData FunctionConfig
instance Cacheable FunctionConfig
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields = True} ''FunctionConfig)
emptyFunctionConfig :: FunctionConfig
emptyFunctionConfig = FunctionConfig Nothing
data TrackFunctionV2
= TrackFunctionV2
{ _tfv2Function :: !QualifiedFunction
, _tfv2Configuration :: !FunctionConfig
} deriving (Show, Eq, Lift, Generic)
$(deriveToJSON (aesonDrop 5 snakeCase) ''TrackFunctionV2)
instance FromJSON TrackFunctionV2 where
parseJSON = withObject "Object" $ \o ->
TrackFunctionV2
<$> o .: "function"
<*> o .:? "configuration" .!= emptyFunctionConfig
-- | Raw SQL function metadata from Postgres
data RawFunctionInfo
= RawFunctionInfo
{ rfiHasVariadic :: !Bool
, rfiFunctionType :: !FunctionType
, rfiReturnTypeSchema :: !SchemaName
, rfiReturnTypeName :: !PGScalarType
, rfiReturnTypeType :: !PGTypeKind
, rfiReturnsSet :: !Bool
, rfiInputArgTypes :: ![QualifiedPGType]
, rfiInputArgNames :: ![FunctionArgName]
, rfiDefaultArgs :: !Int
, rfiReturnsTable :: !Bool
, rfiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq, Generic)
instance NFData RawFunctionInfo
instance Cacheable RawFunctionInfo
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFunctionInfo)
type PostgresFunctionsMetadata = HashMap QualifiedFunction [RawFunctionInfo]

Some files were not shown because too many files have changed in this diff.