add relay modern support (#4458)

* validation support for unions and interfaces

* refactor SQL generation logic for improved readability

* '/v1/relay' endpoint for relay schema

* implement 'Node' interface and top level 'node' field resolver

* add relay toggle on graphiql

* fix explain api response & index plan id with query type

* add hasura mutations to relay

* add relay pytests

* update CHANGELOG.md

Co-authored-by: rakeshkky <12475069+rakeshkky@users.noreply.github.com>
Co-authored-by: Rishichandra Wawhal <rishi@hasura.io>
Co-authored-by: Rikin Kachhia <54616969+rikinsk@users.noreply.github.com>
This commit is contained in:
Vamshi Surabhi 2020-06-08 17:43:01 +05:30 committed by GitHub
parent 762e947ae2
commit 2a9bc2354f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
90 changed files with 5485 additions and 2023 deletions

View File

@ -2,6 +2,14 @@
## Next release
### Relay
The Hasura GraphQL Engine now serves a [Relay](https://relay.dev/en/) schema for Postgres tables that have a primary key defined.
The Relay schema can be accessed through `/v1/relay` endpoint.
[Add docs links][add console screenshot for relay toggle]
### Remote Joins
Remote Joins extend the concept of joining data across tables to joining data across tables and remote schemas.

View File

@ -10,6 +10,7 @@ const Endpoints = {
getSchema: `${baseUrl}/v1/query`,
serverConfig: `${baseUrl}/v1alpha1/config`,
graphQLUrl: `${baseUrl}/v1/graphql`,
relayURL: `${baseUrl}/v1/relay`,
schemaChange: `${baseUrl}/v1/query`,
query: `${baseUrl}/v1/query`,
rawSQL: `${baseUrl}/v1/query`,

View File

@ -1263,12 +1263,6 @@ code {
display: block;
top: 0;
z-index: 11;
.stickySeparator {
padding-top: 60px;
margin-left: -15px;
margin-right: -15px;
}
}
.fixed_header_internal_link {

View File

@ -2,24 +2,21 @@ import React from 'react';
import OverlayTrigger from 'react-bootstrap/lib/OverlayTrigger';
import Tooltip from 'react-bootstrap/lib/Tooltip';
import styles from './Tooltip.scss';
const tooltipGen = (message: string) => {
return <Tooltip id={message}>{message}</Tooltip>;
};
export interface TooltipProps extends React.ComponentProps<'i'> {
};export interface TooltipProps extends React.ComponentProps<'i'> {
message: string;
placement?: 'right' | 'left' | 'top' | 'bottom';
className?: string;
}
const ToolTip: React.FC<TooltipProps> = ({ message, placement = 'right' }) => (
const ToolTip: React.FC<TooltipProps> = ({ message, placement = 'right', children }) => (
<OverlayTrigger placement={placement} overlay={tooltipGen(message)}>
<i
className={`fa fa-question-circle + ${styles.tooltipIcon}`}
aria-hidden="true"
/>
{children || (
<i
className={`fa fa-question-circle ${styles.tooltipIcon}`}
aria-hidden="true"
/>
)}
</OverlayTrigger>
);
export default ToolTip;
export default ToolTip;

View File

@ -6,6 +6,14 @@ import {
import React from 'react';
import endpoints from '../../../Endpoints';
// Build the standard GraphQL-over-HTTP request payload ({ query, variables })
// that is POSTed to the /v1/graphql and /v1/relay endpoints.
export const getGraphQLQueryPayload = (
  query: string,
  variables: Record<string, any>
) => {
  return { query, variables };
};
export const useIntrospectionSchema = (headers = {}) => {
const [schema, setSchema] = React.useState<GraphQLSchema | null>(null);
const [loading, setLoading] = React.useState(true);
@ -17,7 +25,7 @@ export const useIntrospectionSchema = (headers = {}) => {
fetch(endpoints.graphQLUrl, {
method: 'POST',
headers,
body: JSON.stringify({ query: getIntrospectionQuery() }),
body: JSON.stringify(getGraphQLQueryPayload(getIntrospectionQuery(), {})),
})
.then(r => r.json())
.then(response => {

View File

@ -9,7 +9,7 @@ import { WebSocketLink } from 'apollo-link-ws';
import { parse } from 'graphql';
import { execute } from 'apollo-link';
import { getHeadersAsJSON } from './utils';
import { getHeadersAsJSON, getGraphQLEndpoint } from './utils';
import { saveAppState, clearState } from '../../AppState.js';
import { ADMIN_SECRET_HEADER_KEY } from '../../../constants';
@ -44,6 +44,18 @@ const CREATE_WEBSOCKET_CLIENT = 'ApiExplorer/CREATE_WEBSOCKET_CLIENT';
const FOCUS_ROLE_HEADER = 'ApiExplorer/FOCUS_ROLE_HEADER';
const UNFOCUS_ROLE_HEADER = 'ApiExplorer/UNFOCUS_ROLE_HEADER';
// Redux action type: toggles the global GraphiQL loading indicator.
const SET_LOADING = 'ApiExplorer/SET_LOADING';

// Action creator for SET_LOADING; the boolean flag is carried in `data`.
export const setLoading = isLoading => {
  return {
    type: SET_LOADING,
    data: isLoading,
  };
};

// Redux action type: switches GraphiQL between 'graphql' and 'relay' modes.
const SWITCH_GRAPHIQL_MODE = 'ApiExplorer/SWITCH_GRAPHIQL_MODE';

// Action creator for SWITCH_GRAPHIQL_MODE; `mode` is the target mode string.
export const switchGraphiQLMode = mode => {
  return {
    type: SWITCH_GRAPHIQL_MODE,
    mode,
  };
};
const clearHistory = () => {
return {
type: CLEAR_HISTORY,
@ -88,8 +100,9 @@ const getChangedHeaders = (headers, changedHeaderDetails) => {
return nonEmptyHeaders;
};
const verifyJWTToken = token => dispatch => {
const url = Endpoints.graphQLUrl;
const verifyJWTToken = token => (dispatch, getState) => {
const { mode: graphiqlMode } = getState().apiexplorer;
const url = getGraphQLEndpoint(graphiqlMode);
const body = {
query: '{ __type(name: "dummy") {name}}',
variables: null,
@ -199,10 +212,11 @@ const graphQLFetcherFinal = (graphQLParams, url, headers) => {
};
/* Analyse Fetcher */
const analyzeFetcher = (url, headers) => {
const analyzeFetcher = (headers, mode) => {
return query => {
const editedQuery = {
query,
is_relay: mode === 'relay',
};
const user = {
@ -227,7 +241,7 @@ const analyzeFetcher = (url, headers) => {
editedQuery.user = user;
return fetch(`${url}/explain`, {
return fetch(`${Endpoints.graphQLUrl}/explain`, {
method: 'post',
headers: reqHeaders,
body: JSON.stringify(editedQuery),
@ -619,6 +633,16 @@ const apiExplorerReducer = (state = defaultState, action) => {
},
},
};
case SWITCH_GRAPHIQL_MODE:
return {
...state,
mode: action.mode,
};
case SET_LOADING:
return {
...state,
loading: action.data,
};
default:
return state;
}

View File

@ -4,6 +4,7 @@ import QueryAnalyzer from './QueryAnalyzer';
import GraphiQL from 'graphiql';
import { print, parse } from 'graphql';
import { isValidGraphQLOperation } from '../utils';
import { getGraphQLQueryPayload } from '../../../Common/utils/graphqlUtils';
export default class AnalyseButton extends React.Component {
constructor(props) {
@ -20,7 +21,7 @@ export default class AnalyseButton extends React.Component {
};
}
render() {
const operations = this.props.operations;
const { operations, mode } = this.props;
const optionsOpen = this.state.optionsOpen;
const hasOptions = operations && operations.length > 1;
@ -67,6 +68,7 @@ export default class AnalyseButton extends React.Component {
{this.state.analyseQuery && (
<QueryAnalyzer
show={this.state.isAnalysing}
mode={mode}
analyseQuery={this.state.analyseQuery}
clearAnalyse={this.clearAnalyse.bind(this)}
{...this.props}
@ -125,15 +127,13 @@ export default class AnalyseButton extends React.Component {
}
const plainQuery = print(parseQuery);
const query = {
query: plainQuery,
variables: jsonVariables,
};
const query = getGraphQLQueryPayload(plainQuery, jsonVariables);
if (operation) {
query.operationName = operation;
}
const analyseQuery = {
query,
is_relay: this.props.mode === 'relay',
};
this.setState({
analyseQuery,

View File

@ -63,6 +63,8 @@ class ApiExplorer extends Component {
location,
serverVersion,
serverConfig,
mode,
loading,
} = this.props;
const styles = require('./ApiExplorer.scss');
@ -83,12 +85,14 @@ class ApiExplorer extends Component {
details={displayedApi.details}
request={displayedApi.request}
route={route}
mode={mode}
dataHeaders={dataHeaders}
numberOfTables={tables.length}
headerFocus={headerFocus}
urlParams={location.query}
serverVersion={serverVersion}
consoleUrl={consoleUrl}
loading={loading}
serverConfig={serverConfig}
/>
</div>
@ -105,6 +109,7 @@ ApiExplorer.propTypes = {
tables: PropTypes.array.isRequired,
headerFocus: PropTypes.bool.isRequired,
location: PropTypes.object.isRequired,
mode: PropTypes.string.isRequired,
};
const generatedApiExplorer = connect => {

View File

@ -827,3 +827,7 @@ label {
}
}
}
.graphiqlModeToggle {
float: right;
}

View File

@ -1,12 +1,10 @@
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import jwt from 'jsonwebtoken';
import TextAreaWithCopy from '../../../Common/TextAreaWithCopy/TextAreaWithCopy';
import OverlayTrigger from 'react-bootstrap/lib/OverlayTrigger';
import Tooltip from 'react-bootstrap/lib/Tooltip';
import Modal from '../../../Common/Modal/Modal';
import Tooltip from '../../../Common/Tooltip/Tooltip';
import {
changeRequestHeader,
@ -15,9 +13,11 @@ import {
unfocusTypingHeader,
verifyJWTToken,
setHeadersBulk,
switchGraphiQLMode,
} from '../Actions';
import GraphiQLWrapper from '../GraphiQLWrapper/GraphiQLWrapper';
import Toggle from '../../../Common/Toggle/Toggle';
import CollapsibleToggle from '../../../Common/CollapsibleToggle/CollapsibleToggle';
@ -34,19 +34,13 @@ import {
getPersistedAdminSecretHeaderWasAdded,
persistAdminSecretHeaderWasAdded,
removePersistedAdminSecretHeaderWasAdded,
persistGraphiQLMode,
} from './utils';
import { getGraphQLEndpoint } from '../utils';
import styles from '../ApiExplorer.scss';
import { ADMIN_SECRET_HEADER_KEY } from '../../../../constants';
const inspectJWTTooltip = (
<Tooltip id="tooltip-inspect-jwt">Decode JWT</Tooltip>
);
const jwtValidityStatus = message => (
<Tooltip id="tooltip-jwt-validity-status">{message}</Tooltip>
);
/* When the page is loaded for the first time, hydrate the header state from the localStorage
* Keep syncing the localStorage state when user modifies.
* */
@ -207,6 +201,7 @@ class ApiRequest extends Component {
}
render() {
const { mode, dispatch, loading } = this.props;
const { isAnalyzingToken, tokenInfo, analyzingHeaderRow } = this.state;
const { is_jwt_set: isJWTSet = false } = this.props.serverConfig;
@ -227,6 +222,13 @@ class ApiRequest extends Component {
this.setState({ endpointSectionIsOpen: newIsOpen });
};
const toggleGraphiqlMode = () => {
if (loading) return;
const newMode = mode === 'relay' ? 'graphql' : 'relay';
persistGraphiQLMode(newMode);
dispatch(switchGraphiQLMode(newMode));
};
return (
<CollapsibleToggle
title={'GraphQL Endpoint'}
@ -244,7 +246,9 @@ class ApiRequest extends Component {
styles.stickyHeader
}
>
<div className={'col-xs-12 ' + styles.padd_remove}>
<div
className={`col-xs-12 ${styles.padd_remove} ${styles.add_mar_bottom_mid}`}
>
<div
className={
'input-group ' +
@ -259,15 +263,33 @@ class ApiRequest extends Component {
</button>
</div>
<input
onChange={this.onUrlChanged}
value={this.props.url || ''}
value={getGraphQLEndpoint(mode)}
type="text"
readOnly
className={styles.inputGroupInput + ' form-control '}
/>
</div>
</div>
<div className={styles.stickySeparator} />
<div
className={`${styles.display_flex} ${styles.graphiqlModeToggle} ${styles.cursorPointer}`}
onClick={toggleGraphiqlMode}
>
<Toggle
checked={mode === 'relay'}
className={`${styles.display_flex} ${styles.add_mar_right_mid}`}
readOnly
disabled={loading}
icons={false}
/>
<span className={styles.add_mar_right_mid}>Relay API</span>
<Tooltip
id="relay-mode-toggle"
placement="left"
message={
'Toggle to point this GraphiQL to a relay-compliant GraphQL API served at /v1/relay'
}
/>
</div>
</div>
</CollapsibleToggle>
);
@ -417,12 +439,18 @@ class ApiRequest extends Component {
if (isAdminSecret) {
headerAdminVal = (
<i
className={styles.showAdminSecret + ' fa fa-eye'}
data-header-id={i}
aria-hidden="true"
onClick={onShowAdminSecretClicked}
/>
<Tooltip
id="admin-secret-show"
placement="left"
message="Show admin secret"
>
<i
className={styles.showAdminSecret + ' fa fa-eye'}
data-header-id={i}
aria-hidden="true"
onClick={onShowAdminSecretClicked}
/>
</Tooltip>
);
}
@ -462,9 +490,13 @@ class ApiRequest extends Component {
if (isAuthHeader && isJWTSet) {
inspectorIcon = (
<OverlayTrigger placement="top" overlay={inspectJWTTooltip}>
<Tooltip
id="tooltip-inspect-jwt"
message="Decode JWT"
placement="left"
>
{getAnalyzeIcon()}
</OverlayTrigger>
</Tooltip>
);
}
@ -564,6 +596,7 @@ class ApiRequest extends Component {
return (
<div className={styles.apiRequestBody}>
<GraphiQLWrapper
mode={mode}
data={this.props}
numberOfTables={this.props.numberOfTables}
dispatch={this.props.dispatch}
@ -606,14 +639,14 @@ class ApiRequest extends Component {
switch (true) {
case tokenVerified:
return (
<OverlayTrigger
placement="top"
overlay={jwtValidityStatus('Valid JWT token')}
<Tooltip
id="tooltip-jwt-validity-status"
message="Valid JWT token"
>
<span className={styles.valid_jwt_token}>
<i className="fa fa-check" />
</span>
</OverlayTrigger>
</Tooltip>
);
case !tokenVerified && JWTError.length > 0:
return (

View File

@ -147,3 +147,11 @@ export const parseAuthHeader = header => {
return { isAuthHeader, token };
};
export const persistGraphiQLMode = mode => {
window.localStorage.setItem('ApiExplorer:GraphiQLMode', mode);
};
export const getPersistedGraphiQLMode = () => {
return window.localStorage.getItem('ApiExplorer:GraphiQLMode');
};

View File

@ -58,6 +58,7 @@ class ApiRequestWrapper extends Component {
this.props.request.bodyType ? this.props.request.bodyType : ''
}
credentials={this.props.credentials}
mode={this.props.mode}
method={this.props.request.method}
url={this.props.request.url}
headers={this.props.request.headers}
@ -65,6 +66,7 @@ class ApiRequestWrapper extends Component {
params={this.props.request.params}
explorerData={this.props.explorerData}
dispatch={this.props.dispatch}
loading={this.props.loading}
dataHeaders={this.props.dataHeaders}
numberOfTables={this.props.numberOfTables}
headerFocus={this.props.headerFocus}

View File

@ -1,6 +1,7 @@
import React, { Component } from 'react';
import { push } from 'react-router-redux';
import GraphiQL from 'graphiql';
import { connect } from 'react-redux';
import PropTypes from 'prop-types';
import GraphiQLErrorBoundary from './GraphiQLErrorBoundary';
import OneGraphExplorer from '../OneGraphExplorer/OneGraphExplorer';
@ -26,6 +27,7 @@ import {
setTypeDefinition,
setDerivedActionParentOperation,
} from '../../Actions/Add/reducer';
import { getGraphQLEndpoint } from '../utils';
import 'graphiql/graphiql.css';
import './GraphiQL.css';
@ -52,7 +54,14 @@ class GraphiQLWrapper extends Component {
render() {
const styles = require('../../../Common/Common.scss');
const { numberOfTables, urlParams, headerFocus, dispatch } = this.props;
const {
numberOfTables,
urlParams,
headerFocus,
dispatch,
mode,
loading,
} = this.props;
const graphqlNetworkData = this.props.data;
const graphQLFetcher = graphQLParams => {
if (headerFocus) {
@ -61,14 +70,14 @@ class GraphiQLWrapper extends Component {
return graphQLFetcherFinal(
graphQLParams,
graphqlNetworkData.url,
getGraphQLEndpoint(mode),
graphqlNetworkData.headers
);
};
const analyzeFetcherInstance = analyzeFetcher(
graphqlNetworkData.url,
graphqlNetworkData.headers
graphqlNetworkData.headers,
mode
);
let graphiqlContext;
@ -170,12 +179,14 @@ class GraphiQLWrapper extends Component {
onClick: () => window.open(voyagerUrl, '_blank'),
icon: <i className="fa fa-external-link" aria-hidden="true" />,
},
{
];
if (mode === 'graphql') {
buttons.push({
label: 'Derive action',
title: 'Derive action for the given mutation',
onClick: deriveActionFromOperation,
},
];
});
}
return buttons.map(b => {
return <GraphiQL.Button key={b.label} {...b} />;
});
@ -183,12 +194,12 @@ class GraphiQLWrapper extends Component {
return (
<GraphiQL
{...graphiqlProps}
ref={c => {
graphiqlContext = c;
}}
fetcher={graphQLFetcher}
voyagerUrl={voyagerUrl}
{...graphiqlProps}
>
<GraphiQL.Logo>GraphiQL</GraphiQL.Logo>
<GraphiQL.Toolbar>
@ -210,13 +221,16 @@ class GraphiQLWrapper extends Component {
>
<OneGraphExplorer
renderGraphiql={renderGraphiql}
endpoint={graphqlNetworkData.url}
endpoint={getGraphQLEndpoint(mode)}
dispatch={dispatch}
headers={graphqlNetworkData.headers}
headersInitialised={graphqlNetworkData.headersInitialised}
headerFocus={headerFocus}
urlParams={urlParams}
loading={loading}
numberOfTables={numberOfTables}
dispatch={dispatch}
mode={mode}
/>
</div>
</GraphiQLErrorBoundary>
@ -232,4 +246,11 @@ GraphiQLWrapper.propTypes = {
urlParams: PropTypes.object.isRequired,
};
export default GraphiQLWrapper;
const mapStateToProps = state => ({
mode: state.apiexplorer.mode,
loading: state.apiexplorer.loading,
});
const GraphiQLWrapperConnected = connect(mapStateToProps)(GraphiQLWrapper);
export default GraphiQLWrapperConnected;

View File

@ -1,6 +1,7 @@
import React from 'react';
import { getIntrospectionQuery, buildClientSchema } from 'graphql';
import GraphiQLExplorer from 'graphiql-explorer';
import { setLoading } from '../Actions';
import {
makeDefaultArg,
@ -25,7 +26,6 @@ class OneGraphExplorer extends React.Component {
schema: null,
query: undefined,
isResizing: false,
loading: false,
previousIntrospectionHeaders: [],
};
@ -34,9 +34,14 @@ class OneGraphExplorer extends React.Component {
this.introspect();
}
componentDidUpdate() {
const { headerFocus, headers } = this.props;
const { loading, previousIntrospectionHeaders } = this.state;
componentDidUpdate(prevProps) {
const { headerFocus, headers, loading } = this.props;
const { previousIntrospectionHeaders } = this.state;
// always introspect if mode changes
if (this.props.mode !== prevProps.mode) {
this.introspect();
return;
}
if (!headerFocus && !loading) {
if (
JSON.stringify(headers) !== JSON.stringify(previousIntrospectionHeaders)
@ -78,12 +83,18 @@ class OneGraphExplorer extends React.Component {
}
introspect() {
const { endpoint, headersInitialised, headers: headers_ } = this.props;
const {
endpoint,
headersInitialised,
headers: headers_,
dispatch,
} = this.props;
if (!headersInitialised) {
return;
}
const headers = JSON.parse(JSON.stringify(headers_));
this.setState({ loading: true });
dispatch(setLoading(true));
this.setState({ schema: null });
fetch(endpoint, {
method: 'POST',
headers: getHeadersAsJSON(headers || []),
@ -95,16 +106,17 @@ class OneGraphExplorer extends React.Component {
.then(result => {
this.setState({
schema: buildClientSchema(result.data),
loading: false,
previousIntrospectionHeaders: headers,
});
})
.catch(() => {
this.setState({
schema: null,
loading: false,
previousIntrospectionHeaders: headers,
});
})
.finally(() => {
dispatch(setLoading(false));
});
}

View File

@ -1,4 +1,7 @@
import globals from '../../../Globals';
import { getPersistedGraphiQLMode } from './ApiRequest/utils';
import { getGraphQLEndpoint } from './utils';
const persistedGraphiqlMode = getPersistedGraphiQLMode();
const defaultHeader = [
{
@ -8,37 +11,33 @@ const defaultHeader = [
isNewHeader: false,
isDisabled: true,
},
{
key: '',
value: '',
isActive: false,
isNewHeader: true,
},
];
defaultHeader.push({
key: '',
value: '',
isActive: false,
isNewHeader: true,
});
// Prefix a path with the configured data API base URL from Globals.
const getUrl = path => `${globals.dataApiUrl}${path}`;
const dataApisContent = [];
// check project version
dataApisContent.push({
id: 'DataApi-3',
details: {
title: 'GraphQL API',
description:
'GraphQL API for CRUD operations on tables & views in your database',
category: 'data',
const dataApisContent = [
{
id: 'DataApi-3',
details: {
title: 'GraphQL API',
description:
'GraphQL API for CRUD operations on tables & views in your database',
category: 'data',
},
request: {
method: 'POST',
url: getGraphQLEndpoint(persistedGraphiqlMode),
headers: defaultHeader,
headersInitialised: false,
bodyType: 'graphql',
params: JSON.stringify({}, null, 4),
},
},
request: {
method: 'POST',
url: getUrl('/v1/graphql'),
headers: defaultHeader,
headersInitialised: false,
bodyType: 'graphql',
params: JSON.stringify({}, null, 4),
},
});
];
const dataApis = {
title: 'Data',
@ -64,6 +63,8 @@ const defaultState = {
explorerData,
authApiExpanded: 'Username-password Login',
headerFocus: false,
mode: persistedGraphiqlMode,
loading: false,
};
export default defaultState;

View File

@ -1,3 +1,5 @@
import endpoints from '../../../Endpoints';
export const getHeadersAsJSON = (headers = []) => {
const headerJSON = {};
const nonEmptyHeaders = headers.filter(header => {
@ -16,3 +18,6 @@ export const isValidGraphQLOperation = operation => {
operation.name && operation.name.value && operation.operation === 'query'
);
};
// Resolve the GraphQL endpoint for the current GraphiQL mode:
// the relay endpoint ('/v1/relay') in relay mode, otherwise the
// regular GraphQL endpoint ('/v1/graphql').
export const getGraphQLEndpoint = mode => {
  if (mode === 'relay') {
    return endpoints.relayURL;
  }
  return endpoints.graphQLUrl;
};

View File

@ -41,7 +41,7 @@ source-repository-package
source-repository-package
type: git
location: https://github.com/hasura/graphql-parser-hs.git
tag: ba2379640248ce67cdfe700cbb79acd91c644bdb
tag: f4a093981ca5626982a17c2bfaad047cc0834a81
source-repository-package
type: git

View File

@ -202,6 +202,7 @@ library
, ghc-heap-view
, directory
, semigroups >= 0.19.1
-- scheduled triggers
, cron >= 0.6.2
@ -353,11 +354,13 @@ library
, Hasura.GraphQL.Schema.Mutation.Update
, Hasura.GraphQL.Schema.Mutation.Delete
, Hasura.GraphQL.Schema
, Hasura.GraphQL.RelaySchema
, Hasura.GraphQL.Utils
, Hasura.GraphQL.NormalForm
, Hasura.GraphQL.Validate
, Hasura.GraphQL.Validate.Types
, Hasura.GraphQL.Validate.Context
, Hasura.GraphQL.Validate.Field
, Hasura.GraphQL.Validate.SelectionSet
, Hasura.GraphQL.Validate.InputValue
, Hasura.GraphQL.Explain
, Hasura.GraphQL.Execute

View File

@ -1,14 +1,14 @@
module Data.HashMap.Strict.InsOrd.Extended
( OMap.elems
( module OMap
, groupTuples
, groupListWith
) where
import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Sequence.NonEmpty as NE
import qualified Data.List as L
import Data.Hashable (Hashable)
import Data.List (foldl')
import Prelude (Eq, Foldable, Functor, fmap, ($))
@ -16,7 +16,7 @@ groupTuples
:: (Eq k, Hashable k, Foldable t)
=> t (k, v) -> OMap.InsOrdHashMap k (NE.NESeq v)
groupTuples =
foldl' groupFlds OMap.empty
L.foldl' groupFlds OMap.empty
where
groupFlds m (k, v) =
OMap.insertWith (\_ c -> c NE.|> v) k (NE.init v) m

View File

@ -4,12 +4,14 @@ module Data.Sequence.NonEmpty
, (|>)
, init
, head
, tail
, toSeq
) where
import qualified Data.Foldable as F
import qualified Data.Foldable as Foldable
import qualified Data.Sequence as Seq
import Prelude (Eq, Show, fst, (.))
import qualified Data.Functor as Functor
import Prelude (Eq, Show, fst, snd, (.), Semigroup(..))
infixr 5 <|
infixl 5 |>
@ -18,8 +20,12 @@ newtype NESeq a
= NESeq { unNESeq :: (a, Seq.Seq a)}
deriving (Show, Eq)
instance F.Foldable NESeq where
foldr f v = F.foldr f v . toSeq
instance Functor.Functor NESeq where
fmap f (NESeq (a, rest))
= NESeq (f a, Functor.fmap f rest)
instance Foldable.Foldable NESeq where
foldr f v = Foldable.foldr f v . toSeq
init :: a -> NESeq a
init a = NESeq (a, Seq.empty)
@ -27,6 +33,9 @@ init a = NESeq (a, Seq.empty)
head :: NESeq a -> a
head = fst . unNESeq
tail :: NESeq a -> Seq.Seq a
tail = snd . unNESeq
(|>) :: NESeq a -> a -> NESeq a
(NESeq (h, l)) |> v = NESeq (h, l Seq.|> v)
@ -35,3 +44,7 @@ v <| (NESeq (h, l)) = NESeq (v, h Seq.<| l)
toSeq :: NESeq a -> Seq.Seq a
toSeq (NESeq (v, l)) = v Seq.<| l
instance Semigroup (NESeq a) where
(NESeq (h, l)) <> r =
NESeq (h, l <> toSeq r)

View File

@ -1,5 +1,6 @@
module Hasura.GraphQL.Execute
( GQExecPlan(..)
, EQ.GraphQLQueryType(..)
, ExecPlanPartial
, getExecPlanPartial
@ -45,11 +46,13 @@ import Hasura.Server.Utils (RequestId)
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Execute.LiveQuery as EL
import qualified Hasura.GraphQL.Execute.Plan as EP
import qualified Hasura.GraphQL.Execute.Query as EQ
import qualified Hasura.GraphQL.Resolve as GR
import qualified Hasura.GraphQL.Validate as VQ
import qualified Hasura.GraphQL.Validate.SelectionSet as VQ
import qualified Hasura.GraphQL.Validate.Types as VT
import qualified Hasura.Logging as L
import qualified Hasura.Server.Telemetry.Counters as Telem
@ -111,21 +114,26 @@ gatherTypeLocs gCtx nodes =
in maybe qr (Map.union qr) mr
-- This is for when the graphql query is validated
type ExecPlanPartial = GQExecPlan (GCtx, VQ.RootSelSet)
type ExecPlanPartial = GQExecPlan (GCtx, VQ.RootSelectionSet)
getExecPlanPartial
:: (MonadReusability m, MonadError QErr m)
=> UserInfo
-> SchemaCache
-> EQ.GraphQLQueryType
-> Bool
-> GQLReqParsed
-> m ExecPlanPartial
getExecPlanPartial userInfo sc enableAL req = do
getExecPlanPartial userInfo sc queryType enableAL req = do
-- check if query is in allowlist
when enableAL checkQueryInAllowlist
let gCtx = getGCtx (_uiBackendOnlyFieldAccess userInfo) sc roleName
let gCtx = case queryType of
EQ.QueryHasura -> getGCtx (_uiBackendOnlyFieldAccess userInfo) sc roleName
EQ.QueryRelay -> maybe GC.emptyGCtx _rctxDefault $
Map.lookup roleName (scRelayGCtxMap sc)
queryParts <- flip runReaderT gCtx $ VQ.getQueryParts req
let opDef = VQ.qpOpDef queryParts
@ -179,14 +187,15 @@ getResolvedExecPlan
-> Bool
-> SchemaCache
-> SchemaCacheVer
-> EQ.GraphQLQueryType
-> HTTP.Manager
-> [N.Header]
-> GQLReqUnparsed
-> m (Telem.CacheHit, GQExecPlanResolved)
getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
enableAL sc scVer httpManager reqHeaders reqUnparsed = do
enableAL sc scVer queryType httpManager reqHeaders reqUnparsed = do
planM <- liftIO $ EP.getPlan scVer (_uiRole userInfo)
opNameM queryStr planCache
opNameM queryStr queryType planCache
let usrVars = _uiSession userInfo
case planM of
-- plans are only for queries and subscriptions
@ -201,24 +210,26 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
GQLReq opNameM queryStr queryVars = reqUnparsed
addPlanToCache plan =
liftIO $ EP.addPlan scVer (_uiRole userInfo)
opNameM queryStr plan planCache
opNameM queryStr plan queryType planCache
noExistingPlan :: m GQExecPlanResolved
noExistingPlan = do
req <- toParsed reqUnparsed
(partialExecPlan, queryReusability) <- runReusabilityT $
getExecPlanPartial userInfo sc enableAL req
getExecPlanPartial userInfo sc queryType enableAL req
forM partialExecPlan $ \(gCtx, rootSelSet) ->
case rootSelSet of
VQ.RMutation selSet -> do
(tx, respHeaders) <- getMutOp gCtx sqlGenCtx userInfo httpManager reqHeaders selSet
pure $ ExOpMutation respHeaders tx
VQ.RQuery selSet -> do
(queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx httpManager reqHeaders userInfo queryReusability (allowQueryActionExecuter httpManager reqHeaders) selSet
(queryTx, plan, genSql) <- getQueryOp gCtx sqlGenCtx httpManager reqHeaders userInfo
queryReusability (allowQueryActionExecuter httpManager reqHeaders) selSet
traverse_ (addPlanToCache . EP.RPQuery) plan
return $ ExOpQuery queryTx (Just genSql)
VQ.RSubscription fld -> do
(lqOp, plan) <- getSubsOp pgExecCtx gCtx sqlGenCtx userInfo queryReusability (restrictActionExecuter "query actions cannot be run as a subscription") fld
VQ.RSubscription fields -> do
(lqOp, plan) <- getSubsOp pgExecCtx gCtx sqlGenCtx userInfo queryReusability
(restrictActionExecuter "query actions cannot be run as a subscription") fields
traverse_ (addPlanToCache . EP.RPSubs) plan
return $ ExOpSubs lqOp
@ -264,7 +275,7 @@ getQueryOp
-> UserInfo
-> QueryReusability
-> QueryActionExecuter
-> VQ.SelSet
-> VQ.ObjectSelectionSet
-> m (LazyRespTx, Maybe EQ.ReusableQueryPlan, EQ.GeneratedSqlMap)
getQueryOp gCtx sqlGenCtx manager reqHdrs userInfo queryReusability actionExecuter selSet =
runE gCtx sqlGenCtx userInfo $ EQ.convertQuerySelSet manager reqHdrs queryReusability selSet actionExecuter
@ -283,14 +294,13 @@ resolveMutSelSet
, Has [N.Header] r
, MonadIO m
)
=> VQ.SelSet
=> VQ.ObjectSelectionSet
-> m (LazyRespTx, N.ResponseHeaders)
resolveMutSelSet fields = do
aliasedTxs <- forM (toList fields) $ \fld -> do
fldRespTx <- case VQ._fName fld of
aliasedTxs <- traverseObjectSelectionSet fields $ \fld ->
case VQ._fName fld of
"__typename" -> return (return $ encJFromJValue mutationRootNamedType, [])
_ -> evalReusabilityT $ GR.mutFldToTx fld
return (G.unName $ G.unAlias $ VQ._fAlias fld, fldRespTx)
-- combines all transactions into a single transaction
return (liftTx $ toSingleTx aliasedTxs, concatMap (snd . snd) aliasedTxs)
@ -311,7 +321,7 @@ getMutOp
-> UserInfo
-> HTTP.Manager
-> [N.Header]
-> VQ.SelSet
-> VQ.ObjectSelectionSet
-> m (LazyRespTx, N.ResponseHeaders)
getMutOp ctx sqlGenCtx userInfo manager reqHeaders selSet =
peelReaderT $ resolveMutSelSet selSet
@ -341,11 +351,11 @@ getSubsOp
-> UserInfo
-> QueryReusability
-> QueryActionExecuter
-> VQ.SelSet
-> VQ.ObjectSelectionSet
-> m (EL.LiveQueryPlan, Maybe EL.ReusableLiveQueryPlan)
getSubsOp pgExecCtx gCtx sqlGenCtx userInfo queryReusability actionExecuter fields =
runE gCtx sqlGenCtx userInfo $ EL.buildLiveQueryPlan pgExecCtx queryReusability actionExecuter fields
-- runE gCtx sqlGenCtx userInfo $ getSubsOpM pgExecCtx queryReusability fld actionExecuter
getSubsOp pgExecCtx gCtx sqlGenCtx userInfo queryReusability actionExecuter =
runE gCtx sqlGenCtx userInfo .
EL.buildLiveQueryPlan pgExecCtx queryReusability actionExecuter
execRemoteGQ
:: ( HasVersion

View File

@ -32,6 +32,7 @@ import qualified Data.Aeson.Extended as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString as B
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Text as T
import qualified Data.UUID.V4 as UUID
import qualified Database.PG.Query as Q
@ -46,15 +47,16 @@ import Data.Has
import Data.UUID (UUID)
import qualified Hasura.GraphQL.Resolve as GR
import qualified Hasura.GraphQL.Resolve.Action as RA
import qualified Hasura.GraphQL.Resolve.Types as GR
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.GraphQL.Validate as GV
import qualified Hasura.GraphQL.Validate.Types as GV
import qualified Hasura.SQL.DML as S
import Hasura.Db
import Hasura.GraphQL.Resolve.Action
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Utils
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Error
@ -67,7 +69,7 @@ import Hasura.SQL.Value
newtype MultiplexedQuery = MultiplexedQuery { unMultiplexedQuery :: Q.Query }
deriving (Show, Eq, Hashable, J.ToJSON)
mkMultiplexedQuery :: Map.HashMap G.Alias GR.QueryRootFldResolved -> MultiplexedQuery
mkMultiplexedQuery :: OMap.InsOrdHashMap G.Alias GR.QueryRootFldResolved -> MultiplexedQuery
mkMultiplexedQuery rootFields = MultiplexedQuery . Q.fromBuilder . toSQL $ S.mkSelect
{ S.selExtr =
-- SELECT _subs.result_id, _fld_resp.root AS result
@ -88,13 +90,13 @@ mkMultiplexedQuery rootFields = MultiplexedQuery . Q.fromBuilder . toSQL $ S.mkS
selectRootFields = S.mkSelect
{ S.selExtr = [S.Extractor rootFieldsJsonAggregate (Just . S.Alias $ Iden "root")]
, S.selFrom = Just . S.FromExp $
flip map (Map.toList rootFields) $ \(fieldAlias, resolvedAST) ->
S.mkSelFromItem (GR.toSQLSelect resolvedAST) (S.Alias $ aliasToIden fieldAlias)
flip map (OMap.toList rootFields) $ \(fieldAlias, resolvedAST) ->
GR.toSQLFromItem (S.Alias $ aliasToIden fieldAlias) resolvedAST
}
-- json_build_object('field1', field1.root, 'field2', field2.root, ...)
rootFieldsJsonAggregate = S.SEFnApp "json_build_object" rootFieldsJsonPairs Nothing
rootFieldsJsonPairs = flip concatMap (Map.keys rootFields) $ \fieldAlias ->
rootFieldsJsonPairs = flip concatMap (OMap.keys rootFields) $ \fieldAlias ->
[ S.SELit (G.unName $ G.unAlias fieldAlias)
, mkQualIden (aliasToIden fieldAlias) (Iden "root") ]
@ -269,25 +271,26 @@ buildLiveQueryPlan
:: ( MonadError QErr m
, MonadReader r m
, Has UserInfo r
, Has GR.FieldMap r
, Has GR.OrdByCtx r
, Has GR.QueryCtxMap r
, Has FieldMap r
, Has OrdByCtx r
, Has QueryCtxMap r
, Has SQLGenCtx r
, HasVersion
, MonadIO m
, HasVersion
)
=> PGExecCtx
-> GV.QueryReusability
-> RA.QueryActionExecuter
-> GV.SelSet
-> QueryReusability
-> QueryActionExecuter
-> ObjectSelectionSet
-> m (LiveQueryPlan, Maybe ReusableLiveQueryPlan)
buildLiveQueryPlan pgExecCtx initialReusability actionExecutioner fields = do
((resolvedASTs, (queryVariableValues, syntheticVariableValues)), finalReusability) <-
GV.runReusabilityTWith initialReusability . flip runStateT mempty $
fmap Map.fromList . for (toList fields) $ \field -> case GV._fName field of
buildLiveQueryPlan pgExecCtx initialReusability actionExecuter selectionSet = do
((resolvedASTMap, (queryVariableValues, syntheticVariableValues)), finalReusability) <-
runReusabilityTWith initialReusability $
flip runStateT mempty $ flip OMap.traverseWithKey (unAliasedFields $ unObjectSelectionSet selectionSet) $
\_ field -> case GV._fName field of
"__typename" -> throwVE "you cannot create a subscription on '__typename' field"
_ -> do
unresolvedAST <- GR.queryFldToPGAST field actionExecutioner
unresolvedAST <- GR.queryFldToPGAST field actionExecuter
resolvedAST <- GR.traverseQueryRootFldAST resolveMultiplexedValue unresolvedAST
let (_, remoteJoins) = GR.toPGQuery resolvedAST
@ -295,10 +298,10 @@ buildLiveQueryPlan pgExecCtx initialReusability actionExecutioner fields = do
when (remoteJoins /= mempty) $
throw400 NotSupported
"Remote relationships are not allowed in subscriptions"
pure (GV._fAlias field, resolvedAST)
pure resolvedAST
userInfo <- asks getter
let multiplexedQuery = mkMultiplexedQuery resolvedASTs
let multiplexedQuery = mkMultiplexedQuery resolvedASTMap
roleName = _uiRole userInfo
parameterizedPlan = ParameterizedLiveQueryPlan roleName multiplexedQuery
@ -309,7 +312,7 @@ buildLiveQueryPlan pgExecCtx initialReusability actionExecutioner fields = do
validatedSyntheticVars <- validateVariables pgExecCtx (toList syntheticVariableValues)
let cohortVariables = CohortVariables (_uiSession userInfo) validatedQueryVars validatedSyntheticVars
plan = LiveQueryPlan parameterizedPlan cohortVariables
varTypes = finalReusability ^? GV._Reusable
varTypes = finalReusability ^? _Reusable
reusablePlan = ReusableLiveQueryPlan parameterizedPlan validatedSyntheticVars <$> varTypes
pure (plan, reusablePlan)

View File

@ -28,17 +28,19 @@ data PlanId
, _piRole :: !RoleName
, _piOperationName :: !(Maybe GH.OperationName)
, _piQuery :: !GH.GQLQueryText
, _piQueryType :: !EQ.GraphQLQueryType
} deriving (Show, Eq, Ord, Generic)
instance Hashable PlanId
instance J.ToJSON PlanId where
toJSON (PlanId scVer rn opNameM query) =
toJSON (PlanId scVer rn opNameM query queryType) =
J.object
[ "schema_cache_version" J..= scVer
, "role" J..= rn
, "operation" J..= opNameM
, "query" J..= query
, "query_type" J..= queryType
]
newtype PlanCache
@ -64,19 +66,19 @@ initPlanCache options =
getPlan
:: SchemaCacheVer -> RoleName -> Maybe GH.OperationName -> GH.GQLQueryText
-> PlanCache -> IO (Maybe ReusablePlan)
getPlan schemaVer rn opNameM q (PlanCache planCache) =
-> EQ.GraphQLQueryType -> PlanCache -> IO (Maybe ReusablePlan)
getPlan schemaVer rn opNameM q queryType (PlanCache planCache) =
Cache.lookup planId planCache
where
planId = PlanId schemaVer rn opNameM q
planId = PlanId schemaVer rn opNameM q queryType
addPlan
:: SchemaCacheVer -> RoleName -> Maybe GH.OperationName -> GH.GQLQueryText
-> ReusablePlan -> PlanCache -> IO ()
addPlan schemaVer rn opNameM q queryPlan (PlanCache planCache) =
-> ReusablePlan -> EQ.GraphQLQueryType -> PlanCache -> IO ()
addPlan schemaVer rn opNameM q queryPlan queryType (PlanCache planCache) =
Cache.insert planId queryPlan planCache
where
planId = PlanId schemaVer rn opNameM q
planId = PlanId schemaVer rn opNameM q queryType
clearPlanCache :: PlanCache -> IO ()
clearPlanCache (PlanCache planCache) =

View File

@ -4,6 +4,7 @@ module Hasura.GraphQL.Execute.Query
, ReusableQueryPlan
, GeneratedSqlMap
, PreparedSql(..)
, GraphQLQueryType(..)
) where
import qualified Data.Aeson as J
@ -23,7 +24,7 @@ import Data.Has
import qualified Hasura.GraphQL.Resolve as R
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.GraphQL.Validate as GV
import qualified Hasura.GraphQL.Validate.Field as V
import qualified Hasura.GraphQL.Validate.SelectionSet as V
import qualified Hasura.SQL.DML as S
import Hasura.EncJSON
@ -199,14 +200,14 @@ convertQuerySelSet
=> HTTP.Manager
-> [N.Header]
-> QueryReusability
-> V.SelSet
-> V.ObjectSelectionSet
-> QueryActionExecuter
-> m (LazyRespTx, Maybe ReusableQueryPlan, GeneratedSqlMap)
convertQuerySelSet manager reqHdrs initialReusability fields actionRunner = do
convertQuerySelSet manager reqHdrs initialReusability selSet actionRunner = do
userInfo <- asks getter
(fldPlans, finalReusability) <- runReusabilityTWith initialReusability $
forM (toList fields) $ \fld -> do
fldPlan <- case V._fName fld of
fmap (map (\(a, b) -> (G.Alias $ G.Name a, b))) $ V.traverseObjectSelectionSet selSet $ \fld ->
case V._fName fld of
"__type" -> fldPlanFromJ <$> R.typeR fld
"__schema" -> fldPlanFromJ <$> R.schemaR fld
"__typename" -> pure $ fldPlanFromJ queryRootNamedType
@ -216,7 +217,7 @@ convertQuerySelSet manager reqHdrs initialReusability fields actionRunner = do
R.traverseQueryRootFldAST prepareWithPlan unresolvedAst
let (query, remoteJoins) = R.toPGQuery q
pure . RFPPostgres $ PGPlan query vars prepped remoteJoins
pure (V._fAlias fld, fldPlan)
-- pure (V._fAlias fld, fldPlan)
let varTypes = finalReusability ^? _Reusable
reusablePlan = ReusableQueryPlan <$> varTypes <*> pure fldPlans
(tx, sql) <- mkCurPlanTx manager reqHdrs userInfo fldPlans
@ -293,3 +294,15 @@ mkGeneratedSqlMap resolved =
RRRaw _ -> Nothing
RRSql ps -> Just ps
in (alias, res)
-- The GraphQL Query type
data GraphQLQueryType
= QueryHasura
| QueryRelay
deriving (Show, Eq, Ord, Generic)
instance Hashable GraphQLQueryType
instance J.ToJSON GraphQLQueryType where
toJSON = \case
QueryHasura -> "hasura"
QueryRelay -> "relay"

View File

@ -27,15 +27,17 @@ import qualified Hasura.GraphQL.Execute.LiveQuery as E
import qualified Hasura.GraphQL.Resolve as RS
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.GraphQL.Validate as GV
import qualified Hasura.GraphQL.Validate.SelectionSet as GV
import qualified Hasura.SQL.DML as S
data GQLExplain
= GQLExplain
{ _gqeQuery :: !GH.GQLReqParsed
, _gqeUser :: !(Maybe (Map.HashMap Text Text))
{ _gqeQuery :: !GH.GQLReqParsed
, _gqeUser :: !(Maybe (Map.HashMap Text Text))
, _gqeIsRelay :: !(Maybe Bool)
} deriving (Show, Eq)
$(J.deriveJSON (J.aesonDrop 4 J.camelCase){J.omitNothingFields=True}
$(J.deriveJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True}
''GQLExplain
)
@ -125,24 +127,26 @@ explainGQLQuery
-> QueryActionExecuter
-> GQLExplain
-> m EncJSON
explainGQLQuery pgExecCtx sc sqlGenCtx enableAL actionExecuter (GQLExplain query userVarsRaw) = do
explainGQLQuery pgExecCtx sc sqlGenCtx enableAL actionExecuter (GQLExplain query userVarsRaw maybeIsRelay) = do
userInfo <- mkUserInfo (URBFromSessionVariablesFallback adminRoleName) UAdminSecretSent sessionVariables
(execPlan, queryReusability) <- runReusabilityT $
E.getExecPlanPartial userInfo sc enableAL query
E.getExecPlanPartial userInfo sc queryType enableAL query
(gCtx, rootSelSet) <- case execPlan of
E.GExPHasura (gCtx, rootSelSet) ->
return (gCtx, rootSelSet)
E.GExPRemote _ _ ->
E.GExPRemote{} ->
throw400 InvalidParams "only hasura queries can be explained"
case rootSelSet of
GV.RQuery selSet ->
runInTx $ encJFromJValue <$> traverse (explainField userInfo gCtx sqlGenCtx actionExecuter)
(toList selSet)
runInTx $ encJFromJValue . map snd <$>
GV.traverseObjectSelectionSet selSet (explainField userInfo gCtx sqlGenCtx actionExecuter)
GV.RMutation _ ->
throw400 InvalidParams "only queries can be explained"
GV.RSubscription rootField -> do
(plan, _) <- E.getSubsOp pgExecCtx gCtx sqlGenCtx userInfo queryReusability actionExecuter rootField
GV.RSubscription fields -> do
(plan, _) <- E.getSubsOp pgExecCtx gCtx sqlGenCtx userInfo
queryReusability actionExecuter fields
runInTx $ encJFromJValue <$> E.explainLiveQueryPlan plan
where
queryType = bool E.QueryHasura E.QueryRelay $ fromMaybe False maybeIsRelay
sessionVariables = mkSessionVariablesText $ maybe [] Map.toList userVarsRaw
runInTx = liftEither <=< liftIO . runExceptT . runLazyTx pgExecCtx Q.ReadOnly

View File

@ -0,0 +1,300 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilyDependencies #-}
module Hasura.GraphQL.NormalForm
( Selection(..)
, NormalizedSelection
, NormalizedSelectionSet
, NormalizedField
, SelectionSet(..)
, RootSelectionSet(..)
-- , toGraphQLOperation
, ArgsMap
, Field(..)
, Typename(..)
, IsField(..)
, toField
, AliasedFields(..)
, asObjectSelectionSet
, ObjectSelectionSet(..)
, ObjectSelectionSetMap
, traverseObjectSelectionSet
, InterfaceSelectionSet
, asInterfaceSelectionSet
, getMemberSelectionSet
, UnionSelectionSet
, ScopedSelectionSet(..)
, emptyScopedSelectionSet
, getUnionSelectionSet
, getInterfaceSelectionSet
, getObjectSelectionSet
, AnnInpVal(..)
, AnnGValue(..)
, AnnGObject
, AnnGEnumValue(..)
, hasNullVal
, getAnnInpValKind
, toGraphQLField
, toGraphQLSelectionSet
) where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.RQL.Types.Column as RQL
import qualified Hasura.RQL.Types.Error as RQL
import Hasura.SQL.Types
import Hasura.SQL.Value
data Selection f s
= SelectionField !G.Alias !f
| SelectionInlineFragmentSpread !s
| SelectionFragmentSpread !G.Name !s
deriving (Show, Eq)
-- | What a processed G.SelectionSet should look like
type family NormalizedSelectionSet a = s | s -> a
-- | What a processed G.Field should look like
type family NormalizedField a
type NormalizedSelection a
= Selection (NormalizedField a) (NormalizedSelectionSet a)
-- | Ordered fields
newtype AliasedFields f
= AliasedFields { unAliasedFields :: OMap.InsOrdHashMap G.Alias f }
deriving (Show, Eq, Functor, Foldable, Traversable, Semigroup)
newtype ObjectSelectionSet
= ObjectSelectionSet { unObjectSelectionSet :: AliasedFields Field }
deriving (Show, Eq, Semigroup)
traverseObjectSelectionSet
:: (Monad m) => ObjectSelectionSet -> (Field -> m a) -> m [(Text, a)]
traverseObjectSelectionSet selectionSet f =
forM (OMap.toList $ unAliasedFields $ unObjectSelectionSet selectionSet) $
\(alias, field) -> (G.unName $ G.unAlias alias,) <$> f field
type ObjectSelectionSetMap
= Map.HashMap G.NamedType ObjectSelectionSet
data Typename = Typename
deriving (Show, Eq, Generic)
data ScopedSelectionSet f
= ScopedSelectionSet
{ _sssBaseSelectionSet :: !(AliasedFields f)
-- ^ Fields that aren't explicitly defined for member types
, _sssMemberSelectionSets :: !ObjectSelectionSetMap
-- ^ SelectionSets of individual member types
} deriving (Show, Eq, Generic)
emptyScopedSelectionSet :: ScopedSelectionSet f
emptyScopedSelectionSet =
ScopedSelectionSet (AliasedFields mempty) mempty
type InterfaceSelectionSet = ScopedSelectionSet Field
type UnionSelectionSet = ScopedSelectionSet Typename
data RootSelectionSet
= RQuery !ObjectSelectionSet
| RMutation !ObjectSelectionSet
| RSubscription !ObjectSelectionSet
deriving (Show, Eq)
-- toGraphQLOperation :: RootSelectionSet -> G.ExecutableDefinition
-- toGraphQLOperation = \case
-- RQuery selectionSet ->
-- mkExecutableDefinition G.OperationTypeQuery $
-- toGraphQLSelectionSet $ SelectionSetObject selectionSet
-- RMutation selectionSet ->
-- mkExecutableDefinition G.OperationTypeQuery $
-- toGraphQLSelectionSet $ SelectionSetObject selectionSet
-- RSubscription opDef _ ->
-- G.ExecutableDefinitionOperation $ G.OperationDefinitionTyped opDef
-- where
-- mkExecutableDefinition operationType selectionSet =
-- G.ExecutableDefinitionOperation $ G.OperationDefinitionTyped $
-- G.TypedOperationDefinition
-- { G._todName = Nothing -- TODO, store the name too?
-- , G._todDirectives = []
-- , G._todType = operationType
-- , G._todVariableDefinitions = []
-- , G._todSelectionSet = selectionSet
-- }
data SelectionSet
= SelectionSetObject !ObjectSelectionSet
| SelectionSetUnion !UnionSelectionSet
| SelectionSetInterface !InterfaceSelectionSet
| SelectionSetNone
-- ^ in cases of enums and scalars
deriving (Show, Eq)
getObjectSelectionSet :: SelectionSet -> Maybe ObjectSelectionSet
getObjectSelectionSet = \case
SelectionSetObject s -> pure s
_ -> Nothing
asObjectSelectionSet
:: (MonadError RQL.QErr m) => SelectionSet -> m ObjectSelectionSet
asObjectSelectionSet selectionSet =
onNothing (getObjectSelectionSet selectionSet) $
RQL.throw500 "expecting ObjectSelectionSet"
getUnionSelectionSet :: SelectionSet -> Maybe UnionSelectionSet
getUnionSelectionSet = \case
SelectionSetUnion s -> pure s
_ -> Nothing
getInterfaceSelectionSet :: SelectionSet -> Maybe InterfaceSelectionSet
getInterfaceSelectionSet = \case
SelectionSetInterface s -> pure s
_ -> Nothing
asInterfaceSelectionSet
:: (MonadError RQL.QErr m) => SelectionSet -> m InterfaceSelectionSet
asInterfaceSelectionSet selectionSet =
onNothing (getInterfaceSelectionSet selectionSet) $
RQL.throw500 "expecting InterfaceSelectionSet"
type ArgsMap = Map.HashMap G.Name AnnInpVal
data Field
= Field
{ _fName :: !G.Name
, _fType :: !G.NamedType
, _fArguments :: !ArgsMap
, _fSelSet :: !SelectionSet
} deriving (Eq, Show)
toGraphQLField :: G.Alias -> Field -> G.Field
toGraphQLField alias Field{..} =
G.Field
{ G._fName = _fName
, G._fArguments = [] -- TODO
, G._fDirectives = []
, G._fAlias = Just alias
, G._fSelectionSet = toGraphQLSelectionSet _fSelSet
}
toGraphQLSelectionSet :: SelectionSet -> G.SelectionSet
toGraphQLSelectionSet = \case
SelectionSetObject selectionSet -> fromSelectionSet selectionSet
SelectionSetInterface selectionSet -> fromScopedSelectionSet selectionSet
SelectionSetUnion selectionSet -> fromScopedSelectionSet selectionSet
SelectionSetNone -> mempty
where
fromAliasedFields :: (IsField f) => AliasedFields f -> G.SelectionSet
fromAliasedFields =
map (G.SelectionField . uncurry toGraphQLField) .
OMap.toList . fmap toField . unAliasedFields
fromSelectionSet =
fromAliasedFields . unObjectSelectionSet
toInlineSelection typeName =
G.SelectionInlineFragment . G.InlineFragment (Just typeName) mempty .
fromSelectionSet
fromScopedSelectionSet (ScopedSelectionSet base specific) =
map (uncurry toInlineSelection) (Map.toList specific) <> fromAliasedFields base
-- $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
-- ''Field
-- )
-- $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
-- ''InterfaceSelectionSet
-- )
-- $(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
-- ''SelectionSet
-- )
class IsField f where
getFieldName :: f -> G.Name
getFieldType :: f -> G.NamedType
getFieldArguments :: f -> ArgsMap
getFieldSelectionSet :: f -> SelectionSet
toField :: (IsField f) => f -> Field
toField f =
Field (getFieldName f) (getFieldType f)
(getFieldArguments f) (getFieldSelectionSet f)
instance IsField Field where
getFieldName = _fName
getFieldType = _fType
getFieldArguments = _fArguments
getFieldSelectionSet = _fSelSet
instance IsField Typename where
getFieldName _ = "__typename"
getFieldType _ = G.NamedType "String"
getFieldArguments _ = mempty
getFieldSelectionSet _ = SelectionSetNone
getMemberSelectionSet
:: IsField f
=> G.NamedType -> ScopedSelectionSet f -> ObjectSelectionSet
getMemberSelectionSet namedType (ScopedSelectionSet {..}) =
fromMaybe (ObjectSelectionSet (fmap toField _sssBaseSelectionSet)) $
Map.lookup namedType $ _sssMemberSelectionSets
data AnnInpVal
= AnnInpVal
{ _aivType :: !G.GType
, _aivVariable :: !(Maybe G.Variable)
, _aivValue :: !AnnGValue
} deriving (Show, Eq)
type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal
-- | See 'EnumValuesInfo' for information about what these cases mean.
data AnnGEnumValue
= AGESynthetic !(Maybe G.EnumValue)
| AGEReference !RQL.EnumReference !(Maybe RQL.EnumValue)
deriving (Show, Eq)
data AnnGValue
= AGScalar !PGScalarType !(Maybe PGScalarValue)
| AGEnum !G.NamedType !AnnGEnumValue
| AGObject !G.NamedType !(Maybe AnnGObject)
| AGArray !G.ListType !(Maybe [AnnInpVal])
deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 4 J.camelCase){J.omitNothingFields=True}
''AnnInpVal
)
instance J.ToJSON AnnGValue where
-- toJSON (AGScalar ty valM) =
toJSON = const J.Null
-- J.
-- J.toJSON [J.toJSON ty, J.toJSON valM]
hasNullVal :: AnnGValue -> Bool
hasNullVal = \case
AGScalar _ Nothing -> True
AGEnum _ (AGESynthetic Nothing) -> True
AGEnum _ (AGEReference _ Nothing) -> True
AGObject _ Nothing -> True
AGArray _ Nothing -> True
_ -> False
getAnnInpValKind :: AnnGValue -> Text
getAnnInpValKind = \case
AGScalar _ _ -> "scalar"
AGEnum _ _ -> "enum"
AGObject _ _ -> "object"
AGArray _ _ -> "array"

View File

@ -0,0 +1,407 @@
module Hasura.GraphQL.RelaySchema where
import Control.Lens.Extended hiding (op)
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Context
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Validate.Types
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Utils (duplicates)
import Hasura.Session
import Hasura.SQL.Types
import Hasura.GraphQL.Schema
import Hasura.GraphQL.Schema.BoolExp
import Hasura.GraphQL.Schema.Builder
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Function
import Hasura.GraphQL.Schema.OrderBy
import Hasura.GraphQL.Schema.Select
mkNodeInterface :: [QualifiedTable] -> IFaceTyInfo
mkNodeInterface relayTableNames =
let description = G.Description "An object with globally unique ID"
in mkIFaceTyInfo (Just description) nodeType (mapFromL _fiName [idField]) $
Set.fromList $ map mkTableTy relayTableNames
where
idField =
let description = G.Description "A globally unique identifier"
in mkHsraObjFldInfo (Just description) "id" mempty nodeIdType
mkRelayGCtxMap
:: forall m. (MonadError QErr m)
=> TableCache -> FunctionCache -> m GCtxMap
mkRelayGCtxMap tableCache functionCache = do
typesMapL <- mapM (mkRelayGCtxMapTable tableCache functionCache) relayTables
typesMap <- combineTypes typesMapL
let gCtxMap = flip Map.map typesMap $
\(ty, flds, insCtx) -> mkGCtx ty flds insCtx
pure $ Map.map (flip RoleContext Nothing) gCtxMap
where
relayTables =
filter (tableFltr . _tiCoreInfo) $ Map.elems tableCache
tableFltr ti =
not (isSystemDefined $ _tciSystemDefined ti)
&& isValidObjectName (_tciName ti)
&& isJust (_tciPrimaryKey ti)
combineTypes
:: [Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap)]
-> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
combineTypes maps = do
let listMap = foldr (Map.unionWith (++) . Map.map pure) mempty maps
flip Map.traverseWithKey listMap $ \roleName typeList -> do
let relayTableNames = map (_tciName . _tiCoreInfo) relayTables
tyAgg = addTypeInfoToTyAgg
(TIIFace $ mkNodeInterface relayTableNames) $
mconcat $ map (^. _1) typeList
insCtx = mconcat $ map (^. _3) typeList
rootFields <- combineRootFields roleName $ map (^. _2) typeList
pure (tyAgg, rootFields, insCtx)
combineRootFields :: RoleName -> [RootFields] -> m RootFields
combineRootFields roleName rootFields = do
let duplicateQueryFields = duplicates $
concatMap (Map.keys . _rootQueryFields) rootFields
duplicateMutationFields = duplicates $
concatMap (Map.keys . _rootMutationFields) rootFields
-- TODO: The following exception should result in inconsistency
when (not $ null duplicateQueryFields) $
throw400 Unexpected $ "following query root fields are duplicated: "
<> showNames duplicateQueryFields
when (not $ null duplicateMutationFields) $
throw400 Unexpected $ "following mutation root fields are duplicated: "
<> showNames duplicateMutationFields
pure $ mconcat $ mkNodeQueryRootFields roleName relayTables : rootFields
mkRelayGCtxMapTable
:: (MonadError QErr m)
=> TableCache
-> FunctionCache
-> TableInfo
-> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
mkRelayGCtxMapTable tableCache funcCache tabInfo = do
m <- flip Map.traverseWithKey rolePerms $
mkRelayGCtxRole tableCache tn descM fields primaryKey validConstraints tabFuncs viewInfo customConfig
adminSelFlds <- mkAdminSelFlds fields tableCache
adminInsCtx <- mkAdminInsCtx tableCache fields
let adminCtx = mkRelayTyAggRole tn descM (Just (cols, icRelations adminInsCtx))
(Just (True, adminSelFlds)) (Just cols) (Just ())
primaryKey validConstraints viewInfo tabFuncs
adminInsCtxMap = Map.singleton tn adminInsCtx
return $ Map.insert adminRoleName (adminCtx, adminRootFlds, adminInsCtxMap) m
where
TableInfo coreInfo rolePerms _ = tabInfo
TableCoreInfo tn descM _ fields primaryKey _ _ viewInfo _ customConfig = coreInfo
validConstraints = mkValidConstraints $ map _cName (tciUniqueOrPrimaryKeyConstraints coreInfo)
tabFuncs = filter (isValidObjectName . fiName) $
getFuncsOfTable tn funcCache
cols = getValidCols fields
adminRootFlds =
let insertPermDetails = Just ([], True)
selectPermDetails = Just (noFilter, Nothing, [], True)
updatePermDetails = Just (getValidCols fields, mempty, noFilter, Nothing, [])
deletePermDetails = Just (noFilter, [])
queryFields = getRelayQueryRootFieldsRole tn primaryKey fields tabFuncs
selectPermDetails
mutationFields = getMutationRootFieldsRole tn primaryKey
validConstraints fields insertPermDetails
selectPermDetails updatePermDetails
deletePermDetails viewInfo customConfig
in RootFields queryFields mutationFields
mkRelayGCtxRole
:: (MonadError QErr m)
=> TableCache
-> QualifiedTable
-> Maybe PGDescription
-> FieldInfoMap FieldInfo
-> Maybe (PrimaryKey PGColumnInfo)
-> [ConstraintName]
-> [FunctionInfo]
-> Maybe ViewInfo
-> TableConfig
-> RoleName
-> RolePermInfo
-> m (TyAgg, RootFields, InsCtxMap)
mkRelayGCtxRole tableCache tn descM fields primaryKey constraints funcs viM tabConfigM role permInfo = do
selPermM <- mapM (getSelPerm tableCache fields role) selM
tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do
ctx <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo
let permCols = flip getColInfos allCols $ Set.toList $ ipiCols ipi
return (ctx, (permCols, icRelations ctx))
let insPermM = snd <$> tabInsInfoM
insCtxM = fst <$> tabInsInfoM
updColsM = filterColumnFields . upiCols <$> _permUpd permInfo
tyAgg = mkRelayTyAggRole tn descM insPermM selPermM updColsM
(void $ _permDel permInfo) primaryKey constraints viM funcs
queryRootFlds = getRelayQueryRootFieldsRole tn primaryKey fields funcs
(mkSel <$> _permSel permInfo)
mutationRootFlds = getMutationRootFieldsRole tn primaryKey constraints fields
(mkIns <$> insM) (mkSel <$> selM)
(mkUpd <$> updM) (mkDel <$> delM) viM tabConfigM
insCtxMap = maybe Map.empty (Map.singleton tn) insCtxM
return (tyAgg, RootFields queryRootFlds mutationRootFlds, insCtxMap)
where
RolePermInfo insM selM updM delM = permInfo
allCols = getCols fields
filterColumnFields allowedSet =
filter ((`Set.member` allowedSet) . pgiColumn) $ getValidCols fields
mkIns i = (ipiRequiredHeaders i, isJust updM)
mkSel s = ( spiFilter s, spiLimit s
, spiRequiredHeaders s, spiAllowAgg s
)
mkUpd u = ( flip getColInfos allCols $ Set.toList $ upiCols u
, upiSet u
, upiFilter u
, upiCheck u
, upiRequiredHeaders u
)
mkDel d = (dpiFilter d, dpiRequiredHeaders d)
mkRelayTyAggRole
:: QualifiedTable
-> Maybe PGDescription
-- ^ Postgres description
-> Maybe ([PGColumnInfo], RelationInfoMap)
-- ^ insert permission
-> Maybe (Bool, [SelField])
-- ^ select permission
-> Maybe [PGColumnInfo]
-- ^ update cols
-> Maybe ()
-- ^ delete cols
-> Maybe (PrimaryKey PGColumnInfo)
-> [ConstraintName]
-- ^ constraints
-> Maybe ViewInfo
-> [FunctionInfo]
-- ^ all functions
-> TyAgg
mkRelayTyAggRole tn descM insPermM selPermM updColsM delPermM pkeyCols constraints viM funcs =
let (mutationTypes, mutationFields) =
mkMutationTypesAndFieldsRole tn insPermM selFldsM updColsM delPermM pkeyCols constraints viM
in TyAgg (mkTyInfoMap allTypes <> mutationTypes)
(fieldMap <> mutationFields)
scalars ordByCtx
where
ordByCtx = fromMaybe Map.empty ordByCtxM
funcInpArgTys = bool [] (map TIInpObj funcArgInpObjs) $ isJust selFldsM
allTypes = queryTypes <> aggQueryTypes <> funcInpArgTys <> computedFieldFuncArgsInps
queryTypes = map TIObj selectObjects <>
catMaybes
[ TIInpObj <$> boolExpInpObjM
, TIInpObj <$> ordByInpObjM
, TIEnum <$> selColInpTyM
]
aggQueryTypes = map TIObj aggObjs <> map TIInpObj aggOrdByInps
fieldMap = Map.unions $ catMaybes [boolExpInpObjFldsM, selObjFldsM]
scalars = selByPkScalarSet <> funcArgScalarSet <> computedFieldFuncArgScalars
selFldsM = snd <$> selPermM
selColNamesM = map pgiName . getPGColumnFields <$> selFldsM
selColInpTyM = mkSelColumnTy tn <$> selColNamesM
-- boolexp input type
boolExpInpObjM = case selFldsM of
Just selFlds -> Just $ mkBoolExpInp tn selFlds
-- no select permission
Nothing ->
-- but update/delete is defined
if isJust updColsM || isJust delPermM
then Just $ mkBoolExpInp tn []
else Nothing
-- funcargs input type
funcArgInpObjs = flip mapMaybe funcs $ \func ->
mkFuncArgsInp (fiName func) (getInputArgs func)
-- funcArgCtx = Map.unions funcArgCtxs
funcArgScalarSet = funcs ^.. folded.to getInputArgs.folded.to (_qptName.faType)
-- helper
mkFldMap ty = Map.fromList . concatMap (mkFld ty)
mkFld ty = \case
SFPGColumn ci -> [((ty, pgiName ci), RFPGColumn ci)]
SFRelationship (RelationshipFieldInfo relInfo allowAgg cols permFilter permLimit maybePkCols _) ->
let relationshipName = riName relInfo
relFld = ( (ty, mkRelName relationshipName)
, RFRelationship $ RelationshipField relInfo RFKSimple cols permFilter permLimit
)
aggRelFld = ( (ty, mkAggRelName relationshipName)
, RFRelationship $ RelationshipField relInfo RFKAggregate cols permFilter permLimit
)
maybeConnFld = maybePkCols <&> \pkCols ->
( (ty, mkConnectionRelName relationshipName)
, RFRelationship $ RelationshipField relInfo
(RFKConnection pkCols) cols permFilter permLimit
)
in case riType relInfo of
ObjRel -> [relFld]
ArrRel -> bool [relFld] ([relFld, aggRelFld] <> maybe [] pure maybeConnFld) allowAgg
SFComputedField cf -> pure
( (ty, mkComputedFieldName $ _cfName cf)
, RFComputedField cf
)
SFRemoteRelationship remoteField -> pure
( (ty, G.Name (remoteRelationshipNameToText (_rfiName remoteField)))
, RFRemoteRelationship remoteField
)
-- the fields used in bool exp
boolExpInpObjFldsM = mkFldMap (mkBoolExpTy tn) <$> selFldsM
-- table obj
selectObjects = case selPermM of
Just (_, selFlds) ->
[ (mkRelayTableObj tn descM selFlds)
{_otiImplIFaces = Set.singleton nodeType}
, mkTableEdgeObj tn
, mkTableConnectionObj tn
]
Nothing -> []
-- aggregate objs and order by inputs
(aggObjs, aggOrdByInps) = case selPermM of
Just (True, selFlds) ->
let cols = getPGColumnFields selFlds
numCols = onlyNumCols cols
compCols = onlyComparableCols cols
objs = [ mkTableAggObj tn
, mkTableAggregateFieldsObj tn (numCols, numAggregateOps) (compCols, compAggregateOps)
] <> mkColAggregateFieldsObjs selFlds
ordByInps = mkTabAggOrdByInpObj tn (numCols, numAggregateOps) (compCols, compAggregateOps)
: mkTabAggregateOpOrdByInpObjs tn (numCols, numAggregateOps) (compCols, compAggregateOps)
in (objs, ordByInps)
_ -> ([], [])
getNumericCols = onlyNumCols . getPGColumnFields
getComparableCols = onlyComparableCols . getPGColumnFields
onlyFloat = const $ mkScalarTy PGFloat
mkTypeMaker "sum" = mkColumnType
mkTypeMaker _ = onlyFloat
mkColAggregateFieldsObjs flds =
let numCols = getNumericCols flds
compCols = getComparableCols flds
mkNumObjFld n = mkTableColAggregateFieldsObj tn n (mkTypeMaker n) numCols
mkCompObjFld n = mkTableColAggregateFieldsObj tn n mkColumnType compCols
numFldsObjs = bool (map mkNumObjFld numAggregateOps) [] $ null numCols
compFldsObjs = bool (map mkCompObjFld compAggregateOps) [] $ null compCols
in numFldsObjs <> compFldsObjs
-- the fields used in table object
nodeFieldM = RFNodeId tn . _pkColumns <$> pkeyCols
selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM >>=
\fm -> nodeFieldM <&> \nodeField ->
Map.insert (mkTableTy tn, "id") nodeField fm
-- the scalar set for table_by_pk arguments
selByPkScalarSet = pkeyCols ^.. folded.to _pkColumns.folded.to pgiType._PGColumnScalar
ordByInpCtxM = mkOrdByInpObj tn <$> selFldsM
(ordByInpObjM, ordByCtxM) = case ordByInpCtxM of
Just (a, b) -> (Just a, Just b)
Nothing -> (Nothing, Nothing)
-- computed fields' function args input objects and scalar types
mkComputedFieldRequiredTypes computedFieldInfo =
let ComputedFieldFunction qf inputArgs _ _ _ = _cfFunction computedFieldInfo
scalarArgs = map (_qptName . faType) $ toList inputArgs
in (, scalarArgs) <$> mkFuncArgsInp qf inputArgs
computedFieldReqTypes = catMaybes $
maybe [] (map mkComputedFieldRequiredTypes . getComputedFields) selFldsM
computedFieldFuncArgsInps = map (TIInpObj . fst) computedFieldReqTypes
computedFieldFuncArgScalars = Set.fromList $ concatMap snd computedFieldReqTypes
mkSelectOpCtx
:: QualifiedTable
-> [PGColumnInfo]
-> (AnnBoolExpPartialSQL, Maybe Int, [T.Text]) -- select filter
-> SelOpCtx
mkSelectOpCtx tn allCols (fltr, pLimit, hdrs) =
SelOpCtx tn hdrs colGNameMap fltr pLimit
where
colGNameMap = mkPGColGNameMap allCols
getRelayQueryRootFieldsRole
:: QualifiedTable
-> Maybe (PrimaryKey PGColumnInfo)
-> FieldInfoMap FieldInfo
-> [FunctionInfo]
-> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter
-> QueryRootFieldMap
getRelayQueryRootFieldsRole tn primaryKey fields funcs selM =
makeFieldMap $
funcConnectionQueries
<> catMaybes
[ getSelConnectionDet <$> selM <*> maybePrimaryKeyColumns
]
where
maybePrimaryKeyColumns = fmap _pkColumns primaryKey
colGNameMap = mkPGColGNameMap $ getCols fields
funcConnectionQueries = fromMaybe [] $ getFuncQueryConnectionFlds
<$> selM <*> maybePrimaryKeyColumns
getSelConnectionDet (selFltr, pLimit, hdrs, _) primaryKeyColumns =
selFldHelper (QCSelectConnection primaryKeyColumns)
(mkSelFldConnection Nothing) selFltr pLimit hdrs
selFldHelper f g pFltr pLimit hdrs =
( f $ mkSelectOpCtx tn (getCols fields) (pFltr, pLimit, hdrs)
, g tn
)
getFuncQueryConnectionFlds (selFltr, pLimit, hdrs, _) primaryKeyColumns =
funcFldHelper (QCFuncConnection primaryKeyColumns) mkFuncQueryConnectionFld selFltr pLimit hdrs
funcFldHelper f g pFltr pLimit hdrs =
flip map funcs $ \fi ->
( f $ FuncQOpCtx (fiName fi) (mkFuncArgItemSeq fi) hdrs colGNameMap pFltr pLimit
, g fi $ fiDescription fi
)
mkNodeQueryRootFields :: RoleName -> [TableInfo] -> RootFields
mkNodeQueryRootFields roleName relayTables =
RootFields (mapFromL (_fiName . snd) [nodeQueryDet]) mempty
where
nodeQueryDet =
( QCNodeSelect nodeSelMap
, nodeQueryField
)
nodeQueryField =
let nodeParams = fromInpValL $ pure $
InpValInfo (Just $ G.Description "A globally unique id")
"id" Nothing nodeIdType
in mkHsraObjFldInfo Nothing "node" nodeParams $ G.toGT nodeType
nodeSelMap =
Map.fromList $ flip mapMaybe relayTables $ \table ->
let tableName = _tciName $ _tiCoreInfo table
allColumns = getCols $ _tciFieldInfoMap $ _tiCoreInfo table
selectPermM = _permSel <$> Map.lookup roleName
(_tiRolePermInfoMap table)
permDetailsM = join selectPermM <&> \perm ->
( spiFilter perm
, spiLimit perm
, spiRequiredHeaders perm
)
adminPermDetails = (noFilter, Nothing, [])
in (mkTableTy tableName,) . mkSelectOpCtx tableName allColumns
<$> bool permDetailsM (Just adminPermDetails) (isAdmin roleName)

View File

@ -12,7 +12,7 @@ module Hasura.GraphQL.Resolve
, QueryRootFldUnresolved
, QueryRootFldResolved
, toPGQuery
, toSQLSelect
, toSQLFromItem
, RIntro.schemaR
, RIntro.typeR
@ -38,15 +38,18 @@ import qualified Hasura.GraphQL.Resolve.Insert as RI
import qualified Hasura.GraphQL.Resolve.Introspect as RIntro
import qualified Hasura.GraphQL.Resolve.Mutation as RM
import qualified Hasura.GraphQL.Resolve.Select as RS
import qualified Hasura.GraphQL.Schema.Common as GS
import qualified Hasura.GraphQL.Validate as V
import qualified Hasura.RQL.DML.RemoteJoin as RR
import qualified Hasura.RQL.DML.Select as DS
import qualified Hasura.SQL.DML as S
data QueryRootFldAST v
= QRFPk !(DS.AnnSimpleSelG v)
= QRFNode !(DS.AnnSimpleSelG v)
| QRFPk !(DS.AnnSimpleSelG v)
| QRFSimple !(DS.AnnSimpleSelG v)
| QRFAgg !(DS.AnnAggSelG v)
| QRFAgg !(DS.AnnAggregateSelectG v)
| QRFConnection !(DS.ConnectionSelect v)
| QRFActionSelect !(DS.AnnSimpleSelG v)
| QRFActionExecuteObject !(DS.AnnSimpleSelG v)
| QRFActionExecuteList !(DS.AnnSimpleSelG v)
@ -61,21 +64,28 @@ traverseQueryRootFldAST
-> QueryRootFldAST a
-> f (QueryRootFldAST b)
traverseQueryRootFldAST f = \case
QRFPk s -> QRFPk <$> DS.traverseAnnSimpleSel f s
QRFSimple s -> QRFSimple <$> DS.traverseAnnSimpleSel f s
QRFAgg s -> QRFAgg <$> DS.traverseAnnAggSel f s
QRFActionSelect s -> QRFActionSelect <$> DS.traverseAnnSimpleSel f s
QRFActionExecuteObject s -> QRFActionExecuteObject <$> DS.traverseAnnSimpleSel f s
QRFActionExecuteList s -> QRFActionExecuteList <$> DS.traverseAnnSimpleSel f s
QRFNode s -> QRFNode <$> DS.traverseAnnSimpleSelect f s
QRFPk s -> QRFPk <$> DS.traverseAnnSimpleSelect f s
QRFSimple s -> QRFSimple <$> DS.traverseAnnSimpleSelect f s
QRFAgg s -> QRFAgg <$> DS.traverseAnnAggregateSelect f s
QRFActionSelect s -> QRFActionSelect <$> DS.traverseAnnSimpleSelect f s
QRFActionExecuteObject s -> QRFActionExecuteObject <$> DS.traverseAnnSimpleSelect f s
QRFActionExecuteList s -> QRFActionExecuteList <$> DS.traverseAnnSimpleSelect f s
QRFConnection s -> QRFConnection <$> DS.traverseConnectionSelect f s
toPGQuery :: QueryRootFldResolved -> (Q.Query, Maybe RR.RemoteJoins)
toPGQuery = \case
QRFPk s -> first (DS.selectQuerySQL DS.JASSingleObject) $ RR.getRemoteJoins s
QRFSimple s -> first (DS.selectQuerySQL DS.JASMultipleRows) $ RR.getRemoteJoins s
QRFAgg s -> first DS.selectAggQuerySQL $ RR.getRemoteJoinsAggSel s
QRFActionSelect s -> first (DS.selectQuerySQL DS.JASSingleObject) $ RR.getRemoteJoins s
QRFActionExecuteObject s -> first (DS.selectQuerySQL DS.JASSingleObject) $ RR.getRemoteJoins s
QRFActionExecuteList s -> first (DS.selectQuerySQL DS.JASMultipleRows) $ RR.getRemoteJoins s
QRFNode s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
QRFPk s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
QRFSimple s -> first (toQuery . DS.mkSQLSelect DS.JASMultipleRows) $ RR.getRemoteJoins s
QRFAgg s -> first (toQuery . DS.mkAggregateSelect) $ RR.getRemoteJoinsAggregateSelect s
QRFActionSelect s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
QRFActionExecuteObject s -> first (toQuery . DS.mkSQLSelect DS.JASSingleObject) $ RR.getRemoteJoins s
QRFActionExecuteList s -> first (toQuery . DS.mkSQLSelect DS.JASMultipleRows) $ RR.getRemoteJoins s
QRFConnection s -> first (toQuery . DS.mkConnectionSelect) $ RR.getRemoteJoinsConnectionSelect s
where
toQuery :: ToSQL a => a -> Q.Query
toQuery = Q.fromBuilder . toSQL
validateHdrs
:: (Foldable t, QErrM m) => UserInfo -> t Text -> m ()
@ -104,6 +114,13 @@ queryFldToPGAST fld actionExecuter = do
opCtx <- getOpCtx $ V._fName fld
userInfo <- asks getter
case opCtx of
QCNodeSelect nodeSelectMap -> do
NodeIdData table pkeyColumnValues <- RS.resolveNodeId fld
case Map.lookup (GS.mkTableTy table) nodeSelectMap of
Nothing -> throwVE $ "table " <> table <<> " not found"
Just selOpCtx -> do
validateHdrs userInfo (_socHeaders selOpCtx)
QRFNode <$> RS.convertNodeSelect selOpCtx pkeyColumnValues fld
QCSelect ctx -> do
validateHdrs userInfo (_socHeaders ctx)
QRFSimple <$> RS.convertSelect ctx fld
@ -128,13 +145,15 @@ queryFldToPGAST fld actionExecuter = do
-- an SQL query, but in case of query actions it's converted into JSON
-- and included in the action's webhook payload.
markNotReusable
let f = case jsonAggType of
let jsonAggType = RA.mkJsonAggSelect $ _saecOutputType ctx
f = case jsonAggType of
DS.JASMultipleRows -> QRFActionExecuteList
DS.JASSingleObject -> QRFActionExecuteObject
f <$> actionExecuter (RA.resolveActionQuery fld ctx (_uiSession userInfo))
where
outputType = _saecOutputType ctx
jsonAggType = RA.mkJsonAggSelect outputType
QCSelectConnection pk ctx ->
QRFConnection <$> RS.convertConnectionSelect pk ctx fld
QCFuncConnection pk ctx ->
QRFConnection <$> RS.convertConnectionFuncQuery pk ctx fld
mutFldToTx
:: ( HasVersion
@ -195,11 +214,16 @@ getOpCtx f = do
onNothing (Map.lookup f opCtxMap) $ throw500 $
"lookup failed: opctx: " <> showName f
toSQLSelect :: QueryRootFldResolved -> S.Select
toSQLSelect = \case
QRFPk s -> DS.mkSQLSelect DS.JASSingleObject s
QRFSimple s -> DS.mkSQLSelect DS.JASMultipleRows s
QRFAgg s -> DS.mkAggSelect s
QRFActionSelect s -> DS.mkSQLSelect DS.JASSingleObject s
QRFActionExecuteObject s -> DS.mkSQLSelect DS.JASSingleObject s
QRFActionExecuteList s -> DS.mkSQLSelect DS.JASSingleObject s
toSQLFromItem :: S.Alias -> QueryRootFldResolved -> S.FromItem
toSQLFromItem alias = \case
QRFNode s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
QRFPk s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
QRFSimple s -> fromSelect $ DS.mkSQLSelect DS.JASMultipleRows s
QRFAgg s -> fromSelect $ DS.mkAggregateSelect s
QRFActionSelect s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
QRFActionExecuteObject s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
QRFActionExecuteList s -> fromSelect $ DS.mkSQLSelect DS.JASSingleObject s
QRFConnection s -> flip (S.FISelectWith (S.Lateral False)) alias
$ DS.mkConnectionSelect s
where
fromSelect = flip (S.FISelect (S.Lateral False)) alias

View File

@ -14,46 +14,47 @@ module Hasura.GraphQL.Resolve.Action
import Hasura.Prelude
import Control.Concurrent (threadDelay)
import Control.Exception (try)
import Control.Concurrent (threadDelay)
import Control.Exception (try)
import Control.Lens
import Data.Has
import Data.IORef
import qualified Control.Concurrent.Async as A
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Data.UUID as UUID
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wreq as Wreq
import qualified Hasura.GraphQL.Resolve.Select as GRS
import qualified Hasura.RQL.DML.Select as RS
import qualified Hasura.RQL.DML.RemoteJoin as RJ
import qualified Control.Concurrent.Async as A
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Data.UUID as UUID
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wreq as Wreq
import qualified Hasura.GraphQL.Resolve.Select as GRS
import qualified Hasura.RQL.DML.RemoteJoin as RJ
import qualified Hasura.RQL.DML.Select as RS
import Hasura.EncJSON
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Resolve.Select (processTableSelectionSet)
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Resolve.Select (processTableSelectionSet)
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.HTTP
import Hasura.RQL.DDL.Headers (makeHeadersFromConf, toHeadersConf)
import Hasura.RQL.DDL.Headers (makeHeadersFromConf, toHeadersConf)
import Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.Types
import Hasura.RQL.Types.Run
import Hasura.Server.Utils (mkClientHeadersForward, mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)
import Hasura.Server.Utils (mkClientHeadersForward, mkSetCookieHeaders)
import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types
import Hasura.SQL.Value (PGScalarValue (..),toTxtValue)
import Hasura.SQL.Value (PGScalarValue (..), toTxtValue)
newtype ActionContext
= ActionContext {_acName :: ActionName}
@ -165,16 +166,18 @@ resolveActionMutationSync field executionContext userInfo = do
forwardClientHeaders resolvedWebhook handlerPayload
let webhookResponseExpression = RS.AEInput $ UVSQL $
toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
selSet <- asObjectSelectionSet $ _fSelSet field
selectAstUnresolved <-
processOutputSelectionSet webhookResponseExpression outputType definitionList
(_fType field) $ _fSelSet field
astResolved <- RS.traverseAnnSimpleSel resolveValTxt selectAstUnresolved
(_fType field) selSet
astResolved <- RS.traverseAnnSimpleSelect resolveValTxt selectAstUnresolved
let (astResolvedWithoutRemoteJoins,maybeRemoteJoins) = RJ.getRemoteJoins astResolved
let jsonAggType = mkJsonAggSelect outputType
jsonAggType = mkJsonAggSelect outputType
return $ (,respHeaders) $
case maybeRemoteJoins of
Just remoteJoins ->
let query = Q.fromBuilder $ toSQL $ RS.mkSQLSelect jsonAggType astResolvedWithoutRemoteJoins
let query = Q.fromBuilder $ toSQL $
RS.mkSQLSelect jsonAggType astResolvedWithoutRemoteJoins
in RJ.executeQueryWithRemoteJoins manager reqHeaders userInfo query [] remoteJoins
Nothing ->
asSingleRowJsonResp (Q.fromBuilder $ toSQL $ RS.mkSQLSelect jsonAggType astResolved) []
@ -225,9 +228,10 @@ resolveActionQuery field executionContext sessionVariables httpManager reqHeader
forwardClientHeaders resolvedWebhook handlerPayload
let webhookResponseExpression = RS.AEInput $ UVSQL $
toTxtValue $ WithScalarType PGJSONB $ PGValJSONB $ Q.JSONB $ J.toJSON webhookRes
selSet <- asObjectSelectionSet $ _fSelSet field
selectAstUnresolved <-
processOutputSelectionSet webhookResponseExpression outputType definitionList
(_fType field) $ _fSelSet field
(_fType field) selSet
return selectAstUnresolved
where
ActionExecutionContext actionName outputType outputFields definitionList resolvedWebhook confHeaders
@ -301,37 +305,40 @@ resolveAsyncActionQuery userInfo selectOpCtx field = do
actionId <- withArg (_fArguments field) "id" parseActionId
stringifyNumerics <- stringifyNum <$> asks getter
annotatedFields <- fmap (map (first FieldName)) $ withSelSet (_fSelSet field) $ \fld ->
selSet <- asObjectSelectionSet $ _fSelSet field
annotatedFields <- fmap (map (first FieldName)) $ traverseObjectSelectionSet selSet $ \fld ->
case _fName fld of
"__typename" -> return $ RS.FExp $ G.unName $ G.unNamedType $ _fType field
"__typename" -> return $ RS.AFExpression $ G.unName $ G.unNamedType $ _fType field
"output" -> do
-- See Note [Resolving async action query/subscription]
let inputTableArgument = RS.AETableRow $ Just $ Iden "response_payload"
ActionSelectOpContext outputType definitionList = selectOpCtx
jsonAggSelect = mkJsonAggSelect outputType
(RS.FComputedField . RS.CFSTable jsonAggSelect)
fldSelSet <- asObjectSelectionSet $ _fSelSet fld
(RS.AFComputedField . RS.CFSTable jsonAggSelect)
<$> processOutputSelectionSet inputTableArgument outputType
definitionList (_fType fld) (_fSelSet fld)
definitionList (_fType fld) fldSelSet
-- The metadata columns
"id" -> return $ mkAnnFldFromPGCol "id" PGUUID
"created_at" -> return $ mkAnnFldFromPGCol "created_at" PGTimeStampTZ
"errors" -> return $ mkAnnFldFromPGCol "errors" PGJSONB
"id" -> return $ mkAnnFieldFromPGCol "id" PGUUID
"created_at" -> return $ mkAnnFieldFromPGCol "created_at" PGTimeStampTZ
"errors" -> return $ mkAnnFieldFromPGCol "errors" PGJSONB
G.Name t -> throw500 $ "unexpected field in actions' httpResponse : " <> t
let tableFromExp = RS.FromTable actionLogTable
tableArguments = RS.noTableArgs
{ RS._taWhere = Just $ mkTableBoolExpression actionId}
tableArguments = RS.noSelectArgs
{ RS._saWhere = Just $ mkTableBoolExpression actionId}
tablePermissions = RS.TablePerm annBoolExpTrue Nothing
selectAstUnresolved = RS.AnnSelG annotatedFields tableFromExp tablePermissions
selectAstUnresolved = RS.AnnSelectG annotatedFields tableFromExp tablePermissions
tableArguments stringifyNumerics
return selectAstUnresolved
where
actionLogTable = QualifiedObject (SchemaName "hdb_catalog") (TableName "hdb_action_log")
-- TODO:- Avoid using PGColumnInfo
mkAnnFldFromPGCol column columnType =
flip RS.mkAnnColField Nothing $
mkAnnFieldFromPGCol column columnType =
flip RS.mkAnnColumnField Nothing $
PGColumnInfo (unsafePGCol column) (G.Name column) 0 (PGColumnScalar columnType) True Nothing
parseActionId annInpValue = mkParameterizablePGValue <$> asPGColumnValue annInpValue
@ -559,12 +566,12 @@ processOutputSelectionSet
=> RS.ArgumentExp UnresolvedVal
-> GraphQLType
-> [(PGCol, PGScalarType)]
-> G.NamedType -> SelSet -> m GRS.AnnSimpleSelect
-> G.NamedType -> ObjectSelectionSet -> m GRS.AnnSimpleSelect
processOutputSelectionSet tableRowInput actionOutputType definitionList fldTy flds = do
stringifyNumerics <- stringifyNum <$> asks getter
annotatedFields <- processTableSelectionSet fldTy flds
let annSel = RS.AnnSelG annotatedFields selectFrom
RS.noTablePermissions RS.noTableArgs stringifyNumerics
let annSel = RS.AnnSelectG annotatedFields selectFrom
RS.noTablePermissions RS.noSelectArgs stringifyNumerics
pure annSel
where
jsonbToPostgresRecordFunction =

View File

@ -171,6 +171,8 @@ parseColExp nt n val = do
"computed fields are not allowed in bool_exp"
RFRemoteRelationship _ -> throw500
"remote relationships are not allowed in bool_exp"
RFNodeId _ _ -> throw500
"node id is not allowed in bool_exp"
parseBoolExp
:: ( MonadReusability m

View File

@ -23,7 +23,7 @@ module Hasura.GraphQL.Resolve.Context
, txtConverter
, withSelSet
, traverseObjectSelectionSet
, fieldAsPath
, resolvePGCol
, module Hasura.GraphQL.Utils
@ -33,21 +33,21 @@ module Hasura.GraphQL.Resolve.Context
import Data.Has
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Utils
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
import qualified Hasura.SQL.DML as S
getFldInfo
:: (MonadError QErr m, MonadReader r m, Has FieldMap r)
@ -69,6 +69,7 @@ getPGColInfo nt n = do
RFRelationship _ -> throw500 $ mkErrMsg "relation"
RFComputedField _ -> throw500 $ mkErrMsg "computed field"
RFRemoteRelationship _ -> throw500 $ mkErrMsg "remote relationship"
RFNodeId _ _ -> throw500 $ mkErrMsg "node id"
where
mkErrMsg ty =
"found " <> ty <> " when expecting pgcolinfo for "
@ -140,12 +141,6 @@ prepareColVal (WithScalarType scalarType colVal) = do
txtConverter :: Applicative f => AnnPGVal -> f S.SQLExp
txtConverter (AnnPGVal _ _ scalarValue) = pure $ toTxtValue scalarValue
withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m [(Text, a)]
withSelSet selSet f =
forM (toList selSet) $ \fld -> do
res <- f fld
return (G.unName $ G.unAlias $ _fAlias fld, res)
fieldAsPath :: (MonadError QErr m) => Field -> m a -> m a
fieldAsPath = nameAsPath . _fName

View File

@ -9,36 +9,36 @@ import Hasura.EncJSON
import Hasura.Prelude
import Hasura.Session
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Database.PG.Query as Q
import qualified Hasura.RQL.DML.Insert as RI
import qualified Hasura.RQL.DML.Returning as RR
import qualified Database.PG.Query as Q
import qualified Hasura.RQL.DML.Insert as RI
import qualified Hasura.RQL.DML.Returning as RR
import qualified Hasura.SQL.DML as S
import qualified Hasura.SQL.DML as S
import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Resolve.Mutation
import Hasura.GraphQL.Resolve.Select
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Insert (insertOrUpdateCheckExpr)
import Hasura.RQL.DML.Internal (convAnnBoolExpPartialSQL, convPartialSQLExp,
sessVarFromCurrentSetting)
import Hasura.RQL.DML.Insert (insertOrUpdateCheckExpr)
import Hasura.RQL.DML.Internal (convAnnBoolExpPartialSQL, convPartialSQLExp,
sessVarFromCurrentSetting)
import Hasura.RQL.DML.Mutation
import Hasura.RQL.DML.RemoteJoin
import Hasura.RQL.GBoolExp (toSQLBoolExp)
import Hasura.RQL.GBoolExp (toSQLBoolExp)
import Hasura.RQL.Types
import Hasura.Server.Version (HasVersion)
import Hasura.Server.Version (HasVersion)
import Hasura.SQL.Types
import Hasura.SQL.Value
@ -486,7 +486,8 @@ convertInsert
-> Field -- the mutation field
-> m RespTx
convertInsert rjCtx role tn fld = prefixErrPath fld $ do
mutOutputUnres <- RR.MOutMultirowFields <$> resolveMutationFields (_fType fld) (_fSelSet fld)
selSet <- asObjectSelectionSet $ _fSelSet fld
mutOutputUnres <- RR.MOutMultirowFields <$> resolveMutationFields (_fType fld) selSet
mutOutputRes <- RR.traverseMutationOutput resolveValTxt mutOutputUnres
annVals <- withArg arguments "objects" asArray
-- if insert input objects is empty array then
@ -522,7 +523,8 @@ convertInsertOne
-> Field -- the mutation field
-> m RespTx
convertInsertOne rjCtx role qt field = prefixErrPath field $ do
tableSelFields <- processTableSelectionSet (_fType field) $ _fSelSet field
selSet <- asObjectSelectionSet $ _fSelSet field
tableSelFields <- processTableSelectionSet (_fType field) selSet
let mutationOutputUnresolved = RR.MOutSinglerowObject tableSelFields
mutationOutputResolved <- RR.traverseMutationOutput resolveValTxt mutationOutputUnresolved
annInputObj <- withArg arguments "object" asObject

View File

@ -6,20 +6,20 @@ module Hasura.GraphQL.Resolve.Introspect
import Data.Has
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.SQL.Value as S
import qualified Hasura.SQL.Types as S
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Hasura.SQL.Types as S
import qualified Hasura.SQL.Value as S
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Context
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
@ -38,14 +38,15 @@ instance J.ToJSON TypeKind where
toJSON = J.toJSON . T.pack . drop 2 . show
withSubFields
:: (Monad m)
=> SelSet
:: (MonadError QErr m)
=> SelectionSet
-> (Field -> m J.Value)
-> m J.Object
withSubFields selSet fn =
fmap Map.fromList $ forM (toList selSet) $ \fld -> do
val <- fn fld
return (G.unName $ G.unAlias $ _fAlias fld, val)
withSubFields selSet fn = do
objectSelectionSet <- asObjectSelectionSet selSet
Map.fromList <$> traverseObjectSelectionSet objectSelectionSet fn
-- val <- fn fld
-- return (G.unName $ G.unAlias $ _fAlias fld, val)
namedTyToTxt :: G.NamedType -> Text
namedTyToTxt = G.unName . G.unNamedType
@ -106,9 +107,9 @@ notBuiltinFld f =
getImplTypes :: (MonadReader t m, Has TypeMap t) => AsObjType -> m [ObjTyInfo]
getImplTypes aot = do
tyInfo :: TypeMap <- asks getter
tyInfo <- asks getter
return $ sortOn _otiName $
Map.elems $ getPossibleObjTypes' tyInfo aot
Map.elems $ getPossibleObjTypes tyInfo aot
-- 4.5.2.3
unionR
@ -145,20 +146,24 @@ ifaceR'
=> IFaceTyInfo
-> Field
-> m J.Object
ifaceR' i@(IFaceTyInfo descM n flds) fld = do
ifaceR' ifaceTyInfo fld = do
dummyReadIncludeDeprecated fld
withSubFields (_fSelSet fld) $ \subFld ->
case _fName subFld of
"__typename" -> retJT "__Type"
"kind" -> retJ TKINTERFACE
"name" -> retJ $ namedTyToTxt n
"description" -> retJ $ fmap G.unDescription descM
"name" -> retJ $ namedTyToTxt name
"description" -> retJ $ fmap G.unDescription maybeDescription
"fields" -> fmap J.toJSON $ mapM (`fieldR` subFld) $
sortOn _fiName $
filter notBuiltinFld $ Map.elems flds
filter notBuiltinFld $ Map.elems fields
"possibleTypes" -> fmap J.toJSON $ mapM (`objectTypeR` subFld)
=<< getImplTypes (AOTIFace i)
=<< getImplTypes (AOTIFace ifaceTyInfo)
_ -> return J.Null
where
maybeDescription = _ifDesc ifaceTyInfo
name = _ifName ifaceTyInfo
fields = _ifFields ifaceTyInfo
-- 4.5.2.5
enumTypeR
@ -219,8 +224,10 @@ dummyReadIncludeDeprecated
:: ( Monad m, MonadReusability m, MonadError QErr m )
=> Field
-> m ()
dummyReadIncludeDeprecated fld =
void $ forM (toList (_fSelSet fld)) $ \subFld ->
dummyReadIncludeDeprecated fld = do
selSet <- unAliasedFields . unObjectSelectionSet
<$> asObjectSelectionSet (_fSelSet fld)
forM_ (toList selSet) $ \subFld ->
case _fName subFld of
"fields" -> readIncludeDeprecated subFld
"enumValues" -> readIncludeDeprecated subFld
@ -337,7 +344,7 @@ inputValueR fld (InpValInfo descM n defM ty) =
-- 4.5.5
enumValueR
:: (Monad m)
:: (MonadError QErr m)
=> Field -> EnumValInfo -> m J.Object
enumValueR fld (EnumValInfo descM enumVal isDeprecated) =
withSubFields (_fSelSet fld) $ \subFld ->

View File

@ -33,7 +33,7 @@ import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Resolve.Select (processTableSelectionSet)
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Internal (currentSession, sessVarFromCurrentSetting)
import Hasura.RQL.DML.Mutation (MutationRemoteJoinCtx)
@ -46,15 +46,16 @@ resolveMutationFields
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> G.NamedType -> SelSet -> m (RR.MutFldsG UnresolvedVal)
=> G.NamedType -> ObjectSelectionSet -> m (RR.MutFldsG UnresolvedVal)
resolveMutationFields ty selSet = fmap (map (first FieldName)) $
withSelSet selSet $ \fld -> case _fName fld of
traverseObjectSelectionSet selSet $ \fld -> case _fName fld of
"__typename" -> return $ RR.MExp $ G.unName $ G.unNamedType ty
"affected_rows" -> return RR.MCount
"returning" -> do
annFlds <- processTableSelectionSet (_fType fld) $ _fSelSet fld
annFlds <- asObjectSelectionSet (_fSelSet fld)
>>= processTableSelectionSet (_fType fld)
annFldsResolved <- traverse
(traverse (RS.traverseAnnFld convertPGValueToTextValue)) annFlds
(traverse (RS.traverseAnnField convertPGValueToTextValue)) annFlds
return $ RR.MRet annFldsResolved
G.Name t -> throw500 $ "unexpected field in mutation resp : " <> t
where
@ -327,8 +328,9 @@ mutationFieldsResolver
, Has OrdByCtx r, Has SQLGenCtx r
)
=> Field -> m (RR.MutationOutputG UnresolvedVal)
mutationFieldsResolver field =
RR.MOutMultirowFields <$> resolveMutationFields (_fType field) (_fSelSet field)
mutationFieldsResolver field = do
asObjectSelectionSet (_fSelSet field) >>= \selSet ->
RR.MOutMultirowFields <$> resolveMutationFields (_fType field) selSet
tableSelectionAsMutationOutput
:: ( MonadReusability m, MonadError QErr m
@ -337,7 +339,8 @@ tableSelectionAsMutationOutput
)
=> Field -> m (RR.MutationOutputG UnresolvedVal)
tableSelectionAsMutationOutput field =
RR.MOutSinglerowObject <$> processTableSelectionSet (_fType field) (_fSelSet field)
asObjectSelectionSet (_fSelSet field) >>= \selSet ->
RR.MOutSinglerowObject <$> processTableSelectionSet (_fType field) selSet
-- | build mutation response for empty objects
buildEmptyMutResp :: RR.MutationOutput -> EncJSON

View File

@ -1,38 +1,47 @@
module Hasura.GraphQL.Resolve.Select
( convertSelect
, convertConnectionSelect
, convertConnectionFuncQuery
, convertSelectByPKey
, convertAggSelect
, convertFuncQuerySimple
, convertFuncQueryAgg
, parseColumns
, processTableSelectionSet
, resolveNodeId
, convertNodeSelect
, AnnSimpleSelect
) where
import Control.Lens ((^?), _2)
import Control.Lens (to, (^..), (^?), _2)
import Data.Has
import Data.Parser.JSONPath
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson as J
import qualified Data.Aeson.Internal as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.RQL.DML.Select as RS
import qualified Hasura.SQL.DML as S
import qualified Hasura.RQL.DML.Select as RS
import qualified Hasura.SQL.DML as S
import Hasura.GraphQL.Resolve.BoolExp
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Schema (isAggFld)
import Hasura.GraphQL.Schema (isAggregateField)
import Hasura.GraphQL.Schema.Common (mkTableTy)
import Hasura.GraphQL.Validate
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Internal (onlyPositiveInt)
import Hasura.RQL.DML.Internal (onlyPositiveInt)
import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.SQL.Types
import Hasura.SQL.Value
@ -46,29 +55,29 @@ jsonPathToColExp t = case parseJSONPath t of
elToColExp (Index i) = S.SELit $ T.pack (show i)
argsToColOp :: (MonadReusability m, MonadError QErr m) => ArgsMap -> m (Maybe RS.ColOp)
argsToColOp args = case Map.lookup "path" args of
argsToColumnOp :: (MonadReusability m, MonadError QErr m) => ArgsMap -> m (Maybe RS.ColumnOp)
argsToColumnOp args = case Map.lookup "path" args of
Nothing -> return Nothing
Just txt -> do
mColTxt <- asPGColTextM txt
mColExps <- maybe (return Nothing) jsonPathToColExp mColTxt
return $ RS.ColOp S.jsonbPathOp <$> mColExps
pure $ RS.ColumnOp S.jsonbPathOp <$> mColExps
type AnnFlds = RS.AnnFldsG UnresolvedVal
type AnnFields = RS.AnnFieldsG UnresolvedVal
resolveComputedField
:: ( MonadReusability m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r, MonadError QErr m
)
=> ComputedField -> Field -> m (RS.ComputedFieldSel UnresolvedVal)
=> ComputedField -> Field -> m (RS.ComputedFieldSelect UnresolvedVal)
resolveComputedField computedField fld = fieldAsPath fld $ do
funcArgs <- parseFunctionArgs argSeq argFn $ Map.lookup "args" $ _fArguments fld
let argsWithTableArgument = withTableAndSessionArgument funcArgs
case fieldType of
CFTScalar scalarTy -> do
colOpM <- argsToColOp $ _fArguments fld
colOpM <- argsToColumnOp $ _fArguments fld
pure $ RS.CFSScalar $
RS.ComputedFieldScalarSel qf argsWithTableArgument scalarTy colOpM
RS.ComputedFieldScalarSelect qf argsWithTableArgument scalarTy colOpM
CFTTable (ComputedFieldTable _ cols permFilter permLimit) -> do
let functionFrom = RS.FromFunction qf argsWithTableArgument Nothing
RS.CFSTable RS.JASMultipleRows <$> fromField functionFrom cols permFilter permLimit fld
@ -98,85 +107,142 @@ processTableSelectionSet
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> G.NamedType -> SelSet -> m AnnFlds
=> G.NamedType -> ObjectSelectionSet -> m AnnFields
processTableSelectionSet fldTy flds =
forM (toList flds) $ \fld -> do
fmap (map (\(a, b) -> (FieldName a, b))) $ traverseObjectSelectionSet flds $ \fld -> do
let fldName = _fName fld
let rqlFldName = FieldName $ G.unName $ G.unAlias $ _fAlias fld
(rqlFldName,) <$> case fldName of
"__typename" -> return $ RS.FExp $ G.unName $ G.unNamedType fldTy
case fldName of
"__typename" -> return $ RS.AFExpression $ G.unName $ G.unNamedType fldTy
_ -> do
fldInfo <- getFldInfo fldTy fldName
case fldInfo of
RFNodeId tn pkeys -> pure $ RS.AFNodeId tn pkeys
RFPGColumn colInfo ->
RS.mkAnnColField colInfo <$> argsToColOp (_fArguments fld)
RS.mkAnnColumnField colInfo <$> argsToColumnOp (_fArguments fld)
RFComputedField computedField ->
RS.FComputedField <$> resolveComputedField computedField fld
RFRelationship (RelationshipField relInfo isAgg colGNameMap tableFilter tableLimit) -> do
RS.AFComputedField <$> resolveComputedField computedField fld
RFRelationship (RelationshipField relInfo fieldKind colGNameMap tableFilter tableLimit) -> do
let relTN = riRTable relInfo
colMapping = riMapping relInfo
rn = riName relInfo
if isAgg then do
aggSel <- fromAggField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
return $ RS.FArr $ RS.ASAgg $ RS.AnnRelG rn colMapping aggSel
else do
annSel <- fromField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
let annRel = RS.AnnRelG rn colMapping annSel
return $ case riType relInfo of
ObjRel -> RS.FObj annRel
ArrRel -> RS.FArr $ RS.ASSimple annRel
case fieldKind of
RFKSimple -> do
annSel <- fromField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
let annRel = RS.AnnRelationSelectG rn colMapping annSel
pure $ case riType relInfo of
ObjRel -> RS.AFObjectRelation annRel
ArrRel -> RS.AFArrayRelation $ RS.ASSimple annRel
RFKAggregate -> do
aggSel <- fromAggField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
pure $ RS.AFArrayRelation $ RS.ASAggregate $ RS.AnnRelationSelectG rn colMapping aggSel
RFKConnection pkCols -> do
connSel <- fromConnectionField (RS.FromTable relTN) pkCols tableFilter tableLimit fld
pure $ RS.AFArrayRelation $ RS.ASConnection $ RS.AnnRelationSelectG rn colMapping connSel
RFRemoteRelationship info ->
pure $ RS.FRemote $ RS.RemoteSelect
pure $ RS.AFRemote $ RS.RemoteSelect
(unValidateArgsMap $ _fArguments fld) -- Unvalidate the input arguments
(map unValidateField $ toList $ _fSelSet fld) -- Unvalidate the selection fields
(unValidateSelectionSet $ _fSelSet fld) -- Unvalidate the selection fields
(_rfiHasuraFields info)
(_rfiRemoteFields info)
(_rfiRemoteSchema info)
type TableAggFlds = RS.TableAggFldsG UnresolvedVal
type TableAggregateFields = RS.TableAggregateFieldsG UnresolvedVal
fromAggSelSet
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> PGColGNameMap -> G.NamedType -> SelSet -> m TableAggFlds
=> PGColGNameMap -> G.NamedType -> ObjectSelectionSet -> m TableAggregateFields
fromAggSelSet colGNameMap fldTy selSet = fmap toFields $
withSelSet selSet $ \Field{..} ->
traverseObjectSelectionSet selSet $ \Field{..} ->
case _fName of
"__typename" -> return $ RS.TAFExp $ G.unName $ G.unNamedType fldTy
"aggregate" -> RS.TAFAgg <$> convertAggFld colGNameMap _fType _fSelSet
"nodes" -> RS.TAFNodes <$> processTableSelectionSet _fType _fSelSet
"aggregate" -> do
objSelSet <- asObjectSelectionSet _fSelSet
RS.TAFAgg <$> convertAggregateField colGNameMap _fType objSelSet
"nodes" -> do
objSelSet <- asObjectSelectionSet _fSelSet
RS.TAFNodes <$> processTableSelectionSet _fType objSelSet
G.Name t -> throw500 $ "unexpected field in _agg node: " <> t
type TableArgs = RS.TableArgsG UnresolvedVal
-- | Resolve the selection set of a Relay @*Connection@ object type.
-- The schema only exposes @__typename@, @pageInfo@ and @edges@ on
-- connection types, so any other field name indicates an internal
-- error (hence 'throw500').
fromConnectionSelSet
  :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
     , Has OrdByCtx r, Has SQLGenCtx r
     )
  => G.NamedType -> ObjectSelectionSet -> m (RS.ConnectionFields UnresolvedVal)
fromConnectionSelSet fldTy selSet = fmap toFields $
  traverseObjectSelectionSet selSet $ \Field{..} ->
    case _fName of
      "__typename" -> return $ RS.ConnectionTypename $ G.unName $ G.unNamedType fldTy
      "pageInfo" -> do
        -- 'pageInfo' is itself an object type; coerce the field's selection set
        fSelSet <- asObjectSelectionSet _fSelSet
        RS.ConnectionPageInfo <$> parsePageInfoSelectionSet _fType fSelSet
      "edges" -> do
        fSelSet <- asObjectSelectionSet _fSelSet
        RS.ConnectionEdges <$> parseEdgeSelectionSet _fType fSelSet
      G.Name t -> throw500 $ "unexpected field in _connection node: " <> t
parseTableArgs
-- | Resolve the selection set of a Relay @*Edge@ object type.
-- Expected fields: @__typename@, @cursor@ and @node@ (the table row).
parseEdgeSelectionSet
  :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
     , Has OrdByCtx r, Has SQLGenCtx r
     )
  => G.NamedType -> ObjectSelectionSet -> m (RS.EdgeFields UnresolvedVal)
parseEdgeSelectionSet fldTy selSet = fmap toFields $
  traverseObjectSelectionSet selSet $ \f -> do
    let fTy = _fType f
    case _fName f of
      "__typename" -> pure $ RS.EdgeTypename $ G.unName $ G.unNamedType fldTy
      "cursor" -> pure RS.EdgeCursor
      "node" -> do
        -- 'node' carries the actual table selection set
        fSelSet <- asObjectSelectionSet $ _fSelSet f
        RS.EdgeNode <$> processTableSelectionSet fTy fSelSet
      G.Name t -> throw500 $ "unexpected field in Edge node: " <> t
-- | Resolve the selection set of the Relay @PageInfo@ object type.
-- All of its fields are scalars, so no reader context is needed beyond
-- error reporting.
parsePageInfoSelectionSet
  :: ( MonadReusability m, MonadError QErr m)
  => G.NamedType -> ObjectSelectionSet -> m RS.PageInfoFields
parsePageInfoSelectionSet fldTy selSet =
  fmap toFields $ traverseObjectSelectionSet selSet $ \f ->
    case _fName f of
      "__typename" -> pure $ RS.PageInfoTypename $ G.unName $ G.unNamedType fldTy
      "hasNextPage" -> pure RS.PageInfoHasNextPage
      "hasPreviousPage" -> pure RS.PageInfoHasPreviousPage
      "startCursor" -> pure RS.PageInfoStartCursor
      "endCursor" -> pure RS.PageInfoEndCursor
      G.Name t -> throw500 $ "unexpected field in PageInfo node: " <> t
type SelectArgs = RS.SelectArgsG UnresolvedVal
parseSelectArgs
:: ( MonadReusability m, MonadError QErr m, MonadReader r m
, Has FieldMap r, Has OrdByCtx r
)
=> PGColGNameMap -> ArgsMap -> m TableArgs
parseTableArgs colGNameMap args = do
=> PGColGNameMap -> ArgsMap -> m SelectArgs
parseSelectArgs colGNameMap args = do
whereExpM <- withArgM args "where" parseBoolExp
ordByExpML <- withArgM args "order_by" parseOrderBy
let ordByExpM = NE.nonEmpty =<< ordByExpML
limitExpM <- withArgM args "limit" parseLimit
limitExpM <- withArgM args "limit" $
parseNonNegativeInt "expecting Integer value for \"limit\""
offsetExpM <- withArgM args "offset" $ asPGColumnValue >=> openOpaqueValue >=> txtConverter
distOnColsML <- withArgM args "distinct_on" $ parseColumns colGNameMap
let distOnColsM = NE.nonEmpty =<< distOnColsML
mapM_ (validateDistOn ordByExpM) distOnColsM
return $ RS.TableArgs whereExpM ordByExpM limitExpM offsetExpM distOnColsM
return $ RS.SelectArgs whereExpM ordByExpM limitExpM offsetExpM distOnColsM
where
validateDistOn Nothing _ = return ()
validateDistOn (Just ordBys) cols = withPathK "args" $ do
let colsLen = length cols
initOrdBys = take colsLen $ toList ordBys
initOrdByCols = flip mapMaybe initOrdBys $ \ob ->
case obiColumn ob of
RS.AOCPG pgCol -> Just pgCol
_ -> Nothing
case obiColumn ob of
RS.AOCColumn pgCol -> Just $ pgiColumn pgCol
_ -> Nothing
isValid = (colsLen == length initOrdByCols)
&& all (`elem` initOrdByCols) (toList cols)
@ -195,12 +261,13 @@ fromField
-> Maybe Int
-> Field -> m AnnSimpleSelect
fromField selFrom colGNameMap permFilter permLimitM fld = fieldAsPath fld $ do
tableArgs <- parseTableArgs colGNameMap args
annFlds <- processTableSelectionSet (_fType fld) $ _fSelSet fld
tableArgs <- parseSelectArgs colGNameMap args
selSet <- asObjectSelectionSet $ _fSelSet fld
annFlds <- processTableSelectionSet (_fType fld) selSet
let unresolvedPermFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
let tabPerm = RS.TablePerm unresolvedPermFltr permLimitM
strfyNum <- stringifyNum <$> asks getter
return $ RS.AnnSelG annFlds selFrom tabPerm tableArgs strfyNum
return $ RS.AnnSelectG annFlds selFrom tabPerm tableArgs strfyNum
where
args = _fArguments fld
@ -221,7 +288,8 @@ parseOrderBy
, MonadReader r m
, Has OrdByCtx r
)
=> AnnInpVal -> m [RS.AnnOrderByItemG UnresolvedVal]
=> AnnInpVal
-> m [RS.AnnOrderByItemG UnresolvedVal]
parseOrderBy = fmap concat . withArray f
where
f _ = mapM (withObject (getAnnObItems id))
@ -232,7 +300,7 @@ getAnnObItems
, MonadReader r m
, Has OrdByCtx r
)
=> (RS.AnnObColG UnresolvedVal -> RS.AnnObColG UnresolvedVal)
=> (RS.AnnOrderByElement UnresolvedVal -> RS.AnnOrderByElement UnresolvedVal)
-> G.NamedType
-> AnnGObject
-> m [RS.AnnOrderByItemG UnresolvedVal]
@ -244,7 +312,7 @@ getAnnObItems f nt obj = do
<> showNamedTy nt <> " map"
case ordByItem of
OBIPGCol ci -> do
let aobCol = f $ RS.AOCPG $ pgiColumn ci
let aobCol = f $ RS.AOCColumn ci
(_, enumValM) <- asEnumValM v
ordByItemM <- forM enumValM $ \enumVal -> do
(ordTy, nullsOrd) <- parseOrderByEnum enumVal
@ -253,13 +321,13 @@ getAnnObItems f nt obj = do
OBIRel ri fltr -> do
let unresolvedFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal fltr
let annObColFn = f . RS.AOCObj ri unresolvedFltr
let annObColFn = f . RS.AOCObjectRelation ri unresolvedFltr
flip withObjectM v $ \nameTy objM ->
maybe (pure []) (getAnnObItems annObColFn nameTy) objM
OBIAgg ri relColGNameMap fltr -> do
let unresolvedFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal fltr
let aobColFn = f . RS.AOCAgg ri unresolvedFltr
let aobColFn = f . RS.AOCArrayAggregation ri unresolvedFltr
flip withObjectM v $ \_ objM ->
maybe (pure []) (parseAggOrdBy relColGNameMap aobColFn) objM
@ -270,7 +338,7 @@ mkOrdByItemG ordTy aobCol nullsOrd =
parseAggOrdBy
:: (MonadReusability m, MonadError QErr m)
=> PGColGNameMap
-> (RS.AnnAggOrdBy -> RS.AnnObColG UnresolvedVal)
-> (RS.AnnAggregateOrderBy -> RS.AnnOrderByElement UnresolvedVal)
-> AnnGObject
-> m [RS.AnnOrderByItemG UnresolvedVal]
parseAggOrdBy colGNameMap f annObj =
@ -283,14 +351,14 @@ parseAggOrdBy colGNameMap f annObj =
return $ mkOrdByItemG ordTy (f RS.AAOCount) nullsOrd
return $ maybe [] pure ordByItemM
G.Name opT ->
G.Name opText ->
flip withObject obVal $ \_ opObObj -> fmap catMaybes $
forM (OMap.toList opObObj) $ \(colName, eVal) -> do
(_, enumValM) <- asEnumValM eVal
forM enumValM $ \enumVal -> do
(ordTy, nullsOrd) <- parseOrderByEnum enumVal
col <- pgiColumn <$> resolvePGCol colGNameMap colName
let aobCol = f $ RS.AAOOp opT col
col <- resolvePGCol colGNameMap colName
let aobCol = f $ RS.AAOOp opText col
return $ mkOrdByItemG ordTy aobCol nullsOrd
parseOrderByEnum
@ -307,15 +375,14 @@ parseOrderByEnum = \case
G.EnumValue v -> throw500 $
"enum value " <> showName v <> " not found in type order_by"
parseLimit :: (MonadReusability m, MonadError QErr m) => AnnInpVal -> m Int
parseLimit v = do
parseNonNegativeInt
:: (MonadReusability m, MonadError QErr m) => Text -> AnnInpVal -> m Int
parseNonNegativeInt errMsg v = do
pgColVal <- openOpaqueValue =<< asPGColumnValue v
limit <- maybe noIntErr return . pgColValueToInt . pstValue $ _apvValue pgColVal
limit <- maybe (throwVE errMsg) return . pgColValueToInt . pstValue $ _apvValue pgColVal
-- validate int value
onlyPositiveInt limit
return limit
where
noIntErr = throwVE "expecting Integer value for \"limit\""
type AnnSimpleSel = RS.AnnSimpleSelG UnresolvedVal
@ -331,14 +398,15 @@ fromFieldByPKey
-> AnnBoolExpPartialSQL -> Field -> m AnnSimpleSel
fromFieldByPKey tn colArgMap permFilter fld = fieldAsPath fld $ do
boolExp <- pgColValToBoolExp colArgMap $ _fArguments fld
annFlds <- processTableSelectionSet fldTy $ _fSelSet fld
selSet <- asObjectSelectionSet $ _fSelSet fld
annFlds <- processTableSelectionSet fldTy selSet
let tabFrom = RS.FromTable tn
unresolvedPermFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal
permFilter
tabPerm = RS.TablePerm unresolvedPermFltr Nothing
tabArgs = RS.noTableArgs { RS._taWhere = Just boolExp}
tabArgs = RS.noSelectArgs { RS._saWhere = Just boolExp}
strfyNum <- stringifyNum <$> asks getter
return $ RS.AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum
return $ RS.AnnSelectG annFlds tabFrom tabPerm tabArgs strfyNum
where
fldTy = _fType fld
@ -365,14 +433,18 @@ convertSelectByPKey opCtx fld =
SelPkOpCtx qt _ permFilter colArgMap = opCtx
-- agg select related
parseColumns :: (MonadReusability m, MonadError QErr m) => PGColGNameMap -> AnnInpVal -> m [PGCol]
parseColumns
:: (MonadReusability m, MonadError QErr m)
=> PGColGNameMap -> AnnInpVal -> m [PGCol]
parseColumns allColFldMap val =
flip withArray val $ \_ vals ->
forM vals $ \v -> do
(_, G.EnumValue enumVal) <- asEnumVal v
pgiColumn <$> resolvePGCol allColFldMap enumVal
convertCount :: (MonadReusability m, MonadError QErr m) => PGColGNameMap -> ArgsMap -> m S.CountType
convertCount
:: (MonadReusability m, MonadError QErr m)
=> PGColGNameMap -> ArgsMap -> m S.CountType
convertCount colGNameMap args = do
columnsM <- withArgM args "columns" $ parseColumns colGNameMap
isDistinct <- or <$> withArgM args "distinct" parseDistinct
@ -391,32 +463,33 @@ convertCount colGNameMap args = do
-- | Tag each (alias, value) pair with a 'FieldName' to build result fields.
toFields :: [(T.Text, a)] -> RS.Fields a
toFields = map (first FieldName)
convertColFlds
convertColumnFields
:: (MonadError QErr m)
=> PGColGNameMap -> G.NamedType -> SelSet -> m RS.ColFlds
convertColFlds colGNameMap ty selSet = fmap toFields $
withSelSet selSet $ \fld ->
=> PGColGNameMap -> G.NamedType -> ObjectSelectionSet -> m RS.ColumnFields
convertColumnFields colGNameMap ty selSet = fmap toFields $
traverseObjectSelectionSet selSet $ \fld ->
case _fName fld of
"__typename" -> return $ RS.PCFExp $ G.unName $ G.unNamedType ty
n -> (RS.PCFCol . pgiColumn) <$> resolvePGCol colGNameMap n
n -> RS.PCFCol . pgiColumn <$> resolvePGCol colGNameMap n
convertAggFld
convertAggregateField
:: (MonadReusability m, MonadError QErr m)
=> PGColGNameMap -> G.NamedType -> SelSet -> m RS.AggFlds
convertAggFld colGNameMap ty selSet = fmap toFields $
withSelSet selSet $ \Field{..} ->
=> PGColGNameMap -> G.NamedType -> ObjectSelectionSet -> m RS.AggregateFields
convertAggregateField colGNameMap ty selSet = fmap toFields $
traverseObjectSelectionSet selSet $ \Field{..} ->
case _fName of
"__typename" -> return $ RS.AFExp $ G.unName $ G.unNamedType ty
"count" -> RS.AFCount <$> convertCount colGNameMap _fArguments
n -> do
colFlds <- convertColFlds colGNameMap _fType _fSelSet
unless (isAggFld n) $ throwInvalidFld n
return $ RS.AFOp $ RS.AggOp (G.unName n) colFlds
fSelSet <- asObjectSelectionSet _fSelSet
colFlds <- convertColumnFields colGNameMap _fType fSelSet
unless (isAggregateField n) $ throwInvalidFld n
return $ RS.AFOp $ RS.AggregateOp (G.unName n) colFlds
where
throwInvalidFld (G.Name t) =
throw500 $ "unexpected field in _aggregate node: " <> t
type AnnAggSel = RS.AnnAggSelG UnresolvedVal
type AnnAggregateSelect = RS.AnnAggregateSelectG UnresolvedVal
fromAggField
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
@ -426,29 +499,162 @@ fromAggField
-> PGColGNameMap
-> AnnBoolExpPartialSQL
-> Maybe Int
-> Field -> m AnnAggSel
-> Field -> m AnnAggregateSelect
fromAggField selectFrom colGNameMap permFilter permLimit fld = fieldAsPath fld $ do
tableArgs <- parseTableArgs colGNameMap args
aggSelFlds <- fromAggSelSet colGNameMap (_fType fld) (_fSelSet fld)
tableArgs <- parseSelectArgs colGNameMap args
selSet <- asObjectSelectionSet $ _fSelSet fld
aggSelFlds <- fromAggSelSet colGNameMap (_fType fld) selSet
let unresolvedPermFltr =
fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
let tabPerm = RS.TablePerm unresolvedPermFltr permLimit
strfyNum <- stringifyNum <$> asks getter
return $ RS.AnnSelG aggSelFlds selectFrom tabPerm tableArgs strfyNum
return $ RS.AnnSelectG aggSelFlds selectFrom tabPerm tableArgs strfyNum
where
args = _fArguments fld
-- | Resolve a Relay connection field into a 'RS.ConnectionSelect',
-- combining the connection arguments (where/order_by/first/last/after/before)
-- with the resolved connection selection set and the role's select
-- permission filter and limit.
fromConnectionField
  :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
     , Has OrdByCtx r, Has SQLGenCtx r
     )
  => RS.SelectFromG UnresolvedVal
  -> NonEmpty PGColumnInfo
  -- ^ primary key columns, threaded through for cursor handling
  -> AnnBoolExpPartialSQL
  -- ^ select permission filter
  -> Maybe Int
  -- ^ select permission limit
  -> Field -> m (RS.ConnectionSelect UnresolvedVal)
fromConnectionField selectFrom pkCols permFilter permLimit fld = fieldAsPath fld $ do
  (tableArgs, slice, split) <- parseConnectionArgs pkCols args
  selSet <- asObjectSelectionSet $ _fSelSet fld
  connSelFlds <- fromConnectionSelSet (_fType fld) selSet
  strfyNum <- stringifyNum <$> asks getter
  let unresolvedPermFltr =
        fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
      tabPerm = RS.TablePerm unresolvedPermFltr permLimit
      annSel = RS.AnnSelectG connSelFlds selectFrom tabPerm tableArgs strfyNum
  pure $ RS.ConnectionSelect pkCols split slice annSel
  where
    args = _fArguments fld
-- | Parse Relay connection arguments into plain select arguments, an
-- optional slice (@first@/@last@) and an optional split (@after@/@before@
-- cursor, decoded from base64 JSON and validated against the effective
-- order-by).
parseConnectionArgs
  :: forall r m.
     ( MonadReusability m, MonadError QErr m, MonadReader r m
     , Has FieldMap r, Has OrdByCtx r
     )
  => NonEmpty PGColumnInfo
  -> ArgsMap
  -> m ( SelectArgs
       , Maybe RS.ConnectionSlice
       , Maybe (NE.NonEmpty (RS.ConnectionSplit UnresolvedVal))
       )
parseConnectionArgs pKeyColumns args = do
  whereExpM <- withArgM args "where" parseBoolExp
  ordByExpML <- withArgM args "order_by" parseOrderBy
  -- "first" and "last" are mutually exclusive and must be non-negative
  slice <- case (Map.lookup "first" args, Map.lookup "last" args) of
    (Nothing, Nothing) -> pure Nothing
    (Just _, Just _) -> throwVE "\"first\" and \"last\" are not allowed at once"
    (Just v, Nothing) -> Just . RS.SliceFirst <$> parseNonNegativeInt
      "expecting Integer value for \"first\"" v
    (Nothing, Just v) -> Just . RS.SliceLast <$> parseNonNegativeInt
      "expecting Integer value for \"last\"" v
  -- likewise "after" and "before" are mutually exclusive; the cursor text
  -- is base64-encoded JSON
  maybeSplit <- case (Map.lookup "after" args, Map.lookup "before" args) of
    (Nothing, Nothing) -> pure Nothing
    (Just _, Just _) -> throwVE "\"after\" and \"before\" are not allowed at once"
    (Just v, Nothing) -> fmap ((RS.CSKAfter,) . base64Decode) <$> asPGColTextM v
    (Nothing, Just v) -> fmap ((RS.CSKBefore,) . base64Decode) <$> asPGColTextM v
  -- primary key columns are appended to the user's ordering so a cursor
  -- always identifies a unique row
  let ordByExpM = NE.nonEmpty =<< appendPrimaryKeyOrderBy <$> ordByExpML
      tableArgs = RS.SelectArgs whereExpM ordByExpM Nothing Nothing Nothing
  split <- mapM (uncurry (validateConnectionSplit ordByExpM)) maybeSplit
  pure (tableArgs, slice, split)
  where
    -- Append an order-by item for each primary key column not already
    -- present in the user-supplied ordering.
    appendPrimaryKeyOrderBy :: [RS.AnnOrderByItemG v] -> [RS.AnnOrderByItemG v]
    appendPrimaryKeyOrderBy orderBys =
      let orderByColumnNames =
            orderBys ^.. traverse . to obiColumn . RS._AOCColumn . to pgiColumn
          pkeyOrderBys = flip mapMaybe (toList pKeyColumns) $ \pgColumnInfo ->
            if pgiColumn pgColumnInfo `elem` orderByColumnNames then Nothing
            else Just $ OrderByItemG Nothing (RS.AOCColumn pgColumnInfo) Nothing
      in orderBys <> pkeyOrderBys

    -- Decode the cursor (JSON object) and produce one split value per
    -- order-by item, or per primary key column when no order-by is given.
    validateConnectionSplit
      :: Maybe (NonEmpty (RS.AnnOrderByItemG UnresolvedVal))
      -> RS.ConnectionSplitKind
      -> BL.ByteString
      -> m (NonEmpty (RS.ConnectionSplit UnresolvedVal))
    validateConnectionSplit maybeOrderBys splitKind cursorSplit = do
      cursorValue <- either (const throwInvalidCursor) pure $
        J.eitherDecode cursorSplit
      case maybeOrderBys of
        Nothing -> forM pKeyColumns $
          \pgColumnInfo -> do
            let columnJsonPath = [J.Key $ getPGColTxt $ pgiColumn pgColumnInfo]
            pgColumnValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
              executeJSONPath columnJsonPath cursorValue
            pgValue <- parsePGScalarValue (pgiType pgColumnInfo) pgColumnValue
            let unresolvedValue = UVPG $ AnnPGVal Nothing False pgValue
            pure $ RS.ConnectionSplit splitKind unresolvedValue $
              OrderByItemG Nothing (RS.AOCColumn pgColumnInfo) Nothing
        Just orderBys ->
          forM orderBys $ \orderBy -> do
            let OrderByItemG orderType annObCol nullsOrder = orderBy
            orderByItemValue <- maybe throwInvalidCursor pure $ iResultToMaybe $
              executeJSONPath (getPathFromOrderBy annObCol) cursorValue
            pgValue <- parsePGScalarValue (getOrderByColumnType annObCol) orderByItemValue
            let unresolvedValue = UVPG $ AnnPGVal Nothing False pgValue
            pure $ RS.ConnectionSplit splitKind unresolvedValue $
              OrderByItemG orderType (() <$ annObCol) nullsOrder
      where
        throwInvalidCursor = throwVE "the \"after\" or \"before\" cursor is invalid"

        iResultToMaybe = \case
          J.ISuccess v -> Just v
          J.IError{}   -> Nothing

        -- JSON path inside the cursor object for a given order-by element
        getPathFromOrderBy = \case
          RS.AOCColumn pgColInfo ->
            let pathElement = J.Key $ getPGColTxt $ pgiColumn pgColInfo
            in [pathElement]
          RS.AOCObjectRelation relInfo _ obCol ->
            let pathElement = J.Key $ relNameToTxt $ riName relInfo
            in pathElement : getPathFromOrderBy obCol
          RS.AOCArrayAggregation relInfo _ aggOb ->
            let fieldName = J.Key $ relNameToTxt (riName relInfo) <> "_aggregate"
            in fieldName : case aggOb of
                 RS.AAOCount -> [J.Key "count"]
                 RS.AAOOp t col -> [J.Key t, J.Key $ getPGColTxt $ pgiColumn col]

        -- Postgres column type of an order-by element's value, needed to
        -- parse the JSON cursor value back into a PG scalar
        getOrderByColumnType = \case
          RS.AOCColumn pgColInfo -> pgiType pgColInfo
          RS.AOCObjectRelation _ _ obCol -> getOrderByColumnType obCol
          RS.AOCArrayAggregation _ _ aggOb ->
            case aggOb of
              RS.AAOCount -> PGColumnScalar PGInteger
              RS.AAOOp _ colInfo -> pgiType colInfo
convertAggSelect
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r
)
=> SelOpCtx -> Field -> m (RS.AnnAggSelG UnresolvedVal)
=> SelOpCtx -> Field -> m (RS.AnnAggregateSelectG UnresolvedVal)
convertAggSelect opCtx fld =
withPathK "selectionSet" $
fromAggField (RS.FromTable qt) colGNameMap permFilter permLimit fld
where
SelOpCtx qt _ colGNameMap permFilter permLimit = opCtx
-- | Resolve a top-level table connection query field.
convertConnectionSelect
  :: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
     , Has OrdByCtx r, Has SQLGenCtx r
     )
  => NonEmpty PGColumnInfo -> SelOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
convertConnectionSelect pkCols opCtx fld =
  withPathK "selectionSet" $
    fromConnectionField (RS.FromTable qt) pkCols permFilter permLimit fld
  where
    SelOpCtx qt _ _ permFilter permLimit = opCtx
parseFunctionArgs
:: (MonadReusability m, MonadError QErr m)
=> Seq.Seq a
@ -524,10 +730,77 @@ convertFuncQueryAgg
, Has OrdByCtx r
, Has SQLGenCtx r
)
=> FuncQOpCtx -> Field -> m AnnAggSel
=> FuncQOpCtx -> Field -> m AnnAggregateSelect
convertFuncQueryAgg funcOpCtx fld =
withPathK "selectionSet" $ fieldAsPath fld $ do
selectFrom <- makeFunctionSelectFrom qf argSeq fld
fromAggField selectFrom colGNameMap permFilter permLimit fld
where
FuncQOpCtx qf argSeq _ colGNameMap permFilter permLimit = funcOpCtx
-- | Resolve a connection query backed by a SQL function: build the
-- function 'selectFrom' from the field's arguments, then resolve it as a
-- regular connection field.
convertConnectionFuncQuery
  :: ( MonadReusability m
     , MonadError QErr m
     , MonadReader r m
     , Has FieldMap r
     , Has OrdByCtx r
     , Has SQLGenCtx r
     )
  => NonEmpty PGColumnInfo -> FuncQOpCtx -> Field -> m (RS.ConnectionSelect UnresolvedVal)
convertConnectionFuncQuery pkCols funcOpCtx fld =
  withPathK "selectionSet" $ fieldAsPath fld $ do
    selectFrom <- makeFunctionSelectFrom qf argSeq fld
    fromConnectionField selectFrom pkCols permFilter permLimit fld
  where
    FuncQOpCtx qf argSeq _ _ permFilter permLimit = funcOpCtx
-- | Decode the Relay @node@ field's @id@ argument. The id is expected to
-- be base64-encoded JSON; any decoding failure is reported as a
-- validation error.
resolveNodeId
  :: forall m. ( MonadError QErr m
               , MonadReusability m
               )
  => Field -> m NodeIdData
resolveNodeId field =
  withPathK "selectionSet" $ fieldAsPath field $ do
    nodeIdText <- asPGColText =<< getArg (_fArguments field) "id"
    either (const throwInvalidNodeId) pure $
      J.eitherDecode $ base64Decode nodeIdText
  where
    throwInvalidNodeId = throwVE "the node id is invalid"
-- | Resolve the Relay @node@ field for a specific table: parse the
-- interface selection set, pick this table's member selection set, and
-- build a select filtered by the primary key values decoded from the
-- node id.
convertNodeSelect
  :: ( MonadReusability m
     , MonadError QErr m
     , MonadReader r m
     , Has FieldMap r
     , Has OrdByCtx r
     , Has SQLGenCtx r
     )
  => SelOpCtx
  -> Map.HashMap PGCol J.Value
  -- ^ primary key column values from the decoded node id
  -> Field
  -> m (RS.AnnSimpleSelG UnresolvedVal)
convertNodeSelect selOpCtx pkeyColumnValues field =
  withPathK "selectionSet" $ fieldAsPath field $ do
    -- Parse selection set as interface ('node' returns the Node interface)
    ifaceSelectionSet <- asInterfaceSelectionSet $ _fSelSet field
    let tableObjectType = mkTableTy table
        selSet = getMemberSelectionSet tableObjectType ifaceSelectionSet
        unresolvedPermFilter = fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
        tablePerm = RS.TablePerm unresolvedPermFilter permLimit
    -- Resolve the table selection set
    annFields <- processTableSelectionSet tableObjectType selSet
    -- Resolve the Node id primary key column values, rejecting columns
    -- that don't exist on this table
    unresolvedPkeyValues <- flip Map.traverseWithKey pkeyColumnValues $
      \pgColumn jsonValue -> case Map.lookup pgColumn pgColumnMap of
        Nothing -> throwVE $ "column " <> pgColumn <<> " not found"
        Just columnInfo -> (,columnInfo) . UVPG . AnnPGVal Nothing False <$>
          parsePGScalarValue (pgiType columnInfo) jsonValue
    -- Generate the bool expression from the primary key column values
    -- (conjunction of equalities, one per primary key column)
    let pkeyBoolExp = BoolAnd $ flip map (Map.elems unresolvedPkeyValues) $
          \(unresolvedValue, columnInfo) -> (BoolFld . AVCol columnInfo) [AEQ True unresolvedValue]
        selectArgs = RS.noSelectArgs{RS._saWhere = Just pkeyBoolExp}
    strfyNum <- stringifyNum <$> asks getter
    pure $ RS.AnnSelectG annFields (RS.FromTable table) tablePerm selectArgs strfyNum
  where
    SelOpCtx table _ allColumns permFilter permLimit = selOpCtx
    -- index the table's columns by Postgres column name
    pgColumnMap = mapFromL pgiColumn $ Map.elems allColumns

View File

@ -7,6 +7,9 @@ module Hasura.GraphQL.Resolve.Types
import Control.Lens.TH
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.Sequence as Seq
import qualified Data.Text as T
@ -28,12 +31,17 @@ import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
type NodeSelectMap = Map.HashMap G.NamedType SelOpCtx
data QueryCtx
= QCSelect !SelOpCtx
= QCNodeSelect !NodeSelectMap
| QCSelect !SelOpCtx
| QCSelectConnection !(NonEmpty PGColumnInfo) !SelOpCtx
| QCSelectPkey !SelPkOpCtx
| QCSelectAgg !SelOpCtx
| QCFuncQuery !FuncQOpCtx
| QCFuncAggQuery !FuncQOpCtx
| QCFuncConnection !(NonEmpty PGColumnInfo) !FuncQOpCtx
| QCAsyncActionFetch !ActionSelectOpContext
| QCAction !ActionExecutionContext
deriving (Show, Eq)
@ -133,10 +141,16 @@ data ActionSelectOpContext
-- used in resolvers
type PGColGNameMap = Map.HashMap G.Name PGColumnInfo
-- | How a relationship field is queried: as an aggregate, a plain
-- relationship, or a Relay connection (which carries the remote table's
-- primary key columns for cursor handling).
data RelationshipFieldKind
  = RFKAggregate
  | RFKSimple
  | RFKConnection !(NonEmpty PGColumnInfo)
  deriving (Show, Eq)
data RelationshipField
= RelationshipField
{ _rfInfo :: !RelInfo
, _rfIsAgg :: !Bool
, _rfIsAgg :: !RelationshipFieldKind
, _rfCols :: !PGColGNameMap
, _rfPermFilter :: !AnnBoolExpPartialSQL
, _rfPermLimit :: !(Maybe Int)
@ -170,6 +184,7 @@ data ResolveField
| RFRelationship !RelationshipField
| RFComputedField !ComputedField
| RFRemoteRelationship !RemoteFieldInfo
| RFNodeId !QualifiedTable !(NonEmpty PGColumnInfo)
deriving (Show, Eq)
type FieldMap = Map.HashMap (G.NamedType, G.Name) ResolveField
@ -247,6 +262,13 @@ data InputFunctionArgument
| IFAUnknown !FunctionArgItem -- ^ Unknown value, need to be parsed
deriving (Show, Eq)
-- | Decoded contents of a Relay node id: the target table and a map of
-- its primary key column names to their JSON values.
data NodeIdData
  = NodeIdData
  { _nidTable   :: !QualifiedTable
  , _nidColumns :: !(Map.HashMap PGCol J.Value)
  } deriving (Show, Eq)
-- FromJSON instance derived with the "_nid" prefix dropped and keys
-- converted to snake_case
$(J.deriveFromJSON (J.aesonDrop 4 J.snakeCase) ''NodeIdData)
-- template haskell related
$(makePrisms ''ResolveField)
$(makeLenses ''ComputedField)

View File

@ -1,18 +1,33 @@
module Hasura.GraphQL.Schema
( mkGCtxMap
, GCtxMap
, getGCtx
, GCtx(..)
, QueryCtx(..)
, MutationCtx(..)
, InsCtx(..)
, InsCtxMap
, RelationInfoMap
, isAggFld
, qualObjectToName
, ppGCtx
, checkConflictingNode
, checkSchemaConflicts
-- * To be consumed by Hasura.GraphQL.RelaySchema module
, mkGCtx
, isAggregateField
, qualObjectToName
, ppGCtx
, getSelPerm
, isValidObjectName
, mkAdminSelFlds
, noFilter
, getGCtx
, getMutationRootFieldsRole
, makeFieldMap
, mkMutationTypesAndFieldsRole
, mkAdminInsCtx
, mkValidConstraints
, getValidCols
, mkInsCtx
) where
import Control.Lens.Extended hiding (op)
@ -108,28 +123,104 @@ isRelNullable fim ri = isNullable
lColInfos = getColInfos lCols allCols
isNullable = any pgiIsNullable lColInfos
-- | Index a list of column infos by their GraphQL field name.
mkPGColGNameMap :: [PGColumnInfo] -> PGColGNameMap
mkPGColGNameMap cols = Map.fromList $
  flip map cols $ \ci -> (pgiName ci, ci)
numAggOps :: [G.Name]
numAggOps = [ "sum", "avg", "stddev", "stddev_samp", "stddev_pop"
, "variance", "var_samp", "var_pop"
]
compAggOps :: [G.Name]
compAggOps = ["max", "min"]
isAggFld :: G.Name -> Bool
isAggFld = flip elem (numAggOps <> compAggOps)
isAggregateField :: G.Name -> Bool
isAggregateField = flip elem (numAggregateOps <> compAggregateOps)
mkComputedFieldFunctionArgSeq :: Seq.Seq FunctionArg -> ComputedFieldFunctionArgSeq
mkComputedFieldFunctionArgSeq inputArgs =
Seq.fromList $ procFuncArgs inputArgs faName $
\fa t -> FunctionArgItem (G.Name t) (faName fa) (faHasDefault fa)
-- | Build the GraphQL type map and field map contributed by a table's
-- mutations (insert/update/delete) for one role, derived from that
-- role's permissions and the underlying view's mutability.
mkMutationTypesAndFieldsRole
  :: QualifiedTable
  -> Maybe ([PGColumnInfo], RelationInfoMap)
  -- ^ insert permission
  -> Maybe [SelField]
  -- ^ select permission
  -> Maybe [PGColumnInfo]
  -- ^ update cols
  -> Maybe ()
  -- ^ delete cols
  -> Maybe (PrimaryKey PGColumnInfo)
  -> [ConstraintName]
  -- ^ constraints
  -> Maybe ViewInfo
  -> (TypeMap, FieldMap)
mkMutationTypesAndFieldsRole tn insPermM selFldsM updColsM delPermM pkeyCols constraints viM =
  (mkTyInfoMap allTypes, fieldMap)
  where
    allTypes = relInsInpObjTys <> onConflictTypes <> jsonOpTys
               <> mutationTypes <> referencedEnumTypes

    -- upsert is only possible with an update permission on a mutable view
    upsertPerm = isJust updColsM
    isUpsertable = upsertable constraints upsertPerm $ isJust viM
    updatableCols = maybe [] (map pgiName) updColsM
    onConflictTypes = mkOnConflictTypes tn constraints updatableCols isUpsertable
    jsonOpTys = fromMaybe [] updJSONOpInpObjTysM
    relInsInpObjTys = maybe [] (map TIInpObj) $
                      mutHelper viIsInsertable relInsInpObjsM

    mutationTypes = catMaybes
      [ TIInpObj <$> mutHelper viIsInsertable insInpObjM
      , TIInpObj <$> mutHelper viIsUpdatable updSetInpObjM
      , TIInpObj <$> mutHelper viIsUpdatable updIncInpObjM
      , TIInpObj <$> mutHelper viIsUpdatable primaryKeysInpObjM
      , TIObj <$> mutRespObjM
      ]

    -- keep a type only when the view supports the relevant mutation
    mutHelper :: (ViewInfo -> Bool) -> Maybe a -> Maybe a
    mutHelper f objM = bool Nothing objM $ isMutable f viM

    fieldMap = Map.unions $ catMaybes [insInpObjFldsM, updSetInpObjFldsM]

    -- helper: map (type, column field name) -> column resolve info
    mkColFldMap ty cols = Map.fromList $ flip map cols $
      \ci -> ((ty, pgiName ci), RFPGColumn ci)

    -- insert input type
    insInpObjM = uncurry (mkInsInp tn) <$> insPermM
    -- column fields used in insert input object
    insInpObjFldsM = (mkColFldMap (mkInsInpTy tn) . fst) <$> insPermM
    -- relationship input objects
    relInsInpObjsM = mkRelInsInps tn isUpsertable <$ insPermM
    -- update set input type
    updSetInpObjM = mkUpdSetInp tn <$> updColsM
    -- update increment input type
    updIncInpObjM = mkUpdIncInp tn updColsM
    -- update json operator input type
    updJSONOpInpObjsM = mkUpdJSONOpInp tn <$> updColsM
    updJSONOpInpObjTysM = map TIInpObj <$> updJSONOpInpObjsM
    -- fields used in set input object
    updSetInpObjFldsM = mkColFldMap (mkUpdSetTy tn) <$> updColsM

    -- primary key columns input object for update_by_pk; requires a
    -- select permission so the by-pk lookup is allowed
    primaryKeysInpObjM = guard (isJust selFldsM) *> (mkPKeyColumnsInpObj tn <$> pkeyCols)

    -- mut resp obj
    mutRespObjM =
      if isMut
      then Just $ mkMutRespObj tn $ isJust selFldsM
      else Nothing

    isMut = (isJust insPermM || isJust updColsM || isJust delPermM)
            && any (`isMutable` viM) [viIsInsertable, viIsUpdatable, viIsDeletable]

    -- the types for all enums that are /referenced/ by this table (not
    -- /defined/ by this table; there isn't actually any need to generate a
    -- GraphQL enum type for an enum table if it's never referenced anywhere
    -- else)
    referencedEnumTypes =
      let allColumnInfos =
            (selFldsM ^.. _Just.traverse._SFPGColumn)
            <> (insPermM ^. _Just._1)
            <> (updColsM ^. _Just)
          allEnumReferences = allColumnInfos ^.. traverse.to pgiType._PGColumnEnumReference
      in flip map allEnumReferences $ \enumReference@(EnumReference referencedTableName _) ->
           let typeName = mkTableEnumType referencedTableName
           in TIEnum $ mkHsraEnumTyInfo Nothing typeName (EnumValuesReference enumReference)
-- see Note [Split schema generation (TODO)]
mkGCtxRole'
mkTyAggRole
:: QualifiedTable
-> Maybe PGDescription
-- ^ Postgres description
@ -148,73 +239,31 @@ mkGCtxRole'
-> [FunctionInfo]
-- ^ all functions
-> TyAgg
mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints viM funcs =
TyAgg (mkTyInfoMap allTypes) fieldMap scalars ordByCtx
mkTyAggRole tn descM insPermM selPermM updColsM delPermM pkeyCols constraints viM funcs =
let (mutationTypes, mutationFields) =
mkMutationTypesAndFieldsRole tn insPermM selFldsM updColsM delPermM pkeyCols constraints viM
in TyAgg (mkTyInfoMap allTypes <> mutationTypes)
(fieldMap <> mutationFields)
scalars ordByCtx
where
ordByCtx = fromMaybe Map.empty ordByCtxM
upsertPerm = isJust updColsM
isUpsertable = upsertable constraints upsertPerm $ isJust viM
updatableCols = maybe [] (map pgiName) updColsM
onConflictTypes = mkOnConflictTypes tn constraints updatableCols isUpsertable
jsonOpTys = fromMaybe [] updJSONOpInpObjTysM
relInsInpObjTys = maybe [] (map TIInpObj) $
mutHelper viIsInsertable relInsInpObjsM
funcInpArgTys = bool [] (map TIInpObj funcArgInpObjs) $ isJust selFldsM
allTypes = relInsInpObjTys <> onConflictTypes <> jsonOpTys
<> queryTypes <> aggQueryTypes <> mutationTypes
<> funcInpArgTys <> referencedEnumTypes <> computedFieldFuncArgsInps
allTypes = queryTypes <> aggQueryTypes
<> funcInpArgTys <> computedFieldFuncArgsInps
queryTypes = catMaybes
queryTypes = map TIObj selectObjects <>
catMaybes
[ TIInpObj <$> boolExpInpObjM
, TIInpObj <$> ordByInpObjM
, TIObj <$> selObjM
, TIEnum <$> selColInpTyM
]
aggQueryTypes = map TIObj aggObjs <> map TIInpObj aggOrdByInps
mutationTypes = catMaybes
[ TIInpObj <$> mutHelper viIsInsertable insInpObjM
, TIInpObj <$> mutHelper viIsUpdatable updSetInpObjM
, TIInpObj <$> mutHelper viIsUpdatable updIncInpObjM
, TIInpObj <$> mutHelper viIsUpdatable primaryKeysInpObjM
, TIObj <$> mutRespObjM
, TIEnum <$> selColInpTyM
]
mutHelper :: (ViewInfo -> Bool) -> Maybe a -> Maybe a
mutHelper f objM = bool Nothing objM $ isMutable f viM
fieldMap = Map.unions $ catMaybes
[ insInpObjFldsM, updSetInpObjFldsM
, boolExpInpObjFldsM , selObjFldsM
]
fieldMap = Map.unions $ catMaybes [boolExpInpObjFldsM , selObjFldsM]
scalars = selByPkScalarSet <> funcArgScalarSet <> computedFieldFuncArgScalars
-- helper
mkColFldMap ty cols = Map.fromList $ flip map cols $
\ci -> ((ty, pgiName ci), RFPGColumn ci)
-- insert input type
insInpObjM = uncurry (mkInsInp tn) <$> insPermM
-- column fields used in insert input object
insInpObjFldsM = (mkColFldMap (mkInsInpTy tn) . fst) <$> insPermM
-- relationship input objects
relInsInpObjsM = const (mkRelInsInps tn isUpsertable) <$> insPermM
-- update set input type
updSetInpObjM = mkUpdSetInp tn <$> updColsM
-- update increment input type
updIncInpObjM = mkUpdIncInp tn updColsM
-- update json operator input type
updJSONOpInpObjsM = mkUpdJSONOpInp tn <$> updColsM
updJSONOpInpObjTysM = map TIInpObj <$> updJSONOpInpObjsM
-- fields used in set input object
updSetInpObjFldsM = mkColFldMap (mkUpdSetTy tn) <$> updColsM
-- primary key columns input object for update_by_pk
primaryKeysInpObjM = guard (isJust selPermM) *> (mkPKeyColumnsInpObj tn <$> pkeyCols)
selFldsM = snd <$> selPermM
selColNamesM = (map pgiName . getPGColumnFields) <$> selFldsM
selColInpTyM = mkSelColumnTy tn <$> selColNamesM
@ -239,13 +288,13 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
mkFldMap ty = Map.fromList . concatMap (mkFld ty)
mkFld ty = \case
SFPGColumn ci -> [((ty, pgiName ci), RFPGColumn ci)]
SFRelationship (RelationshipFieldInfo relInfo allowAgg cols permFilter permLimit _) ->
SFRelationship (RelationshipFieldInfo relInfo allowAgg cols permFilter permLimit _ _) ->
let relationshipName = riName relInfo
relFld = ( (ty, mkRelName relationshipName)
, RFRelationship $ RelationshipField relInfo False cols permFilter permLimit
, RFRelationship $ RelationshipField relInfo RFKSimple cols permFilter permLimit
)
aggRelFld = ( (ty, mkAggRelName relationshipName)
, RFRelationship $ RelationshipField relInfo True cols permFilter permLimit
, RFRelationship $ RelationshipField relInfo RFKAggregate cols permFilter permLimit
)
in case riType relInfo of
ObjRel -> [relFld]
@ -262,17 +311,12 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
-- the fields used in bool exp
boolExpInpObjFldsM = mkFldMap (mkBoolExpTy tn) <$> selFldsM
-- mut resp obj
mutRespObjM =
if isMut
then Just $ mkMutRespObj tn $ isJust selFldsM
else Nothing
isMut = (isJust insPermM || isJust updColsM || isJust delPermM)
&& any (`isMutable` viM) [viIsInsertable, viIsUpdatable, viIsDeletable]
-- table obj
selObjM = mkTableObj tn descM <$> selFldsM
selectObjects = case selPermM of
Just (_, selFlds) ->
[ mkTableObj tn descM selFlds
]
Nothing -> []
-- aggregate objs and order by inputs
(aggObjs, aggOrdByInps) = case selPermM of
@ -281,10 +325,10 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
numCols = onlyNumCols cols
compCols = onlyComparableCols cols
objs = [ mkTableAggObj tn
, mkTableAggFldsObj tn (numCols, numAggOps) (compCols, compAggOps)
] <> mkColAggFldsObjs selFlds
ordByInps = mkTabAggOrdByInpObj tn (numCols, numAggOps) (compCols, compAggOps)
: mkTabAggOpOrdByInpObjs tn (numCols, numAggOps) (compCols, compAggOps)
, mkTableAggregateFieldsObj tn (numCols, numAggregateOps) (compCols, compAggregateOps)
] <> mkColAggregateFieldsObjs selFlds
ordByInps = mkTabAggOrdByInpObj tn (numCols, numAggregateOps) (compCols, compAggregateOps)
: mkTabAggregateOpOrdByInpObjs tn (numCols, numAggregateOps) (compCols, compAggregateOps)
in (objs, ordByInps)
_ -> ([], [])
@ -295,13 +339,13 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
mkTypeMaker "sum" = mkColumnType
mkTypeMaker _ = onlyFloat
mkColAggFldsObjs flds =
mkColAggregateFieldsObjs flds =
let numCols = getNumericCols flds
compCols = getComparableCols flds
mkNumObjFld n = mkTableColAggFldsObj tn n (mkTypeMaker n) numCols
mkCompObjFld n = mkTableColAggFldsObj tn n mkColumnType compCols
numFldsObjs = bool (map mkNumObjFld numAggOps) [] $ null numCols
compFldsObjs = bool (map mkCompObjFld compAggOps) [] $ null compCols
mkNumObjFld n = mkTableColAggregateFieldsObj tn n (mkTypeMaker n) numCols
mkCompObjFld n = mkTableColAggregateFieldsObj tn n mkColumnType compCols
numFldsObjs = bool (map mkNumObjFld numAggregateOps) [] $ null numCols
compFldsObjs = bool (map mkCompObjFld compAggregateOps) [] $ null compCols
in numFldsObjs <> compFldsObjs
-- the fields used in table object
selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM
@ -313,20 +357,6 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
Just (a, b) -> (Just a, Just b)
Nothing -> (Nothing, Nothing)
-- the types for all enums that are /referenced/ by this table (not /defined/ by this table;
-- there isnt actually any need to generate a GraphQL enum type for an enum table if its
-- never referenced anywhere else)
referencedEnumTypes =
let allColumnInfos =
(selPermM ^.. _Just._2.traverse._SFPGColumn)
<> (insPermM ^. _Just._1)
<> (updColsM ^. _Just)
allEnumReferences = allColumnInfos ^.. traverse.to pgiType._PGColumnEnumReference
in flip map allEnumReferences $ \enumReference@(EnumReference referencedTableName _) ->
let typeName = mkTableEnumType referencedTableName
in TIEnum $ mkHsraEnumTyInfo Nothing typeName (EnumValuesReference enumReference)
-- computed fields' function args input objects and scalar types
mkComputedFieldRequiredTypes computedFieldInfo =
let ComputedFieldFunction qf inputArgs _ _ _ = _cfFunction computedFieldInfo
@ -339,32 +369,25 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
computedFieldFuncArgsInps = map (TIInpObj . fst) computedFieldReqTypes
computedFieldFuncArgScalars = Set.fromList $ concatMap snd computedFieldReqTypes
makeFieldMap :: [(a, ObjFldInfo)] -> Map.HashMap G.Name (a, ObjFldInfo)
makeFieldMap = mapFromL (_fiName . snd)
-- see Note [Split schema generation (TODO)]
getRootFldsRole'
getMutationRootFieldsRole
:: QualifiedTable
-> Maybe (PrimaryKey PGColumnInfo)
-> [ConstraintName]
-> FieldInfoMap FieldInfo
-> [FunctionInfo]
-> Maybe ([T.Text], Bool) -- insert perm
-> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter
-> Maybe ([PGColumnInfo], PreSetColsPartial, AnnBoolExpPartialSQL, Maybe AnnBoolExpPartialSQL, [T.Text]) -- update filter
-> Maybe (AnnBoolExpPartialSQL, [T.Text]) -- delete filter
-> Maybe ViewInfo
-> TableConfig -- custom config
-> RootFields
getRootFldsRole' tn primaryKey constraints fields funcs insM
selM updM delM viM tableConfig =
RootFields
{ _rootQueryFields = makeFieldMap $
funcQueries
<> funcAggQueries
<> catMaybes
[ getSelDet <$> selM
, getSelAggDet selM
, getPKeySelDet <$> selM <*> primaryKey
]
, _rootMutationFields = makeFieldMap $ catMaybes
-> MutationRootFieldMap
getMutationRootFieldsRole tn primaryKey constraints fields insM
selM updM delM viM tableConfig =
makeFieldMap $ catMaybes
[ mutHelper viIsInsertable getInsDet insM
, onlyIfSelectPermExist $ mutHelper viIsInsertable getInsOneDet insM
, mutHelper viIsUpdatable getUpdDet updM
@ -372,15 +395,10 @@ getRootFldsRole' tn primaryKey constraints fields funcs insM
, mutHelper viIsDeletable getDelDet delM
, onlyIfSelectPermExist $ mutHelper viIsDeletable getDelByPkDet $ (,) <$> delM <*> primaryKey
]
}
where
makeFieldMap = mapFromL (_fiName . snd)
customRootFields = _tcCustomRootFields tableConfig
colGNameMap = mkPGColGNameMap $ getCols fields
funcQueries = maybe [] getFuncQueryFlds selM
funcAggQueries = maybe [] getFuncAggQueryFlds selM
mutHelper :: (ViewInfo -> Bool) -> (a -> b) -> Maybe a -> Maybe b
mutHelper f getDet mutM =
bool Nothing (getDet <$> mutM) $ isMutable f viM
@ -426,6 +444,32 @@ getRootFldsRole' tn primaryKey constraints fields funcs insM
, mkDeleteByPkMutationField delByPkCustName tn pKey
)
-- see Note [Split schema generation (TODO)]
getQueryRootFieldsRole
:: QualifiedTable
-> Maybe (PrimaryKey PGColumnInfo)
-> FieldInfoMap FieldInfo
-> [FunctionInfo]
-> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter
-> TableConfig -- custom config
-> QueryRootFieldMap
getQueryRootFieldsRole tn primaryKey fields funcs selM tableConfig =
makeFieldMap $
funcQueries
<> funcAggQueries
<> catMaybes
[ getSelDet <$> selM
, getSelAggDet selM
, getPKeySelDet <$> selM <*> primaryKey
]
where
customRootFields = _tcCustomRootFields tableConfig
colGNameMap = mkPGColGNameMap $ getCols fields
funcQueries = maybe [] getFuncQueryFlds selM
funcAggQueries = maybe [] getFuncAggQueryFlds selM
getCustomNameWith f = f customRootFields
selCustName = getCustomNameWith _tcrfSelect
getSelDet (selFltr, pLimit, hdrs, _) =
@ -462,17 +506,6 @@ getRootFldsRole' tn primaryKey constraints fields funcs insM
, g fi $ fiDescription fi
)
mkFuncArgItemSeq functionInfo =
let inputArgs = fiInputArgs functionInfo
in Seq.fromList $ procFuncArgs inputArgs nameFn resultFn
where
nameFn = \case
IAUserProvided fa -> faName fa
IASessionVariables name -> Just name
resultFn arg gName = flip fmap arg $
\fa -> FunctionArgItem (G.Name gName) (faName fa) (faHasDefault fa)
getSelPermission :: TableInfo -> RoleName -> Maybe SelPermInfo
getSelPermission tabInfo roleName =
Map.lookup roleName (_tiRolePermInfoMap tabInfo) >>= _permSel
@ -503,6 +536,8 @@ getSelPerm tableCache fields roleName selPermInfo = do
, _rfiColumns = remTableColGNameMap
, _rfiPermFilter = spiFilter rmSelPermM
, _rfiPermLimit = spiLimit rmSelPermM
, _rfiPrimaryKeyColumns = _pkColumns <$>
_tciPrimaryKey (_tiCoreInfo remTableInfo)
, _rfiIsNullable = isRelNullable fields relInfo
}
FIComputedField info -> do
@ -607,6 +642,7 @@ mkAdminSelFlds fields tableCache =
, _rfiColumns = remoteTableColGNameMap
, _rfiPermFilter = noFilter
, _rfiPermLimit = Nothing
, _rfiPrimaryKeyColumns = _pkColumns <$> _tciPrimaryKey remoteTableInfo
, _rfiIsNullable = isRelNullable fields info
}
@ -652,8 +688,8 @@ mkGCtxRole tableCache tn descM fields primaryKey constraints funcs viM tabConfig
return (ctx, (permCols, icRelations ctx))
let insPermM = snd <$> tabInsInfoM
insCtxM = fst <$> tabInsInfoM
updColsM = filterColFlds . upiCols <$> _permUpd permInfo
tyAgg = mkGCtxRole' tn descM insPermM selPermM updColsM
updColsM = filterColumnFields . upiCols <$> _permUpd permInfo
tyAgg = mkTyAggRole tn descM insPermM selPermM updColsM
(void $ _permDel permInfo) primaryKey constraints viM funcs
rootFlds = getRootFldsRole tn primaryKey constraints fields funcs
viM permInfo tabConfigM
@ -662,7 +698,7 @@ mkGCtxRole tableCache tn descM fields primaryKey constraints funcs viM tabConfig
where
allCols = getCols fields
cols = getValidCols fields
filterColFlds allowedSet =
filterColumnFields allowedSet =
filter ((`Set.member` allowedSet) . pgiColumn) cols
getRootFldsRole
@ -675,10 +711,12 @@ getRootFldsRole
-> RolePermInfo
-> TableConfig
-> RootFields
getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM updM delM)=
getRootFldsRole' tn pCols constraints fields funcs
(mkIns <$> insM) (mkSel <$> selM)
(mkUpd <$> updM) (mkDel <$> delM) viM
getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM updM delM) tableConfig =
let queryFields = getQueryRootFieldsRole tn pCols fields funcs (mkSel <$> selM) tableConfig
mutationFields = getMutationRootFieldsRole tn pCols constraints fields
(mkIns <$> insM) (mkSel <$> selM)
(mkUpd <$> updM) (mkDel <$> delM) viM tableConfig
in RootFields queryFields mutationFields
where
mkIns i = (ipiRequiredHeaders i, isJust updM)
mkSel s = ( spiFilter s, spiLimit s
@ -706,7 +744,7 @@ mkGCtxMapTable tableCache funcCache tabInfo = do
tabFuncs viewInfo customConfig roleName
adminInsCtx <- mkAdminInsCtx tableCache fields
adminSelFlds <- mkAdminSelFlds fields tableCache
let adminCtx = mkGCtxRole' tn descM (Just (cols, icRelations adminInsCtx))
let adminCtx = mkTyAggRole tn descM (Just (cols, icRelations adminInsCtx))
(Just (True, adminSelFlds)) (Just cols) (Just ())
primaryKey validConstraints viewInfo tabFuncs
adminInsCtxMap = Map.singleton tn adminInsCtx
@ -720,10 +758,18 @@ mkGCtxMapTable tableCache funcCache tabInfo = do
tabFuncs = filter (isValidObjectName . fiName) $ getFuncsOfTable tn funcCache
adminRootFlds =
getRootFldsRole' tn primaryKey validConstraints fields tabFuncs
(Just ([], True)) (Just (noFilter, Nothing, [], True))
(Just (cols, mempty, noFilter, Nothing, [])) (Just (noFilter, []))
viewInfo customConfig
let insertPermDetails = Just ([], True)
selectPermDetails = Just (noFilter, Nothing, [], True)
updatePermDetails = Just (cols, mempty, noFilter, Nothing, [])
deletePermDetails = Just (noFilter, [])
queryFields = getQueryRootFieldsRole tn primaryKey fields tabFuncs
selectPermDetails customConfig
mutationFields = getMutationRootFieldsRole tn primaryKey
validConstraints fields insertPermDetails
selectPermDetails updatePermDetails
deletePermDetails viewInfo customConfig
in RootFields queryFields mutationFields
rolePermsMap :: Map.HashMap RoleName (RoleContext RolePermInfo)
rolePermsMap = flip Map.map rolePerms $ \permInfo ->
@ -745,11 +791,11 @@ noFilter = annBoolExpTrue
{- Note [Split schema generation (TODO)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As of writing this, the schema is generated per table per role and for all permissions.
See functions "mkGCtxRole'" and "getRootFldsRole'". This approach makes hard to
differentiate schema generation for each operation (select, insert, delete and update)
based on respective permission information and safe merging those schemas eventually.
For backend-only inserts (see https://github.com/hasura/graphql-engine/pull/4224)
As of writing this, the schema is generated per table per role and for queries and mutations
separately. See functions "mkTyAggRole", "getQueryRootFieldsRole" and "getMutationRootFieldsRole".
This approach makes hard to differentiate schema generation for each operation
(select, insert, delete and update) based on respective permission information and safe merging
those schemas eventually. For backend-only inserts (see https://github.com/hasura/graphql-engine/pull/4224)
we need to somehow defer the logic of merging schema for inserts with others based on its
backend-only credibility. This requires significant refactor of this module and
we can't afford to do it as of now since we're going to rewrite the entire GraphQL schema
@ -867,6 +913,7 @@ mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap =
, TIObj <$> mutRootM
, TIObj <$> subRootM
, TIEnum <$> ordByEnumTyM
, Just $ TIObj mkPageInfoObj
] <>
scalarTys <> compTys <> defaultTypes <> wiredInGeoInputTypes
<> wiredInRastInputTypes

View File

@ -230,7 +230,7 @@ mkFieldMap annotatedOutputType actionInfo fieldReferences roleName =
(RelName $ mkNonEmptyTextUnsafe $ coerce relationshipName)
(_trType relationship)
columnMapping remoteTable True)
False mempty
RFKSimple mempty
tableFilter
tableLimit
)

View File

@ -8,6 +8,8 @@ module Hasura.GraphQL.Schema.Builder
, addFieldsToTyAgg
, addTypeInfoToTyAgg
, addScalarToTyAgg
, QueryRootFieldMap
, MutationRootFieldMap
, RootFields(..)
, addQueryField
, addMutationField
@ -57,11 +59,14 @@ instance Semigroup TyAgg where
instance Monoid TyAgg where
mempty = TyAgg Map.empty Map.empty Set.empty Map.empty
type QueryRootFieldMap = Map.HashMap G.Name (QueryCtx, ObjFldInfo)
type MutationRootFieldMap = Map.HashMap G.Name (MutationCtx, ObjFldInfo)
-- | A role-specific mapping from root field names to allowed operations.
data RootFields
= RootFields
{ _rootQueryFields :: !(Map.HashMap G.Name (QueryCtx, ObjFldInfo))
, _rootMutationFields :: !(Map.HashMap G.Name (MutationCtx, ObjFldInfo))
{ _rootQueryFields :: !QueryRootFieldMap
, _rootMutationFields :: !MutationRootFieldMap
} deriving (Show, Eq)
$(makeLenses ''RootFields)

View File

@ -14,9 +14,12 @@ module Hasura.GraphQL.Schema.Common
, mkColumnType
, mkRelName
, mkAggRelName
, mkConnectionRelName
, mkComputedFieldName
, mkTableTy
, mkTableConnectionTy
, mkTableEdgeTy
, mkTableEnumType
, mkTableAggTy
@ -25,6 +28,14 @@ module Hasura.GraphQL.Schema.Common
, mkDescriptionWith
, mkFuncArgsTy
, mkPGColGNameMap
, numAggregateOps
, compAggregateOps
, nodeType
, nodeIdType
) where
import qualified Data.HashMap.Strict as Map
@ -41,12 +52,13 @@ import Hasura.SQL.Types
data RelationshipFieldInfo
= RelationshipFieldInfo
{ _rfiInfo :: !RelInfo
, _rfiAllowAgg :: !Bool
, _rfiColumns :: !PGColGNameMap
, _rfiPermFilter :: !AnnBoolExpPartialSQL
, _rfiPermLimit :: !(Maybe Int)
, _rfiIsNullable :: !Bool
{ _rfiInfo :: !RelInfo
, _rfiAllowAgg :: !Bool
, _rfiColumns :: !PGColGNameMap
, _rfiPermFilter :: !AnnBoolExpPartialSQL
, _rfiPermLimit :: !(Maybe Int)
, _rfiPrimaryKeyColumns :: !(Maybe (NonEmpty PGColumnInfo))
, _rfiIsNullable :: !Bool
} deriving (Show, Eq)
data SelField
@ -73,7 +85,8 @@ qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name
qualObjectToName = G.Name . snakeCaseQualObject
addTypeSuffix :: Text -> G.NamedType -> G.NamedType
addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix
addTypeSuffix suffix baseType =
G.NamedType $ G.unNamedType baseType <> G.Name suffix
fromInpValL :: [InpValInfo] -> Map.HashMap G.Name InpValInfo
fromInpValL = mapFromL _iviName
@ -84,6 +97,9 @@ mkRelName rn = G.Name $ relNameToTxt rn
mkAggRelName :: RelName -> G.Name
mkAggRelName rn = G.Name $ relNameToTxt rn <> "_aggregate"
mkConnectionRelName :: RelName -> G.Name
mkConnectionRelName rn = G.Name $ relNameToTxt rn <> "_connection"
mkComputedFieldName :: ComputedFieldName -> G.Name
mkComputedFieldName = G.Name . computedFieldNameToText
@ -95,6 +111,12 @@ mkColumnType = \case
mkTableTy :: QualifiedTable -> G.NamedType
mkTableTy = G.NamedType . qualObjectToName
mkTableConnectionTy :: QualifiedTable -> G.NamedType
mkTableConnectionTy = addTypeSuffix "Connection" . mkTableTy
mkTableEdgeTy :: QualifiedTable -> G.NamedType
mkTableEdgeTy = addTypeSuffix "Edge" . mkTableTy
mkTableEnumType :: QualifiedTable -> G.NamedType
mkTableEnumType = addTypeSuffix "_enum" . mkTableTy
@ -126,3 +148,23 @@ mkFuncArgsName fn =
mkFuncArgsTy :: QualifiedFunction -> G.NamedType
mkFuncArgsTy =
G.NamedType . mkFuncArgsName
mkPGColGNameMap :: [PGColumnInfo] -> PGColGNameMap
mkPGColGNameMap cols = Map.fromList $
flip map cols $ \ci -> (pgiName ci, ci)
numAggregateOps :: [G.Name]
numAggregateOps = [ "sum", "avg", "stddev", "stddev_samp", "stddev_pop"
, "variance", "var_samp", "var_pop"
]
compAggregateOps :: [G.Name]
compAggregateOps = ["max", "min"]
nodeType :: G.NamedType
nodeType =
G.NamedType "Node"
nodeIdType :: G.GType
nodeIdType =
G.toGT $ G.toNT $ G.NamedType "ID"

View File

@ -2,14 +2,17 @@ module Hasura.GraphQL.Schema.Function
( procFuncArgs
, mkFuncArgsInp
, mkFuncQueryFld
, mkFuncQueryConnectionFld
, mkFuncAggQueryFld
, mkFuncArgsTy
, mkFuncArgItemSeq
) where
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Validate.Types
@ -92,6 +95,20 @@ mkFuncQueryFld funInfo descM =
ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy retTable
mkFuncQueryConnectionFld
:: FunctionInfo -> Maybe PGDescription -> ObjFldInfo
mkFuncQueryConnectionFld funInfo descM =
mkHsraObjFldInfo (Just desc) fldName (mkFuncArgs funInfo) ty
where
retTable = fiReturnType funInfo
funcName = fiName funInfo
desc = mkDescriptionWith descM $ "execute function " <> funcName
<<> " which returns " <>> retTable
fldName = qualObjectToName funcName <> "_connection"
ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableConnectionTy retTable
{-
function_aggregate(
@ -118,3 +135,15 @@ mkFuncAggQueryFld funInfo descM =
fldName = qualObjectToName funcName <> "_aggregate"
ty = G.toGT $ G.toNT $ mkTableAggTy retTable
mkFuncArgItemSeq :: FunctionInfo -> Seq (InputArgument FunctionArgItem)
mkFuncArgItemSeq functionInfo =
let inputArgs = fiInputArgs functionInfo
in Seq.fromList $ procFuncArgs inputArgs nameFn resultFn
where
nameFn = \case
IAUserProvided fa -> faName fa
IASessionVariables name -> Just name
resultFn arg gName = flip fmap arg $
\fa -> FunctionArgItem (G.Name gName) (faName fa) (faHasDefault fa)

View File

@ -3,7 +3,7 @@ module Hasura.GraphQL.Schema.OrderBy
, ordByEnumTy
, mkOrdByInpObj
, mkTabAggOrdByInpObj
, mkTabAggOpOrdByInpObjs
, mkTabAggregateOpOrdByInpObjs
) where
import qualified Data.HashMap.Strict as Map
@ -48,8 +48,8 @@ ordByEnumTy =
)
]
mkTabAggOpOrdByTy :: QualifiedTable -> G.Name -> G.NamedType
mkTabAggOpOrdByTy tn op =
mkTabAggregateOpOrdByTy :: QualifiedTable -> G.Name -> G.NamedType
mkTabAggregateOpOrdByTy tn op =
G.NamedType $ qualObjectToName tn <> "_" <> op <> "_order_by"
{-
@ -60,14 +60,14 @@ input table_<op>_order_by {
}
-}
mkTabAggOpOrdByInpObjs
mkTabAggregateOpOrdByInpObjs
:: QualifiedTable
-> ([PGColumnInfo], [G.Name])
-> ([PGColumnInfo], [G.Name])
-> [InpObjTyInfo]
mkTabAggOpOrdByInpObjs tn (numCols, numAggOps) (compCols, compAggOps) =
mapMaybe (mkInpObjTyM numCols) numAggOps
<> mapMaybe (mkInpObjTyM compCols) compAggOps
mkTabAggregateOpOrdByInpObjs tn (numCols, numericAggregateOps) (compCols, compareAggregateOps) =
mapMaybe (mkInpObjTyM numCols) numericAggregateOps
<> mapMaybe (mkInpObjTyM compCols) compareAggregateOps
where
mkDesc (G.Name op) =
@ -75,7 +75,7 @@ mkTabAggOpOrdByInpObjs tn (numCols, numAggOps) (compCols, compAggOps) =
mkInpObjTyM cols op = bool (Just $ mkInpObjTy cols op) Nothing $ null cols
mkInpObjTy cols op =
mkHsraInpTyInfo (Just $ mkDesc op) (mkTabAggOpOrdByTy tn op) $
mkHsraInpTyInfo (Just $ mkDesc op) (mkTabAggregateOpOrdByTy tn op) $
fromInpValL $ map mkColInpVal cols
mkColInpVal ci = InpValInfo Nothing (pgiName ci) Nothing $ G.toGT
@ -97,17 +97,17 @@ mkTabAggOrdByInpObj
-> ([PGColumnInfo], [G.Name])
-> ([PGColumnInfo], [G.Name])
-> InpObjTyInfo
mkTabAggOrdByInpObj tn (numCols, numAggOps) (compCols, compAggOps) =
mkTabAggOrdByInpObj tn (numCols, numericAggregateOps) (compCols, compareAggregateOps) =
mkHsraInpTyInfo (Just desc) (mkTabAggOrdByTy tn) $ fromInpValL $
numOpOrdBys <> compOpOrdBys <> [countInpVal]
where
desc = G.Description $
"order by aggregate values of table " <>> tn
numOpOrdBys = bool (map mkInpValInfo numAggOps) [] $ null numCols
compOpOrdBys = bool (map mkInpValInfo compAggOps) [] $ null compCols
numOpOrdBys = bool (map mkInpValInfo numericAggregateOps) [] $ null numCols
compOpOrdBys = bool (map mkInpValInfo compareAggregateOps) [] $ null compCols
mkInpValInfo op = InpValInfo Nothing op Nothing $ G.toGT $
mkTabAggOpOrdByTy tn op
mkTabAggregateOpOrdByTy tn op
countInpVal = InpValInfo Nothing "count" Nothing $ G.toGT ordByTy
@ -132,14 +132,14 @@ mkOrdByInpObj tn selFlds = (inpObjTy, ordByCtx)
where
inpObjTy =
mkHsraInpTyInfo (Just desc) namedTy $ fromInpValL $
map mkColOrdBy pgColFlds <> map mkObjRelOrdBy objRels
<> mapMaybe mkArrRelAggOrdBy arrRels
map mkColOrdBy pgColumnFields <> map mkObjRelOrdBy objRels
<> mapMaybe mkArrayAggregateSelectOrdBy arrRels
namedTy = mkOrdByTy tn
desc = G.Description $
"ordering options when selecting data from " <>> tn
pgColFlds = getPGColumnFields selFlds
pgColumnFields = getPGColumnFields selFlds
relFltr ty = flip filter (getRelationshipFields selFlds) $
\rf -> riType (_rfiInfo rf) == ty
objRels = relFltr ObjRel
@ -152,7 +152,7 @@ mkOrdByInpObj tn selFlds = (inpObjTy, ordByCtx)
in InpValInfo Nothing (mkRelName $ riName ri) Nothing $
G.toGT $ mkOrdByTy $ riRTable ri
mkArrRelAggOrdBy relationshipField =
mkArrayAggregateSelectOrdBy relationshipField =
let ri = _rfiInfo relationshipField
isAggAllowed = _rfiAllowAgg relationshipField
ivi = InpValInfo Nothing (mkAggRelName $ riName ri) Nothing $
@ -161,7 +161,7 @@ mkOrdByInpObj tn selFlds = (inpObjTy, ordByCtx)
ordByCtx = Map.singleton namedTy $ Map.fromList $
colOrdBys <> relOrdBys <> arrRelOrdBys
colOrdBys = map (pgiName &&& OBIPGCol) pgColFlds
colOrdBys = map (pgiName &&& OBIPGCol) pgColumnFields
relOrdBys = flip map objRels $
\relationshipField ->
let ri = _rfiInfo relationshipField
@ -171,7 +171,7 @@ mkOrdByInpObj tn selFlds = (inpObjTy, ordByCtx)
)
arrRelOrdBys = flip mapMaybe arrRels $
\(RelationshipFieldInfo ri isAggAllowed colGNameMap fltr _ _) ->
\(RelationshipFieldInfo ri isAggAllowed colGNameMap fltr _ _ _) ->
let obItem = ( mkAggRelName $ riName ri
, OBIAgg ri colGNameMap fltr
)

View File

@ -1,16 +1,23 @@
module Hasura.GraphQL.Schema.Select
( mkTableObj
, mkRelayTableObj
, mkTableAggObj
, mkSelColumnTy
, mkTableAggFldsObj
, mkTableColAggFldsObj
, mkTableAggregateFieldsObj
, mkTableColAggregateFieldsObj
, mkTableEdgeObj
, mkPageInfoObj
, mkTableConnectionObj
, mkTableConnectionTy
, mkSelFld
, mkAggSelFld
, mkSelFldPKey
, mkSelFldConnection
, mkRemoteRelationshipName
, mkSelArgs
, mkConnectionArgs
) where
import qualified Data.HashMap.Strict as Map
@ -40,11 +47,11 @@ mkSelColumnInpTy :: QualifiedTable -> G.NamedType
mkSelColumnInpTy tn =
G.NamedType $ qualObjectToName tn <> "_select_column"
mkTableAggFldsTy :: QualifiedTable -> G.NamedType
mkTableAggFldsTy = addTypeSuffix "_aggregate_fields" . mkTableTy
mkTableAggregateFieldsTy :: QualifiedTable -> G.NamedType
mkTableAggregateFieldsTy = addTypeSuffix "_aggregate_fields" . mkTableTy
mkTableColAggFldsTy :: G.Name -> QualifiedTable -> G.NamedType
mkTableColAggFldsTy op tn =
mkTableColAggregateFieldsTy :: G.Name -> QualifiedTable -> G.NamedType
mkTableColAggregateFieldsTy op tn =
G.NamedType $ qualObjectToName tn <> "_" <> op <> "_fields"
mkTableByPkName :: QualifiedTable -> G.Name
@ -79,6 +86,7 @@ mkComputedFieldFld field =
in (inputParams, G.toGT $ mkScalarTy scalarTy)
CFTTable computedFieldtable ->
let table = _cftTable computedFieldtable
-- TODO: connection stuff
in ( fromInpValL $ maybeToList maybeFunctionInputArg <> mkSelArgs table
, G.toGT $ G.toLT $ G.toNT $ mkTableTy table
)
@ -118,6 +126,30 @@ mkSelArgs tn =
orderByDesc = "sort the rows by one or more columns"
distinctDesc = "distinct select on columns"
-- distinct_on: [table_select_column!]
-- where: table_bool_exp
-- order_by: table_order_by
-- first: Int
-- after: String
-- last: Int
-- before: String
mkConnectionArgs :: QualifiedTable -> [InpValInfo]
mkConnectionArgs tn =
[ InpValInfo (Just whereDesc) "where" Nothing $ G.toGT $ mkBoolExpTy tn
, InpValInfo (Just orderByDesc) "order_by" Nothing $ G.toGT $ G.toLT $ G.toNT $
mkOrdByTy tn
, InpValInfo (Just distinctDesc) "distinct_on" Nothing $ G.toGT $ G.toLT $
G.toNT $ mkSelColumnInpTy tn
, InpValInfo Nothing "first" Nothing $ G.toGT $ mkScalarTy PGInteger
, InpValInfo Nothing "after" Nothing $ G.toGT $ mkScalarTy PGText
, InpValInfo Nothing "last" Nothing $ G.toGT $ mkScalarTy PGInteger
, InpValInfo Nothing "before" Nothing $ G.toGT $ mkScalarTy PGText
]
where
whereDesc = "filter the rows returned"
orderByDesc = "sort the rows by one or more columns"
distinctDesc = "distinct select on columns"
{-
array_relationship(
@ -137,10 +169,13 @@ mkRelationshipField
:: Bool
-> RelInfo
-> Bool
-> Maybe (NonEmpty PGColumnInfo)
-> Bool
-> [ObjFldInfo]
mkRelationshipField allowAgg (RelInfo rn rTy _ remTab isManual) isNullable = case rTy of
ArrRel -> bool [arrRelFld] [arrRelFld, aggArrRelFld] allowAgg
ObjRel -> [objRelFld]
mkRelationshipField allowAgg (RelInfo rn rTy _ remTab isManual) isRelay maybePkCols isNullable =
case rTy of
ArrRel -> bool [arrRelFld] ([arrRelFld, aggArrRelFld] <> connFields) allowAgg
ObjRel -> [objRelFld]
where
objRelFld = mkHsraObjFldInfo (Just "An object relationship")
(mkRelName rn) Map.empty objRelTy
@ -150,12 +185,33 @@ mkRelationshipField allowAgg (RelInfo rn rTy _ remTab isManual) isNullable = cas
arrRelFld =
mkHsraObjFldInfo (Just "An array relationship") (mkRelName rn)
(fromInpValL $ mkSelArgs remTab) arrRelTy
arrRelTy = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy remTab
(fromInpValL $ mkSelArgs remTab) $
G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy remTab
connFields = if isNothing maybePkCols || not isRelay then [] else pure $
mkHsraObjFldInfo Nothing (mkConnectionRelName rn)
(fromInpValL $ mkConnectionArgs remTab) $
G.toGT $ G.toNT $ mkTableConnectionTy remTab
aggArrRelFld = mkHsraObjFldInfo (Just "An aggregated array relationship")
(mkAggRelName rn) (fromInpValL $ mkSelArgs remTab) $
G.toGT $ G.toNT $ mkTableAggTy remTab
mkTableObjectDescription :: QualifiedTable -> Maybe PGDescription -> G.Description
mkTableObjectDescription tn pgDescription =
mkDescriptionWith pgDescription $ "columns and relationships of " <>> tn
mkTableObjectFields :: Bool -> [SelField] -> [ObjFldInfo]
mkTableObjectFields isRelay =
concatMap \case
SFPGColumn info -> pure $ mkPGColFld info
SFRelationship info -> mkRelationshipField' info
SFComputedField info -> pure $ mkComputedFieldFld info
SFRemoteRelationship info -> pure $ mkRemoteRelationshipFld info
where
mkRelationshipField' (RelationshipFieldInfo relInfo allowAgg _ _ _ maybePkCols isNullable) =
mkRelationshipField allowAgg relInfo isRelay maybePkCols isNullable
{-
type table {
col1: colty1
@ -169,18 +225,30 @@ mkTableObj
-> Maybe PGDescription
-> [SelField]
-> ObjTyInfo
mkTableObj tn descM allowedFlds =
mkObjTyInfo (Just desc) (mkTableTy tn) Set.empty (mapFromL _fiName flds) TLHasuraType
mkTableObj tn descM allowedFields =
mkObjTyInfo (Just desc) (mkTableTy tn) Set.empty (mapFromL _fiName fields) TLHasuraType
where
flds = flip concatMap allowedFlds $ \case
SFPGColumn info -> pure $ mkPGColFld info
SFRelationship info -> mkRelationshipField' info
SFComputedField info -> pure $ mkComputedFieldFld info
SFRemoteRelationship info -> pure $ mkRemoteRelationshipFld info
fields = mkTableObjectFields False allowedFields
desc = mkTableObjectDescription tn descM
mkRelationshipField' (RelationshipFieldInfo relInfo allowAgg _ _ _ isNullable) =
mkRelationshipField allowAgg relInfo isNullable
desc = mkDescriptionWith descM $ "columns and relationships of " <>> tn
mkRelayTableObj
:: QualifiedTable
-> Maybe PGDescription
-> [SelField]
-> ObjTyInfo
mkRelayTableObj tn descM allowedFields =
mkObjTyInfo (Just desc) (mkTableTy tn) Set.empty (mapFromL _fiName fields) TLHasuraType
where
fields =
let idColumnFilter = \case
SFPGColumn columnInfo -> (/=) "id" $ pgiName columnInfo
_ -> True
in (:) nodeIdField $ mkTableObjectFields True $
-- Remove "id" column
filter idColumnFilter allowedFields
nodeIdField = mkHsraObjFldInfo Nothing "id" mempty nodeIdType
desc = mkTableObjectDescription tn descM
mkRemoteRelationshipName :: RemoteRelationshipName -> G.Name
mkRemoteRelationshipName =
@ -211,7 +279,7 @@ mkTableAggObj tn =
"aggregated selection of " <>> tn
aggFld = mkHsraObjFldInfo Nothing "aggregate" Map.empty $ G.toGT $
mkTableAggFldsTy tn
mkTableAggregateFieldsTy tn
nodesFld = mkHsraObjFldInfo Nothing "nodes" Map.empty $ G.toGT $
G.toNT $ G.toLT $ G.toNT $ mkTableTy tn
@ -228,13 +296,13 @@ type table_aggregate_fields{
min: table_min_fields
}
-}
mkTableAggFldsObj
mkTableAggregateFieldsObj
:: QualifiedTable
-> ([PGColumnInfo], [G.Name])
-> ([PGColumnInfo], [G.Name])
-> ObjTyInfo
mkTableAggFldsObj tn (numCols, numAggOps) (compCols, compAggOps) =
mkHsraObjTyInfo (Just desc) (mkTableAggFldsTy tn) Set.empty $ mapFromL _fiName $
mkTableAggregateFieldsObj tn (numCols, numericAggregateOps) (compCols, compareAggregateOps) =
mkHsraObjTyInfo (Just desc) (mkTableAggregateFieldsTy tn) Set.empty $ mapFromL _fiName $
countFld : (numFlds <> compFlds)
where
desc = G.Description $
@ -250,11 +318,11 @@ mkTableAggFldsObj tn (numCols, numAggOps) (compCols, compAggOps) =
distinctInpVal = InpValInfo Nothing "distinct" Nothing $ G.toGT $
mkScalarTy PGBoolean
numFlds = bool (map mkColOpFld numAggOps) [] $ null numCols
compFlds = bool (map mkColOpFld compAggOps) [] $ null compCols
numFlds = bool (map mkColumnOpFld numericAggregateOps) [] $ null numCols
compFlds = bool (map mkColumnOpFld compareAggregateOps) [] $ null compCols
mkColOpFld op = mkHsraObjFldInfo Nothing op Map.empty $ G.toGT $
mkTableColAggFldsTy op tn
mkColumnOpFld op = mkHsraObjFldInfo Nothing op Map.empty $ G.toGT $
mkTableColAggregateFieldsTy op tn
{-
type table_<agg-op>_fields{
@ -263,14 +331,14 @@ type table_<agg-op>_fields{
. .
}
-}
mkTableColAggFldsObj
mkTableColAggregateFieldsObj
:: QualifiedTable
-> G.Name
-> (PGColumnType -> G.NamedType)
-> [PGColumnInfo]
-> ObjTyInfo
mkTableColAggFldsObj tn op f cols =
mkHsraObjTyInfo (Just desc) (mkTableColAggFldsTy op tn) Set.empty $ mapFromL _fiName $
mkTableColAggregateFieldsObj tn op f cols =
mkHsraObjTyInfo (Just desc) (mkTableColAggregateFieldsTy op tn) Set.empty $ mapFromL _fiName $
map mkColObjFld cols
where
desc = G.Description $ "aggregate " <> G.unName op <> " on columns"
@ -296,6 +364,93 @@ mkSelFld mCustomName tn =
args = fromInpValL $ mkSelArgs tn
ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy tn
{-
table(
where: table_bool_exp
limit: Int
offset: Int
): tableConnection!
-}
mkSelFldConnection :: Maybe G.Name -> QualifiedTable -> ObjFldInfo
mkSelFldConnection mCustomName tn =
mkHsraObjFldInfo (Just desc) fldName args ty
where
desc = G.Description $ "fetch data from the table: " <>> tn
fldName = fromMaybe (qualObjectToName tn <> "_connection") mCustomName
args = fromInpValL $ mkConnectionArgs tn
ty = G.toGT $ G.toNT $ mkTableConnectionTy tn
{-
type tableConnection {
pageInfo: PageInfo!
edges: [tableEdge!]!
}
-}
mkTableConnectionObj
:: QualifiedTable -> ObjTyInfo
mkTableConnectionObj tn =
mkHsraObjTyInfo (Just desc) (mkTableConnectionTy tn) Set.empty $ mapFromL _fiName
[pageInfoFld, edgesFld]
where
desc = G.Description $ "A Relay Connection object on " <>> tn
pageInfoFld = mkHsraObjFldInfo Nothing "pageInfo" Map.empty $
G.toGT $ G.toNT pageInfoTy
edgesFld = mkHsraObjFldInfo Nothing "edges" Map.empty $ G.toGT $
G.toNT $ G.toLT $ G.toNT $ mkTableEdgeTy tn
booleanScalar :: G.NamedType
booleanScalar = G.NamedType "Boolean"
stringScalar :: G.NamedType
stringScalar = G.NamedType "String"
pageInfoTyName :: G.Name
pageInfoTyName = "PageInfo"
pageInfoTy :: G.NamedType
pageInfoTy = G.NamedType pageInfoTyName
{-
type PageInfo {
hasNextPage: Boolean!
hasPrevousPage: Boolean!
startCursor: String!
endCursor: String!
}
-}
mkPageInfoObj :: ObjTyInfo
mkPageInfoObj =
mkHsraObjTyInfo Nothing pageInfoTy Set.empty $ mapFromL _fiName
[hasNextPage, hasPreviousPage, startCursor, endCursor]
where
hasNextPage = mkHsraObjFldInfo Nothing "hasNextPage" Map.empty $
G.toGT $ G.toNT booleanScalar
hasPreviousPage = mkHsraObjFldInfo Nothing "hasPreviousPage" Map.empty $
G.toGT $ G.toNT booleanScalar
startCursor = mkHsraObjFldInfo Nothing "startCursor" Map.empty $
G.toGT $ G.toNT stringScalar
endCursor = mkHsraObjFldInfo Nothing "endCursor" Map.empty $
G.toGT $ G.toNT stringScalar
{-
type tableEdge {
cursor: String!
node: table
}
-}
-- | Build the @<table>Edge@ object type: @{ cursor: String!, node: table }@.
mkTableEdgeObj
  :: QualifiedTable -> ObjTyInfo
mkTableEdgeObj tn =
  mkHsraObjTyInfo Nothing (mkTableEdgeTy tn) Set.empty $
    mapFromL _fiName [cursorField, nodeField]
  where
    cursorField =
      mkHsraObjFldInfo Nothing "cursor" Map.empty $ G.toGT $ G.toNT stringScalar
    -- note: node is nullable in the schema (no toNT wrapper)
    nodeField =
      mkHsraObjFldInfo Nothing "node" Map.empty $ G.toGT $ mkTableTy tn
{-
table_by_pk(
col1: value1!,

View File

@ -33,26 +33,25 @@ runGQ
=> RequestId
-> UserInfo
-> [N.Header]
-> E.GraphQLQueryType
-> GQLReq GQLQueryText
-> m (HttpResponse EncJSON)
runGQ reqId userInfo reqHdrs req = do
runGQ reqId userInfo reqHdrs queryType req = do
-- The response and misc telemetry data:
let telemTransport = Telem.HTTP
(telemTimeTot_DT, (telemCacheHit, telemLocality, (telemTimeIO_DT, telemQueryType, !resp))) <- withElapsedTime $ do
E.ExecutionCtx _ sqlGenCtx pgExecCtx planCache sc scVer httpManager enableAL <- ask
(telemCacheHit, execPlan) <- E.getResolvedExecPlan pgExecCtx planCache
userInfo sqlGenCtx enableAL sc scVer httpManager reqHdrs req
userInfo sqlGenCtx enableAL sc scVer queryType httpManager reqHdrs req
case execPlan of
E.GExPHasura resolvedOp -> do
(telemTimeIO, telemQueryType, respHdrs, resp) <- runHasuraGQ reqId req userInfo resolvedOp
return (telemCacheHit, Telem.Local, (telemTimeIO, telemQueryType, HttpResponse resp respHdrs))
E.GExPRemote rsi opDef -> do
let telemQueryType | G._todType opDef == G.OperationTypeMutation = Telem.Mutation
| otherwise = Telem.Query
| otherwise = Telem.Query
(telemTimeIO, resp) <- E.execRemoteGQ reqId userInfo reqHdrs req rsi $ G._todType opDef
pure (telemCacheHit, Telem.Remote, (telemTimeIO, telemQueryType, resp))
return (telemCacheHit, Telem.Remote, (telemTimeIO, telemQueryType, resp))
let telemTimeIO = convertDuration telemTimeIO_DT
telemTimeTot = convertDuration telemTimeTot_DT
@ -69,12 +68,13 @@ runGQBatched
-> ResponseInternalErrorsConfig
-> UserInfo
-> [N.Header]
-> E.GraphQLQueryType
-> GQLBatchedReqs GQLQueryText
-> m (HttpResponse EncJSON)
runGQBatched reqId responseErrorsConfig userInfo reqHdrs reqs =
runGQBatched reqId responseErrorsConfig userInfo reqHdrs queryType reqs =
case reqs of
GQLSingleRequest req ->
runGQ reqId userInfo reqHdrs req
runGQ reqId userInfo reqHdrs queryType req
GQLBatchedReqs batch -> do
-- It's unclear what we should do if we receive multiple
-- responses with distinct headers, so just do the simplest thing
@ -85,7 +85,7 @@ runGQBatched reqId responseErrorsConfig userInfo reqHdrs reqs =
. encJFromList
. map (either (encJFromJValue . encodeGQErr includeInternal) _hrBody)
try = flip catchError (pure . Left) . fmap Right
removeHeaders <$> traverse (try . runGQ reqId userInfo reqHdrs) batch
removeHeaders <$> traverse (try . runGQ reqId userInfo reqHdrs queryType) batch
runHasuraGQ
:: ( MonadIO m

View File

@ -95,6 +95,7 @@ data WSConnData
-- are not tracked here
, _wscOpMap :: !OperationMap
, _wscErrRespTy :: !ErrRespType
, _wscAPIType :: !E.GraphQLQueryType
}
type WSServer = WS.WSServer WSConnData
@ -204,11 +205,11 @@ onConn :: (MonadIO m)
=> L.Logger L.Hasura -> CorsPolicy -> WS.OnConnH m WSConnData
onConn (L.Logger logger) corsPolicy wsId requestHead = do
res <- runExceptT $ do
errType <- checkPath
(errType, queryType) <- checkPath
let reqHdrs = WS.requestHeaders requestHead
headers <- maybe (return reqHdrs) (flip enforceCors reqHdrs . snd) getOrigin
return (WsHeaders $ filterWsHeaders headers, errType)
either reject (uncurry accept) res
return (WsHeaders $ filterWsHeaders headers, errType, queryType)
either reject accept res
where
keepAliveAction wsConn = liftIO $ forever $ do
@ -226,12 +227,13 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
currTime <- TC.getCurrentTime
sleep $ convertDuration $ TC.diffUTCTime expTime currTime
accept hdrs errType = do
accept (hdrs, errType, queryType) = do
logger $ mkWsInfoLog Nothing (WsConnInfo wsId Nothing Nothing) EAccepted
connData <- liftIO $ WSConnData
<$> STM.newTVarIO (CSNotInitialised hdrs)
<*> STMMap.newIO
<*> pure errType
<*> pure queryType
let acceptRequest = WS.defaultAcceptRequest
{ WS.acceptSubprotocol = Just "graphql-ws"}
return $ Right $ WS.AcceptWith connData acceptRequest keepAliveAction tokenExpiryHandler
@ -244,8 +246,9 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
(BL.toStrict $ J.encode $ encodeGQLErr False qErr)
checkPath = case WS.requestPath requestHead of
"/v1alpha1/graphql" -> return ERTLegacy
"/v1/graphql" -> return ERTGraphqlCompliant
"/v1alpha1/graphql" -> return (ERTLegacy, E.QueryHasura)
"/v1/graphql" -> return (ERTGraphqlCompliant, E.QueryHasura)
"/v1/relay" -> return (ERTGraphqlCompliant, E.QueryRelay)
_ ->
throw404 "only '/v1/graphql', '/v1alpha1/graphql' are supported on websockets"
@ -306,7 +309,7 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
requestId <- getRequestId reqHdrs
(sc, scVer) <- liftIO getSchemaCache
execPlanE <- runExceptT $ E.getResolvedExecPlan pgExecCtx
planCache userInfo sqlGenCtx enableAL sc scVer httpMgr reqHdrs q
planCache userInfo sqlGenCtx enableAL sc scVer queryType httpMgr reqHdrs q
(telemCacheHit, execPlan) <- either (withComplete . preExecErr requestId) return execPlanE
let execCtx = E.ExecutionCtx logger sqlGenCtx pgExecCtx
@ -401,7 +404,8 @@ onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
WSServerEnv logger pgExecCtx lqMap getSchemaCache httpMgr _ sqlGenCtx planCache
_ enableAL = serverEnv
WSConnData userInfoR opMap errRespTy = WS.getData wsConn
WSConnData userInfoR opMap errRespTy queryType = WS.getData wsConn
logOpEv opTy reqId = logWSEvent logger wsConn $ EOperation opDet
where
opDet = OperationDetails opId reqId (_grOperationName q) opTy query
@ -523,7 +527,7 @@ logWSEvent (L.Logger logger) wsConn wsEv = do
_ -> (Nothing, Nothing)
liftIO $ logger $ WSLog logLevel $ WSLogInfo userVarsM (WsConnInfo wsId tokenExpM Nothing) wsEv
where
WSConnData userInfoR _ _ = WS.getData wsConn
WSConnData userInfoR _ _ _ = WS.getData wsConn
wsId = WS.getWSId wsConn
logLevel = bool L.LevelInfo L.LevelError isError
isError = case wsEv of

View File

@ -1,8 +1,8 @@
module Hasura.GraphQL.Validate
( validateGQ
, showVars
, RootSelSet(..)
, SelSet
, RootSelectionSet(..)
, SelectionSet(..)
, Field(..)
, getTypedOp
, QueryParts(..)
@ -15,6 +15,7 @@ module Hasura.GraphQL.Validate
, isQueryInAllowlist
, unValidateArgsMap
, unValidateSelectionSet
, unValidateField
) where
@ -27,24 +28,24 @@ import qualified Data.Aeson as A
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as HS
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.UUID as UUID
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.NormalForm
import Hasura.GraphQL.Resolve.InputValue (annInpValueToJson)
import Hasura.GraphQL.Schema
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Utils
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.SelectionSet
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.DML.Select.Types
import Hasura.RQL.Types
import Hasura.SQL.Time
import Hasura.SQL.Value
import Hasura.RQL.DML.Select.Types
import Hasura.GraphQL.Resolve.InputValue (annInpValueToJson)
data QueryParts
= QueryParts
@ -149,19 +150,12 @@ validateFrag
validateFrag (G.FragmentDefinition n onTy dirs selSet) = do
unless (null dirs) $ throwVE
"unexpected directives at fragment definition"
tyInfo <- getTyInfoVE onTy
objTyInfo <- onNothing (getObjTyM tyInfo) $ throwVE
"fragments can only be defined on object types"
return $ FragDef n objTyInfo selSet
data RootSelSet
= RQuery !SelSet
| RMutation !SelSet
| RSubscription !SelSet
deriving (Show, Eq)
fragmentTypeInfo <- getFragmentTyInfo onTy
return $ FragDef n fragmentTypeInfo selSet
validateGQ
:: (MonadError QErr m, MonadReader GCtx m, MonadReusability m) => QueryParts -> m RootSelSet
:: (MonadError QErr m, MonadReader GCtx m, MonadReusability m)
=> QueryParts -> m RootSelectionSet
validateGQ (QueryParts opDef opRoot fragDefsL varValsM) = do
ctx <- ask
@ -177,19 +171,19 @@ validateGQ (QueryParts opDef opRoot fragDefsL varValsM) = do
-- build a validation ctx
let valCtx = ValidationCtx (_gTypes ctx) annVarVals annFragDefs
selSet <- flip runReaderT valCtx $ denormSelSet [] opRoot $
selSet <- flip runReaderT valCtx $ parseObjectSelectionSet valCtx opRoot $
G._todSelectionSet opDef
case G._todType opDef of
G.OperationTypeQuery -> return $ RQuery selSet
G.OperationTypeMutation -> return $ RMutation selSet
G.OperationTypeSubscription ->
case selSet of
Seq.Empty -> throw500 "empty selset for subscription"
(_ Seq.:<| rst) -> do
case OMap.toList $ unAliasedFields $ unObjectSelectionSet selSet of
[] -> throw500 "empty selset for subscription"
(_:rst) -> do
-- As an internal testing feature, we support subscribing to multiple
-- selection sets. First check if the corresponding directive is set.
let multipleAllowed = elem (G.Directive "_multiple_top_level_fields" []) (G._todDirectives opDef)
let multipleAllowed = G.Directive "_multiple_top_level_fields" [] `elem` G._todDirectives opDef
unless (multipleAllowed || null rst) $
throwVE "subscriptions must select one top level field"
return $ RSubscription selSet
@ -230,12 +224,31 @@ unValidateArgsMap argsMap =
. Map.toList $ argsMap
-- | Convert the validated field to GraphQL parser AST field
unValidateField :: Field -> G.Field
unValidateField (Field alias name _ argsMap selSet _) =
unValidateField :: G.Alias -> Field -> G.Field
unValidateField alias (Field name _ argsMap selSet) =
let args = map (\(n, inpVal) -> G.Argument n $ unValidateInpVal inpVal) $
Map.toList argsMap
sels = map (G.SelectionField . unValidateField) $ toList selSet
in G.Field (Just alias) name args [] sels
in G.Field (Just alias) name args [] $ unValidateSelectionSet selSet
-- | Convert the validated selection set back to a GraphQL parser AST
-- selection set.
unValidateSelectionSet :: SelectionSet -> G.SelectionSet
unValidateSelectionSet = \case
  SelectionSetObject selectionSet -> fromSelectionSet selectionSet
  SelectionSetInterface selectionSet -> fromScopedSelectionSet selectionSet
  SelectionSetUnion selectionSet -> fromScopedSelectionSet selectionSet
  SelectionSetNone -> mempty
  where
    -- render an alias->field map as a list of AST field selections
    fromAliasedFields :: (IsField f) => AliasedFields f -> G.SelectionSet
    fromAliasedFields =
      map (G.SelectionField . uncurry unValidateField) .
      OMap.toList . fmap toField . unAliasedFields
    fromSelectionSet =
      fromAliasedFields . unObjectSelectionSet
    -- wrap a member type's selections in an inline fragment carrying a
    -- type condition, i.e. @... on <typeName> { ... }@
    toInlineSelection typeName =
      G.SelectionInlineFragment . G.InlineFragment (Just typeName) mempty .
      fromSelectionSet
    -- scoped (interface/union) selections: one inline fragment per
    -- member type, followed by the base (common) fields
    fromScopedSelectionSet (ScopedSelectionSet base specific) =
      map (uncurry toInlineSelection) (Map.toList specific) <> fromAliasedFields base
-- | Get the variable definition and it's value (if exists)
unValidateInpVariable :: AnnInpVal -> Maybe [(G.VariableDefinition,A.Value)]

View File

@ -4,6 +4,7 @@ module Hasura.GraphQL.Validate.Context
, getInpFieldInfo
, getTyInfo
, getTyInfoVE
, getFragmentTyInfo
, module Hasura.GraphQL.Utils
) where
@ -19,11 +20,11 @@ import Hasura.RQL.Types
getFieldInfo
:: ( MonadError QErr m)
=> ObjTyInfo -> G.Name -> m ObjFldInfo
getFieldInfo oti fldName =
onNothing (Map.lookup fldName $ _otiFields oti) $ throwVE $
=> G.NamedType -> ObjFieldMap -> G.Name -> m ObjFldInfo
getFieldInfo typeName fieldMap fldName =
onNothing (Map.lookup fldName fieldMap) $ throwVE $
"field " <> showName fldName <>
" not found in type: " <> showNamedTy (_otiName oti)
" not found in type: " <> showNamedTy typeName
getInpFieldInfo
:: ( MonadError QErr m)
@ -65,3 +66,13 @@ getTyInfoVE namedTy = do
tyMap <- asks getter
onNothing (Map.lookup namedTy tyMap) $
throwVE $ "no such type exists in the schema: " <> showNamedTy namedTy
-- | Look up the type a fragment is conditioned on. Fragments may only
-- be defined on object, interface or union types; anything else
-- (scalars, enums, input objects) is rejected.
getFragmentTyInfo
  :: (MonadReader r m, Has TypeMap r, MonadError QErr m)
  => G.NamedType -> m FragmentTypeInfo
getFragmentTyInfo onType =
  getTyInfoVE onType >>= \case
    TIObj tyInfo -> pure $ FragmentTyObject tyInfo
    TIIFace tyInfo -> pure $ FragmentTyInterface tyInfo
    TIUnion tyInfo -> pure $ FragmentTyUnion tyInfo
    _ -> throwVE "fragments can only be defined on object/interface/union types"

View File

@ -1,360 +0,0 @@
module Hasura.GraphQL.Validate.Field
( ArgsMap
, Field(..), fAlias, fName, fType, fArguments, fSelSet, fSource
, SelSet
, denormSelSet
) where
import Control.Lens
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
import qualified Data.List as L
import qualified Data.Sequence as Seq
import qualified Data.Sequence.NonEmpty as NE
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.SQL.Value
-- data ScalarInfo
-- = SIBuiltin !GBuiltin
-- | SICustom !PGScalarType
-- deriving (Show, Eq)
-- data GBuiltin
-- = GInt
-- | GFloat
-- | GBoolean
-- | GString
-- deriving (Show, Eq)
data TypedOperation
= TypedOperation
{ _toType :: !G.OperationType
, _toName :: !(Maybe G.Name)
, _toSelectionSet :: ![Field]
} deriving (Show, Eq)
type ArgsMap = Map.HashMap G.Name AnnInpVal
type SelSet = Seq.Seq Field
data Field
= Field
{ _fAlias :: !G.Alias
, _fName :: !G.Name
, _fType :: !G.NamedType
, _fArguments :: !ArgsMap
, _fSelSet :: !SelSet
, _fSource :: !TypeLoc
} deriving (Eq, Show)
$(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
''Field
)
makeLenses ''Field
-- newtype FieldMapAlias
-- = FieldMapAlias
-- { unFieldMapAlias :: Map.HashMap G.Alias (FieldG FieldMapAlias)
-- } deriving (Show, Eq)
-- newtype FieldMapName
-- = FieldMapName
-- { unFieldMapName :: Map.HashMap G.Name (NE.NonEmpty (FieldG FieldMapName))
-- } deriving (Show, Eq)
-- type Field = FieldG FieldMapAlias
-- type FieldGrouped = FieldG FieldMapName
-- toFieldGrouped :: Field -> FieldGrouped
-- toFieldGrouped =
-- fmap groupFields
-- where
-- groupFields m =
-- FieldMapName $ groupTuples $
-- flip map (Map.elems $ unFieldMapAlias m) $ \fld ->
-- (_fName fld, toFieldGrouped fld)
data FieldGroupSrc
= FGSFragSprd !G.Name
| FGSInlnFrag
deriving (Show, Eq)
data FieldGroup
= FieldGroup
{ _fgSource :: !FieldGroupSrc
, _fgFields :: !(Seq.Seq Field)
} deriving (Show, Eq)
-- data GLoc
-- = GLoc
-- { _glLine :: !Int
-- , _glColumn :: !Int
-- } deriving (Show, Eq)
-- data GErr
-- = GErr
-- { _geMessage :: !Text
-- , _geLocations :: ![GLoc]
-- } deriving (Show, Eq)
-- throwGE :: (MonadError QErr m) => Text -> m a
-- throwGE msg = throwError $ QErr msg []
withDirectives
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> [G.Directive]
-> m a
-> m (Maybe a)
withDirectives dirs act = do
dirDefs <- onLeft (mkMapWith G._dName dirs) $ \dups ->
throwVE $ "the following directives are used more than once: " <>
showNames dups
procDirs <- flip Map.traverseWithKey dirDefs $ \name dir ->
withPathK (G.unName name) $ do
dirInfo <- onNothing (Map.lookup (G._dName dir) defDirectivesMap) $
throwVE $ "unexpected directive: " <> showName name
procArgs <- withPathK "args" $ processArgs (_diParams dirInfo)
(G._dArguments dir)
getIfArg procArgs
let shouldSkip = fromMaybe False $ Map.lookup "skip" procDirs
shouldInclude = fromMaybe True $ Map.lookup "include" procDirs
if not shouldSkip && shouldInclude
then Just <$> act
else return Nothing
where
getIfArg m = do
val <- onNothing (Map.lookup "if" m) $ throw500
"missing if argument in the directive"
when (isJust $ _aivVariable val) markNotReusable
case _aivValue val of
AGScalar _ (Just (PGValBoolean v)) -> return v
_ -> throw500 "did not find boolean scalar for if argument"
denormSel
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> [G.Name] -- visited fragments
-> ObjTyInfo -- parent type info
-> G.Selection
-> m (Maybe (Either Field FieldGroup))
denormSel visFrags parObjTyInfo sel = case sel of
G.SelectionField fld -> withPathK (G.unName $ G._fName fld) $ do
fldInfo <- getFieldInfo parObjTyInfo $ G._fName fld
fmap Left <$> denormFld visFrags fldInfo fld
G.SelectionFragmentSpread fragSprd ->
withPathK (G.unName $ G._fsName fragSprd) $
fmap Right <$> denormFrag visFrags parTy fragSprd
G.SelectionInlineFragment inlnFrag ->
withPathK "inlineFragment" $
fmap Right <$> denormInlnFrag visFrags parObjTyInfo inlnFrag
where
parTy = _otiName parObjTyInfo
processArgs
:: ( MonadReader ValidationCtx m
, MonadError QErr m
)
=> ParamMap
-> [G.Argument]
-> m ArgsMap
processArgs fldParams argsL = do
args <- onLeft (mkMapWith G._aName argsL) $ \dups ->
throwVE $ "the following arguments are defined more than once: " <>
showNames dups
let requiredParams = Map.filter (G.isNotNull . _iviType) fldParams
inpArgs <- forM args $ \(G.Argument argName argVal) ->
withPathK (G.unName argName) $ do
argTy <- getArgTy argName
validateInputValue valueParser argTy argVal
forM_ requiredParams $ \argDef -> do
let param = _iviName argDef
onNothing (Map.lookup param inpArgs) $ throwVE $ mconcat
[ "the required argument ", showName param, " is missing"]
return inpArgs
where
getArgTy argName =
onNothing (_iviType <$> Map.lookup argName fldParams) $ throwVE $
"no such argument " <> showName argName <> " is expected"
denormFld
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> [G.Name] -- visited fragments
-> ObjFldInfo
-> G.Field
-> m (Maybe Field)
denormFld visFrags fldInfo (G.Field aliasM name args dirs selSet) = do
let fldTy = _fiTy fldInfo
fldBaseTy = getBaseTy fldTy
fldSource = _fiLoc fldInfo
fldTyInfo <- getTyInfo fldBaseTy
argMap <- withPathK "args" $ processArgs (_fiParams fldInfo) args
fields <- case (fldTyInfo, selSet) of
(TIObj _, []) ->
throwVE $ "field " <> showName name <> " of type "
<> G.showGT fldTy <> " must have a selection of subfields"
(TIObj fldObjTyInfo, _) ->
denormSelSet visFrags fldObjTyInfo selSet
(TIScalar _, []) -> return Seq.empty
(TIEnum _, []) -> return Seq.empty
(TIInpObj _, _) ->
throwVE $ "internal error: unexpected input type for field: "
<> showName name
(TIIFace _, _) -> throwVE $ "interface types not supported"
(TIUnion _, _) -> throwVE $ "union types not supported"
-- when scalar/enum and no empty set
(_, _) ->
throwVE $ "field " <> showName name <> " must not have a "
<> "selection since type " <> G.showGT fldTy <> " has no subfields"
withPathK "directives" $ withDirectives dirs $ return $
Field (fromMaybe (G.Alias name) aliasM) name fldBaseTy argMap fields fldSource
denormInlnFrag
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> [G.Name] -- visited fragments
-> ObjTyInfo -- type information of the field
-> G.InlineFragment
-> m (Maybe FieldGroup)
denormInlnFrag visFrags fldTyInfo inlnFrag = do
let fldTy = _otiName fldTyInfo
let fragTy = fromMaybe fldTy tyM
when (fldTy /= fragTy) $
throwVE $ "inline fragment is expected on type " <>
showNamedTy fldTy <> " but found " <> showNamedTy fragTy
withPathK "directives" $ withDirectives directives $
fmap (FieldGroup FGSInlnFrag) $ denormSelSet visFrags fldTyInfo selSet
where
G.InlineFragment tyM directives selSet = inlnFrag
denormSelSet
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> [G.Name] -- visited fragments
-> ObjTyInfo
-> G.SelectionSet
-> m (Seq.Seq Field)
denormSelSet visFrags fldTyInfo selSet =
withPathK "selectionSet" $ do
resFlds <- catMaybes <$> mapM (denormSel visFrags fldTyInfo) selSet
mergeFields $ foldl' flatten Seq.empty resFlds
where
flatten s (Left fld) = s Seq.|> fld
flatten s (Right (FieldGroup _ flds)) =
s Seq.>< flds
mergeFields
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> Seq.Seq Field
-> m (Seq.Seq Field)
mergeFields flds =
fmap Seq.fromList $ forM fldGroups $ \fieldGroup -> do
newFld <- checkMergeability fieldGroup
childFields <- mergeFields $ foldl' (\l f -> l Seq.>< _fSelSet f) Seq.empty
$ NE.toSeq fieldGroup
return $ newFld {_fSelSet = childFields}
where
fldGroups = OMap.elems $ OMap.groupListWith _fAlias flds
-- can a group be merged?
checkMergeability fldGroup = do
let groupedFlds = toList $ NE.toSeq fldGroup
fldNames = L.nub $ map _fName groupedFlds
args = L.nub $ map _fArguments groupedFlds
fld = NE.head fldGroup
fldAl = _fAlias fld
when (length fldNames > 1) $
throwVE $ "cannot merge different fields under the same alias ("
<> showName (G.unAlias fldAl) <> "): "
<> showNames fldNames
when (length args > 1) $
throwVE $ "cannot merge fields with different arguments"
<> " under the same alias: "
<> showName (G.unAlias fldAl)
return fld
denormFrag
:: ( MonadReader ValidationCtx m
, MonadError QErr m
, MonadReusability m
)
=> [G.Name] -- visited fragments
-> G.NamedType -- parent type
-> G.FragmentSpread
-> m (Maybe FieldGroup)
denormFrag visFrags parTy (G.FragmentSpread name directives) = do
-- check for cycles
when (name `elem` visFrags) $
throwVE $ "cannot spread fragment " <> showName name
<> " within itself via "
<> T.intercalate "," (map G.unName visFrags)
(FragDef _ fragTyInfo selSet) <- getFragInfo
let fragTy = _otiName fragTyInfo
-- we don't have unions or interfaces so we can get away with equality
when (fragTy /= parTy) $
throwVE $ "cannot spread fragment " <> showName name <> " defined on " <>
showNamedTy fragTy <> " when selecting fields of type " <> showNamedTy parTy
resFlds <- denormSelSet (name:visFrags) fragTyInfo selSet
withPathK "directives" $ withDirectives directives $
return $ FieldGroup (FGSFragSprd name) resFlds
where
getFragInfo = do
dctx <- ask
onNothing (Map.lookup name $ _vcFragDefMap dctx) $
throwVE $ "fragment '" <> G.unName name <> "' not found"

View File

@ -0,0 +1,550 @@
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilyDependencies #-}
module Hasura.GraphQL.Validate.SelectionSet
( ArgsMap
, Field(..)
, AliasedFields(..)
, SelectionSet(..)
, ObjectSelectionSet(..)
, traverseObjectSelectionSet
, InterfaceSelectionSet
, UnionSelectionSet
, RootSelectionSet(..)
, parseObjectSelectionSet
, asObjectSelectionSet
, asInterfaceSelectionSet
, getMemberSelectionSet
) where
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
import qualified Data.HashSet as Set
import qualified Data.List as L
import qualified Data.Sequence.NonEmpty as NE
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.NormalForm
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.SQL.Value
-- | Types that can be the parent of a GraphQL selection set (object,
-- interface and union type infos). The associated normal forms
-- ('NormalizedField', 'NormalizedSelectionSet') are per-instance type
-- families; instances are expected alongside this class.
class HasSelectionSet a where

  -- | The GraphQL type name of this parent type.
  getTypename :: a -> G.NamedType

  -- | All object types that can occur at runtime under this type —
  -- used to decide whether a fragment spread is possible.
  getMemberTypes :: a -> Set.HashSet G.NamedType

  -- | Lift one parsed field into a singleton selection set.
  fieldToSelectionSet
    :: G.Alias -> NormalizedField a -> NormalizedSelectionSet a

  -- | Parse one AST field appearing directly under this type;
  -- 'Nothing' when the field is excluded (e.g. by directives).
  parseField_
    :: ( MonadReader ValidationCtx m
       , MonadError QErr m
       , MonadReusability m
       , MonadState [G.Name] m
       )
    => a
    -> G.Field
    -> m (Maybe (NormalizedField a))

  -- | Merge per-selection selection sets into one (field merging).
  mergeNormalizedSelectionSets
    :: ( MonadReader ValidationCtx m
       , MonadError QErr m
       , MonadReusability m
       )
    => [NormalizedSelectionSet a]
    -> m (NormalizedSelectionSet a)

  -- | Embed an object-typed fragment's selections spread under this type.
  fromObjectSelectionSet
    :: G.NamedType
    -- ^ parent typename
    -> G.NamedType
    -- ^ fragment typename
    -> Set.HashSet G.NamedType
    -- ^ common types
    -> NormalizedSelectionSet ObjTyInfo
    -> NormalizedSelectionSet a

  -- | Embed an interface-typed fragment's selections spread under this type.
  fromInterfaceSelectionSet
    :: G.NamedType
    -- ^ parent typename
    -> G.NamedType
    -- ^ fragment typename
    -> Set.HashSet G.NamedType
    -- ^ common types
    -> NormalizedSelectionSet IFaceTyInfo
    -> NormalizedSelectionSet a

  -- | Embed a union-typed fragment's selections spread under this type.
  fromUnionSelectionSet
    :: G.NamedType
    -- ^ parent typename
    -> G.NamedType
    -- ^ fragment typename
    -> Set.HashSet G.NamedType
    -- ^ common types
    -> NormalizedSelectionSet UnionTyInfo
    -> NormalizedSelectionSet a
-- | Entry point: validate the selection set of an operation's root
-- (object) type, starting with an empty visited-fragments stack.
parseObjectSelectionSet
  :: ( MonadError QErr m
     , MonadReusability m
     )
  => ValidationCtx
  -> ObjTyInfo
  -> G.SelectionSet
  -> m ObjectSelectionSet
parseObjectSelectionSet validationCtx objectTypeInfo selectionSet =
  -- the state is the list of fragment names on the current spread path,
  -- used for cycle detection in 'getFragmentInfo'
  flip evalStateT [] $ flip runReaderT validationCtx $
    parseSelectionSet objectTypeInfo selectionSet
-- | The contribution of a single normalized selection to a selection
-- set: a field becomes a singleton selection set, while fragment
-- spreads (named or inline) already carry their embedded selection set.
selectionToSelectionSet
  :: HasSelectionSet a
  => NormalizedSelection a -> NormalizedSelectionSet a
selectionToSelectionSet (SelectionField alias fld) =
  fieldToSelectionSet alias fld
selectionToSelectionSet (SelectionInlineFragmentSpread selectionSet) =
  selectionSet
selectionToSelectionSet (SelectionFragmentSpread _ selectionSet) =
  selectionSet
-- | Normalize a raw AST selection set under the given parent type:
-- parse every selection, drop the ones excluded by directives, then
-- merge the per-selection results into a single selection set.
parseSelectionSet
  :: ( MonadReader ValidationCtx m
     , MonadError QErr m
     , MonadReusability m
     , HasSelectionSet a
     , MonadState [G.Name] m
     )
  => a
  -> G.SelectionSet
  -> m (NormalizedSelectionSet a)
parseSelectionSet fieldTypeInfo selectionSet = do
  visitedFragments <- get
  withPathK "selectionSet" $ do
    -- The visited fragments state shouldn't accumulate over a selection set:
    -- each selection is parsed against the same snapshot of the stack.
    normalizedSelections <-
      catMaybes <$> mapM (parseSelection visitedFragments fieldTypeInfo) selectionSet
    mergeNormalizedSelections normalizedSelections
  where
    mergeNormalizedSelections = mergeNormalizedSelectionSets . map selectionToSelectionSet
-- | Parse a single selection -- a field, a named fragment spread or an
-- inline fragment -- under the given parent type. Returns 'Nothing'
-- when the selection is excluded by directives. Each selection is run
-- against the caller's snapshot of the visited-fragments stack
-- (via 'evalStateT'), so sibling selections do not see each other's
-- fragment spreads.
parseSelection
  :: ( MonadReader ValidationCtx m
     , MonadError QErr m
     , MonadReusability m
     , HasSelectionSet a
     )
  => [G.Name]
  -- ^ fragments visited along the current spread path
  -> a -- parent type info
  -> G.Selection
  -> m (Maybe (NormalizedSelection a))
parseSelection visitedFragments parentTypeInfo =
  flip evalStateT visitedFragments . \case
    G.SelectionField fld -> withPathK (G.unName $ G._fName fld) $ do
      let fieldName = G._fName fld
          -- response key defaults to the field name when no alias is given
          fieldAlias = fromMaybe (G.Alias fieldName) $ G._fAlias fld
      fmap (SelectionField fieldAlias) <$> parseField_ parentTypeInfo fld
    G.SelectionFragmentSpread (G.FragmentSpread name directives) -> do
      FragDef _ fragmentTyInfo fragmentSelectionSet <- getFragmentInfo name
      withPathK (G.unName name) $
        fmap (SelectionFragmentSpread name) <$>
        parseFragment parentTypeInfo fragmentTyInfo directives fragmentSelectionSet
    G.SelectionInlineFragment G.InlineFragment{..} -> do
      -- an inline fragment with no type condition applies to the parent type
      let fragmentType = fromMaybe (getTypename parentTypeInfo) _ifTypeCondition
      fragmentTyInfo <- getFragmentTyInfo fragmentType
      withPathK "inlineFragment" $ fmap SelectionInlineFragmentSpread <$>
        parseFragment parentTypeInfo fragmentTyInfo _ifDirectives _ifSelectionSet
-- | Validate a fragment (named or inline) spread under the given
-- parent type and embed its selection set into the parent's normal
-- form. Returns 'Nothing' when the spread is excluded by directives.
parseFragment
  :: ( MonadReader ValidationCtx m
     , MonadError QErr m
     , MonadReusability m
     , MonadState [G.Name] m
     , HasSelectionSet a
     )
  => a
  -> FragmentTypeInfo
  -> [G.Directive]
  -> G.SelectionSet
  -> m (Maybe (NormalizedSelectionSet a))
parseFragment parentTyInfo fragmentTyInfo directives fragmentSelectionSet = do
  commonTypes <- validateSpread
  case fragmentTyInfo of
    FragmentTyObject objTyInfo ->
      withDirectives directives $
      fmap (fromObjectSelectionSet parentType fragmentType commonTypes) $
      parseSelectionSet objTyInfo fragmentSelectionSet
    FragmentTyInterface interfaceTyInfo ->
      withDirectives directives $
      fmap (fromInterfaceSelectionSet parentType fragmentType commonTypes) $
      parseSelectionSet interfaceTyInfo fragmentSelectionSet
    FragmentTyUnion unionTyInfo ->
      withDirectives directives $
      fmap (fromUnionSelectionSet parentType fragmentType commonTypes) $
      parseSelectionSet unionTyInfo fragmentSelectionSet
  where
    -- a spread is legal iff the parent's and the fragment's possible
    -- runtime object types overlap
    validateSpread = do
      let commonTypes = parentTypeMembers `Set.intersection` fragmentTypeMembers
      if null commonTypes then
        -- TODO: better error location by capturing the fragment source -
        -- named or otherwise
        -- throwVE $ "cannot spread fragment " <> showName name <> " defined on " <>
        throwVE $ "cannot spread fragment defined on " <> showNamedTy fragmentType
        <> " when selecting fields of type " <> showNamedTy parentType
      else pure commonTypes
    parentType = getTypename parentTyInfo
    parentTypeMembers = getMemberTypes parentTyInfo
    fragmentType = case fragmentTyInfo of
      FragmentTyObject tyInfo -> getTypename tyInfo
      FragmentTyInterface tyInfo -> getTypename tyInfo
      FragmentTyUnion tyInfo -> getTypename tyInfo
    fragmentTypeMembers = case fragmentTyInfo of
      FragmentTyObject tyInfo -> getMemberTypes tyInfo
      FragmentTyInterface tyInfo -> getMemberTypes tyInfo
      FragmentTyUnion tyInfo -> getMemberTypes tyInfo
-- | Fields that can be merged when several occurrences share one
-- response key (alias), per the GraphQL field-merging rules.
class IsField f => MergeableField f where
  -- | Collapse all fields recorded under one alias into a single
  -- field, or fail if they cannot legally be merged.
  checkFieldMergeability
    :: (MonadError QErr m) => G.Alias -> NE.NESeq f -> m f

instance MergeableField Field where
  checkFieldMergeability alias fields = do
    let groupedFlds = toList $ NE.toSeq fields
        fldNames = L.nub $ map getFieldName groupedFlds
        args = L.nub $ map getFieldArguments groupedFlds
    -- all occurrences must select the same field name ...
    when (length fldNames > 1) $
      throwVE $ "cannot merge different fields under the same alias ("
      <> showName (G.unAlias alias) <> "): "
      <> showNames fldNames
    -- ... with identical arguments
    when (length args > 1) $
      throwVE $ "cannot merge fields with different arguments"
      <> " under the same alias: "
      <> showName (G.unAlias alias)
    let fld = NE.head fields
    -- recursively merge the sub-selections of every occurrence
    mergedGroupSelectionSet <- mergeSelectionSets $ fmap _fSelSet fields
    return $ fld { _fSelSet = mergedGroupSelectionSet }

instance MergeableField Typename where
  -- __typename has no arguments or sub-selections, so any occurrence
  -- stands for all of them
  checkFieldMergeability _ fields = pure $ NE.head fields
-- | Validate a field's (or directive's) arguments against its
-- parameter definitions: reject duplicates and unknown argument names,
-- validate each value against its declared type, and require every
-- non-nullable parameter to be supplied.
parseArguments
  :: ( MonadReader ValidationCtx m
     , MonadError QErr m
     )
  => ParamMap
  -> [G.Argument]
  -> m ArgsMap
parseArguments fldParams argsL = do
  args <- onLeft (mkMapWith G._aName argsL) $ \dups ->
    throwVE $ "the following arguments are defined more than once: " <>
    showNames dups
  let requiredParams = Map.filter (G.isNotNull . _iviType) fldParams
  inpArgs <- forM args $ \(G.Argument argName argVal) ->
    withPathK (G.unName argName) $ do
      argTy <- getArgTy argName
      validateInputValue valueParser argTy argVal
  -- all non-nullable parameters must have been provided
  forM_ requiredParams $ \argDef -> do
    let param = _iviName argDef
    onNothing (Map.lookup param inpArgs) $ throwVE $ mconcat
      [ "the required argument ", showName param, " is missing"]
  return inpArgs
  where
    getArgTy argName =
      onNothing (_iviType <$> Map.lookup argName fldParams) $ throwVE $
      "no such argument " <> showName argName <> " is expected"
-- | Group fields from several aliased-field maps by alias and merge
-- each group into a single field via 'checkFieldMergeability'.
mergeFields
  :: ( MonadError QErr m
     , MergeableField f
     )
  => [AliasedFields f]
  -> m (AliasedFields f)
mergeFields flds =
  AliasedFields <$> OMap.traverseWithKey checkFieldMergeability groups
  where
    -- alias -> non-empty sequence of every field written under it;
    -- each field is first wrapped into a singleton sequence, then the
    -- maps are unioned with (<>) to concatenate per-alias occurrences
    groups = foldr (OMap.unionWith (<>)) mempty $
             map (fmap NE.init . unAliasedFields) flds
-- | Append two selection sets of the same kind (object/object,
-- interface/interface, union/union, or both empty). Both arguments are
-- selections on the same field, so their kinds must agree; a mismatch
-- is an internal invariant violation and is reported as a 500.
appendSelectionSets
  :: (MonadError QErr m) => SelectionSet -> SelectionSet -> m SelectionSet
appendSelectionSets = curry \case
  (SelectionSetObject s1, SelectionSetObject s2) ->
    SelectionSetObject <$> mergeObjectSelectionSets [s1, s2]
  (SelectionSetInterface s1, SelectionSetInterface s2) ->
    SelectionSetInterface <$> appendScopedSelectionSet s1 s2
  (SelectionSetUnion s1, SelectionSetUnion s2) ->
    SelectionSetUnion <$> appendScopedSelectionSet s1 s2
  (SelectionSetNone, SelectionSetNone) -> pure SelectionSetNone
  -- fix: the internal error previously blamed 'mergeSelectionSets',
  -- which made the 500 harder to trace to this function
  (_, _) -> throw500 $ "appendSelectionSets: 'same kind' assertion failed"
-- Example of two sibling selections that get merged:
--
--   query q {
--     author {
--       id
--     }
--     author {
--       name
--     }
--   }
--
-- | When we are merging two selection sets down two different trees they
-- should be of the same kind; however, as this is not enforced in the
-- type system, an internal error is thrown when the assumption is
-- violated (see 'appendSelectionSets').
mergeSelectionSets
  :: (MonadError QErr m) => NE.NESeq SelectionSet -> m SelectionSet
mergeSelectionSets selectionSets =
  foldM appendSelectionSets (NE.head selectionSets) $ NE.tail selectionSets
-- | Merge several object selection sets into one by merging their
-- aliased fields.
mergeObjectSelectionSets
  :: (MonadError QErr m) => [ObjectSelectionSet] -> m ObjectSelectionSet
mergeObjectSelectionSets selectionSets =
  ObjectSelectionSet <$> mergeFields (map unObjectSelectionSet selectionSets)
-- | Merge per-member-type selection-set maps: collect, for each member
-- type, every selection set recorded for it across the input maps, and
-- merge those lists with 'mergeObjectSelectionSets'.
mergeObjectSelectionSetMaps
  :: (MonadError QErr m) => [ObjectSelectionSetMap] -> m ObjectSelectionSetMap
mergeObjectSelectionSetMaps selectionSetMaps =
  traverse mergeObjectSelectionSets $
    foldr (Map.unionWith (<>)) mempty $ map (fmap (:[])) selectionSetMaps
-- | Append two scoped (interface/union) selection sets. The base
-- (common) fields merge directly. For the member-specific maps, each
-- side's base fields are first pushed down into the member types the
-- *other* side mentions, so no common field is lost for a member type
-- that only one side selects on.
appendScopedSelectionSet
  :: (MonadError QErr m, MergeableField f)
  => ScopedSelectionSet f -> ScopedSelectionSet f -> m (ScopedSelectionSet f)
appendScopedSelectionSet s1 s2 =
  ScopedSelectionSet
  <$> mergeFields [_sssBaseSelectionSet s1, _sssBaseSelectionSet s2]
  <*> mergeObjectSelectionSetMaps [s1MembersUnified, s2MembersUnified]
  where
    s1Base = fmap toField $ _sssBaseSelectionSet s1
    s2Base = fmap toField $ _sssBaseSelectionSet s2
    -- s1's member map extended over s2's member types with s1's base
    -- fields (the left-biased (<>) keeps s1's own entries intact)
    s1MembersUnified =
      (_sssMemberSelectionSets s1)
      <> fmap (const (ObjectSelectionSet s1Base)) (_sssMemberSelectionSets s2)
    -- and symmetrically for s2
    s2MembersUnified =
      (_sssMemberSelectionSets s2)
      <> fmap (const (ObjectSelectionSet s2Base)) (_sssMemberSelectionSets s1)
-- | Fold a list of scoped selection sets into one, starting from the
-- empty scoped selection set.
mergeScopedSelectionSets
  :: (MonadError QErr m, MergeableField f)
  => [ScopedSelectionSet f] -> m (ScopedSelectionSet f)
mergeScopedSelectionSets =
  foldM appendScopedSelectionSet emptyScopedSelectionSet
-- | Run the given action only when the @skip@/@include@ directives
-- allow the selection; returns 'Nothing' when it is excluded. Also
-- validates the directives themselves: no duplicates, only known
-- directive names, well-typed @if@ arguments.
withDirectives
  :: ( MonadReader ValidationCtx m
     , MonadError QErr m
     , MonadReusability m
     )
  => [G.Directive]
  -> m a
  -> m (Maybe a)
withDirectives dirs act = do
  procDirs <- withPathK "directives" $ do
    dirDefs <- onLeft (mkMapWith G._dName dirs) $ \dups ->
      throwVE $ "the following directives are used more than once: " <>
      showNames dups
    flip Map.traverseWithKey dirDefs $ \name dir ->
      withPathK (G.unName name) $ do
        dirInfo <- onNothing (Map.lookup (G._dName dir) defDirectivesMap) $
          throwVE $ "unexpected directive: " <> showName name
        procArgs <- withPathK "args" $ parseArguments (_diParams dirInfo)
                    (G._dArguments dir)
        getIfArg procArgs
  -- absent directives default to "do not skip" / "do include"
  let shouldSkip = fromMaybe False $ Map.lookup "skip" procDirs
      shouldInclude = fromMaybe True $ Map.lookup "include" procDirs
  if not shouldSkip && shouldInclude
    then Just <$> act
    else return Nothing
  where
    -- extract the boolean @if@ argument of a skip/include directive
    getIfArg m = do
      val <- onNothing (Map.lookup "if" m) $ throw500
             "missing if argument in the directive"
      -- a variable-driven @if@ makes the resolved plan non-reusable
      when (isJust $ _aivVariable val) markNotReusable
      case _aivValue val of
        AGScalar _ (Just (PGValBoolean v)) -> return v
        _ -> throw500 "did not find boolean scalar for if argument"
-- | Look up a fragment definition by name, rejecting cyclic fragment
-- spreads. The state carries the names of fragments currently being
-- expanded along this spread path; re-encountering one of them means
-- the fragment (transitively) spreads itself.
getFragmentInfo
  :: (MonadReader ValidationCtx m, MonadError QErr m, MonadState [G.Name] m)
  => G.Name
  -- ^ fragment name
  -> m FragDef
getFragmentInfo name = do
  -- check for cycles
  visitedFragments <- get
  if name `elem` visitedFragments
    then throwVE $ "cannot spread fragment " <> showName name
         <> " within itself via "
         <> T.intercalate "," (map G.unName visitedFragments)
    -- record this fragment as visited before resolving it
    else put $ name:visitedFragments
  fragInfo <- Map.lookup name <$> asks _vcFragDefMap
  onNothing fragInfo $ throwVE $ "fragment '" <> G.unName name <> "' not found"
-- | Normalize a single GraphQL field: parse its arguments against the
-- field's parameter definitions and denormalize its selection set
-- according to the field's base type. Returns 'Nothing' when a
-- skip/include directive excludes the field (see 'withDirectives').
denormalizeField
  :: ( MonadReader ValidationCtx m
     , MonadError QErr m
     , MonadReusability m
     , MonadState [G.Name] m
     )
  => ObjFldInfo
  -> G.Field
  -> m (Maybe Field)
denormalizeField fldInfo (G.Field _ name args dirs selSet) = do
  let fldTy = _fiTy fldInfo
      fldBaseTy = getBaseTy fldTy
  fldTyInfo <- getTyInfo fldBaseTy
  argMap <- withPathK "args" $ parseArguments (_fiParams fldInfo) args
  fields <- case (fldTyInfo, selSet) of
    -- composite types (object/interface/union) require a non-empty
    -- selection of subfields
    (TIObj _, []) ->
      throwVE $ "field " <> showName name <> " of type "
      <> G.showGT fldTy <> " must have a selection of subfields"
    (TIObj objTyInfo, _) ->
      SelectionSetObject <$> parseSelectionSet objTyInfo selSet
    (TIIFace _, []) ->
      throwVE $ "field " <> showName name <> " of type "
      <> G.showGT fldTy <> " must have a selection of subfields"
    (TIIFace interfaceTyInfo, _) ->
      SelectionSetInterface <$> parseSelectionSet interfaceTyInfo selSet
    (TIUnion _, []) ->
      throwVE $ "field " <> showName name <> " of type "
      <> G.showGT fldTy <> " must have a selection of subfields"
    (TIUnion unionTyInfo, _) ->
      SelectionSetUnion <$> parseSelectionSet unionTyInfo selSet
    (TIScalar _, []) -> return SelectionSetNone
    -- leaf types (scalar/enum) must not carry a selection set
    (TIScalar _, _) ->
      throwVE $ "field " <> showName name <> " must not have a "
      <> "selection since type " <> G.showGT fldTy <> " has no subfields"
    (TIEnum _, []) -> return SelectionSetNone
    (TIEnum _, _) ->
      throwVE $ "field " <> showName name <> " must not have a "
      <> "selection since type " <> G.showGT fldTy <> " has no subfields"
    -- input object types can never appear in an output position
    (TIInpObj _, _) ->
      throwVE $ "internal error: unexpected input type for field: "
      <> showName name
  -- directives decide whether the normalized field is kept at all
  withDirectives dirs $ pure $ Field name fldBaseTy argMap fields
type instance NormalizedSelectionSet ObjTyInfo = ObjectSelectionSet
type instance NormalizedField ObjTyInfo = Field

-- | Selection sets on an object type normalize to a flat, alias-keyed
-- field map. Fragments on interfaces/unions collapse to the member
-- selection set recorded for this object type.
instance HasSelectionSet ObjTyInfo where

  getTypename = _otiName
  -- an object type's only "member" is itself
  getMemberTypes = Set.singleton . _otiName

  parseField_ objTyInfo field = do
    fieldInfo <- getFieldInfo (_otiName objTyInfo) (_otiFields objTyInfo) $ G._fName field
    denormalizeField fieldInfo field

  fieldToSelectionSet alias fld =
    ObjectSelectionSet $ AliasedFields $ OMap.singleton alias fld

  mergeNormalizedSelectionSets = mergeObjectSelectionSets

  -- an object fragment on an object type is the selection set itself
  fromObjectSelectionSet _ _ _ objectSelectionSet =
    objectSelectionSet

  -- for abstract-type fragments, pick out the selections that apply to
  -- this object type
  fromInterfaceSelectionSet parentType _ _ interfaceSelectionSet =
    getMemberSelectionSet parentType interfaceSelectionSet

  fromUnionSelectionSet parentType _ _ unionSelectionSet =
    getMemberSelectionSet parentType unionSelectionSet
type instance NormalizedSelectionSet IFaceTyInfo = InterfaceSelectionSet
type instance NormalizedField IFaceTyInfo = Field

-- | Selection sets on an interface type normalize to a scoped selection
-- set: fields selected directly on the interface go into the base set,
-- while fragments restrict their selections to specific member types.
instance HasSelectionSet IFaceTyInfo where

  getTypename = _ifName
  getMemberTypes = _ifMemberTypes

  parseField_ interfaceTyInfo field = do
    fieldInfo <- getFieldInfo (_ifName interfaceTyInfo) (_ifFields interfaceTyInfo)
                 $ G._fName field
    denormalizeField fieldInfo field

  -- a directly-selected field lives in the base (unconditional) set
  fieldToSelectionSet alias field =
    ScopedSelectionSet (AliasedFields $ OMap.singleton alias field) mempty

  mergeNormalizedSelectionSets = mergeScopedSelectionSets

  -- an object fragment contributes only to that object's member set
  fromObjectSelectionSet _ fragmentType _ objectSelectionSet =
    ScopedSelectionSet (AliasedFields mempty) $
    Map.singleton fragmentType objectSelectionSet

  -- an abstract-type fragment contributes its member selection set for
  -- every type common to both abstract types
  fromInterfaceSelectionSet _ _ commonTypes interfaceSelectionSet =
    ScopedSelectionSet (AliasedFields mempty) $
    Map.fromList $ flip map (toList commonTypes) $
    \commonType -> (commonType, getMemberSelectionSet commonType interfaceSelectionSet)

  fromUnionSelectionSet _ _ commonTypes unionSelectionSet =
    ScopedSelectionSet (AliasedFields mempty) $
    Map.fromList $ flip map (toList commonTypes) $
    \commonType -> (commonType, getMemberSelectionSet commonType unionSelectionSet)
type instance NormalizedSelectionSet UnionTyInfo = UnionSelectionSet
type instance NormalizedField UnionTyInfo = Typename

-- | Selection sets on a union type normalize like interfaces, except
-- that the only field selectable directly on the union is __typename.
instance HasSelectionSet UnionTyInfo where

  getTypename = _utiName
  getMemberTypes = _utiMemberTypes

  -- unions expose no fields of their own, so the lookup map contains
  -- only the __typename field
  parseField_ unionTyInfo field = do
    let fieldMap = Map.singleton (_fiName typenameFld) typenameFld
    fieldInfo <- getFieldInfo (_utiName unionTyInfo) fieldMap $ G._fName field
    fmap (const Typename) <$> denormalizeField fieldInfo field

  -- a directly-selected field lives in the base (unconditional) set
  fieldToSelectionSet alias field =
    ScopedSelectionSet (AliasedFields $ OMap.singleton alias field) mempty

  mergeNormalizedSelectionSets = mergeScopedSelectionSets

  -- an object fragment contributes only to that object's member set
  fromObjectSelectionSet _ fragmentType _ objectSelectionSet =
    ScopedSelectionSet (AliasedFields mempty) $
    Map.singleton fragmentType objectSelectionSet

  -- an abstract-type fragment contributes its member selection set for
  -- every type common to both abstract types
  fromInterfaceSelectionSet _ _ commonTypes interfaceSelectionSet =
    ScopedSelectionSet (AliasedFields mempty) $
    Map.fromList $ flip map (toList commonTypes) $
    \commonType -> (commonType, getMemberSelectionSet commonType interfaceSelectionSet)

  fromUnionSelectionSet _ _ commonTypes unionSelectionSet =
    ScopedSelectionSet (AliasedFields mempty) $
    Map.fromList $ flip map (toList commonTypes) $
    \commonType -> (commonType, getMemberSelectionSet commonType unionSelectionSet)

View File

@ -1,7 +1,9 @@
{-# LANGUAGE GADTs #-}
module Hasura.GraphQL.Validate.Types
( InpValInfo(..)
, ParamMap
, typenameFld
, ObjFldInfo(..)
, mkHsraObjFldInfo
, ObjFieldMap
@ -16,10 +18,19 @@ module Hasura.GraphQL.Validate.Types
, mkObjTyInfo
, mkHsraObjTyInfo
, IFaceTyInfo(..)
-- Don't expose 'IFaceTyInfo' constructor. Instead use 'mkIFaceTyInfo'
-- which will auto-insert the compulsory '__typename' field.
, IFaceTyInfo
, _ifDesc
, _ifName
, _ifFields
, _ifMemberTypes
, mkIFaceTyInfo
, IFacesSet
, UnionTyInfo(..)
, FragDef(..)
, FragmentTypeInfo(..)
, FragDefMap
, AnnVarVals
, AnnInpVal(..)
@ -46,7 +57,7 @@ module Hasura.GraphQL.Validate.Types
, TypeInfo(..)
, isObjTy
, isIFaceTy
, getPossibleObjTypes'
, getPossibleObjTypes
, getObjTyM
, getUnionTyM
, mkScalarTy
@ -54,7 +65,6 @@ module Hasura.GraphQL.Validate.Types
, getNamedTy
, mkTyInfoMap
, fromTyDef
, fromTyDefQ
, fromSchemaDoc
, fromSchemaDocQ
, TypeMap
@ -88,7 +98,6 @@ import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
@ -99,6 +108,7 @@ import Control.Lens (makePrisms)
import qualified Hasura.RQL.Types.Column as RQL
import Hasura.GraphQL.NormalForm
import Hasura.GraphQL.Utils
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Common
@ -122,10 +132,11 @@ fromEnumValDef (G.EnumValueDefinition descM val _) =
data EnumValuesInfo
= EnumValuesSynthetic !(Map.HashMap G.EnumValue EnumValInfo)
-- ^ Values for an enum that exists only in the GraphQL schema and does not have any external
-- source of truth.
-- ^ Values for an enum that exists only in the GraphQL schema and does not
-- have any external source of truth.
| EnumValuesReference !RQL.EnumReference
-- ^ Values for an enum that is backed by an enum table reference (see "Hasura.RQL.Schema.Enum").
-- ^ Values for an enum that is backed by an enum table reference (see
-- "Hasura.RQL.Schema.Enum").
deriving (Show, Eq, TH.Lift)
normalizeEnumValues :: EnumValuesInfo -> Map.HashMap G.EnumValue EnumValInfo
@ -241,10 +252,11 @@ instance Semigroup ObjTyInfo where
}
mkObjTyInfo
:: Maybe G.Description -> G.NamedType -> IFacesSet -> ObjFieldMap -> TypeLoc -> ObjTyInfo
mkObjTyInfo descM ty iFaces flds loc =
:: Maybe G.Description -> G.NamedType
-> IFacesSet -> ObjFieldMap -> TypeLoc -> ObjTyInfo
mkObjTyInfo descM ty iFaces flds _ =
ObjTyInfo descM ty iFaces $ Map.insert (_fiName newFld) newFld flds
where newFld = typenameFld loc
where newFld = typenameFld
mkHsraObjTyInfo
:: Maybe G.Description
@ -257,15 +269,16 @@ mkHsraObjTyInfo descM ty implIFaces flds =
mkIFaceTyInfo
:: Maybe G.Description -> G.NamedType
-> Map.HashMap G.Name ObjFldInfo -> TypeLoc -> IFaceTyInfo
mkIFaceTyInfo descM ty flds loc =
-> Map.HashMap G.Name ObjFldInfo -> MemberTypes -> IFaceTyInfo
mkIFaceTyInfo descM ty flds =
IFaceTyInfo descM ty $ Map.insert (_fiName newFld) newFld flds
where newFld = typenameFld loc
where
newFld = typenameFld
typenameFld :: TypeLoc -> ObjFldInfo
typenameFld loc =
typenameFld :: ObjFldInfo
typenameFld =
ObjFldInfo (Just desc) "__typename" Map.empty
(G.toGT $ G.toNT $ G.NamedType "String") loc
(G.toGT $ G.toNT $ G.NamedType "String") TLHasuraType
where
desc = "The name of the current Object type at runtime"
@ -277,9 +290,10 @@ fromObjTyDef (G.ObjectTypeDefinition descM n ifaces _ flds) loc =
data IFaceTyInfo
= IFaceTyInfo
{ _ifDesc :: !(Maybe G.Description)
, _ifName :: !G.NamedType
, _ifFields :: !ObjFieldMap
{ _ifDesc :: !(Maybe G.Description)
, _ifName :: !G.NamedType
, _ifFields :: !ObjFieldMap
, _ifMemberTypes :: !MemberTypes
} deriving (Show, Eq, TH.Lift)
instance EquatableGType IFaceTyInfo where
@ -287,19 +301,18 @@ instance EquatableGType IFaceTyInfo where
(G.NamedType, Map.HashMap G.Name (G.Name, G.GType, ParamMap))
getEqProps a = (,) (_ifName a) (Map.map getEqProps (_ifFields a))
instance Monoid IFaceTyInfo where
mempty = IFaceTyInfo Nothing (G.NamedType "") Map.empty
instance Semigroup IFaceTyInfo where
objA <> objB =
objA { _ifFields = Map.union (_ifFields objA) (_ifFields objB)
}
fromIFaceDef :: G.InterfaceTypeDefinition -> TypeLoc -> IFaceTyInfo
fromIFaceDef (G.InterfaceTypeDefinition descM n _ flds) loc =
mkIFaceTyInfo descM (G.NamedType n) fldMap loc
fromIFaceDef
:: InterfaceImplementations -> G.InterfaceTypeDefinition -> TypeLoc -> IFaceTyInfo
fromIFaceDef interfaceImplementations (G.InterfaceTypeDefinition descM n _ flds) loc =
mkIFaceTyInfo descM (G.NamedType n) fldMap implementations
where
fldMap = Map.fromList [(G._fldName fld, fromFldDef fld loc) | fld <- flds]
fldMap = Map.fromList [(G._fldName fld, fromFldDef fld loc) | fld <- flds]
implementations = fromMaybe mempty $ Map.lookup (G.NamedType n) interfaceImplementations
type MemberTypes = Set.HashSet G.NamedType
@ -398,23 +411,23 @@ data TypeInfo
instance J.ToJSON TypeInfo where
toJSON _ = J.String "toJSON not implemented for TypeInfo"
instance J.FromJSON TypeInfo where
parseJSON _ = fail "FromJSON not implemented for TypeInfo"
data AsObjType
= AOTObj ObjTyInfo
| AOTIFace IFaceTyInfo
= AOTIFace IFaceTyInfo
| AOTUnion UnionTyInfo
getPossibleObjTypes' :: TypeMap -> AsObjType -> Map.HashMap G.NamedType ObjTyInfo
getPossibleObjTypes' _ (AOTObj obj) = toObjMap [obj]
getPossibleObjTypes' tyMap (AOTIFace i) = toObjMap $ mapMaybe previewImplTypeM $ Map.elems tyMap
where
previewImplTypeM = \case
TIObj objTyInfo -> bool Nothing (Just objTyInfo) $
_ifName i `elem` _otiImplIFaces objTyInfo
_ -> Nothing
getPossibleObjTypes' tyMap (AOTUnion u) = toObjMap $ mapMaybe (extrObjTyInfoM tyMap) $ Set.toList $ _utiMemberTypes u
getPossibleObjTypes :: TypeMap -> AsObjType -> Map.HashMap G.NamedType ObjTyInfo
getPossibleObjTypes tyMap = \case
(AOTIFace i) ->
toObjMap $ mapMaybe (extrObjTyInfoM tyMap) $ Set.toList $ _ifMemberTypes i
(AOTUnion u) ->
toObjMap $ mapMaybe (extrObjTyInfoM tyMap) $ Set.toList $ _utiMemberTypes u
-- toObjMap $ mapMaybe previewImplTypeM $ Map.elems tyMap
-- where
-- previewImplTypeM = \case
-- TIObj objTyInfo -> bool Nothing (Just objTyInfo) $
-- _ifName i `elem` _otiImplIFaces objTyInfo
-- _ -> Nothing
toObjMap :: [ObjTyInfo] -> Map.HashMap G.NamedType ObjTyInfo
toObjMap = foldr (\o -> Map.insert (_otiName o) o) Map.empty
@ -471,7 +484,7 @@ showSPTxt :: SchemaPath -> Text
showSPTxt p = showSPTxt' p <> showSP p
validateIFace :: MonadError Text f => IFaceTyInfo -> f ()
validateIFace (IFaceTyInfo _ n flds) =
validateIFace (IFaceTyInfo _ n flds _) =
when (isFldListEmpty flds) $ throwError $ "List of fields cannot be empty for interface " <> showNamedTy n
validateObj :: TypeMap -> ObjTyInfo -> Either Text ()
@ -615,20 +628,31 @@ mkTyInfoMap :: [TypeInfo] -> TypeMap
mkTyInfoMap tyInfos =
Map.fromList [(getNamedTy tyInfo, tyInfo) | tyInfo <- tyInfos]
fromTyDef :: G.TypeDefinition -> TypeLoc -> TypeInfo
fromTyDef tyDef loc = case tyDef of
fromTyDef :: InterfaceImplementations -> TypeLoc -> G.TypeDefinition -> TypeInfo
fromTyDef interfaceImplementations loc tyDef = case tyDef of
G.TypeDefinitionScalar t -> TIScalar $ fromScalarTyDef t loc
G.TypeDefinitionObject t -> TIObj $ fromObjTyDef t loc
G.TypeDefinitionInterface t -> TIIFace $ fromIFaceDef t loc
G.TypeDefinitionInterface t -> TIIFace $ fromIFaceDef interfaceImplementations t loc
G.TypeDefinitionUnion t -> TIUnion $ fromUnionTyDef t
G.TypeDefinitionEnum t -> TIEnum $ fromEnumTyDef t loc
G.TypeDefinitionInputObject t -> TIInpObj $ fromInpObjTyDef t loc
type InterfaceImplementations = Map.HashMap G.NamedType MemberTypes
fromSchemaDoc :: G.SchemaDocument -> TypeLoc -> Either Text TypeMap
fromSchemaDoc (G.SchemaDocument tyDefs) loc = do
let tyMap = mkTyInfoMap $ map (`fromTyDef` loc) tyDefs
let tyMap = mkTyInfoMap $ map (fromTyDef interfaceImplementations loc) tyDefs
validateTypeMap tyMap
return tyMap
where
interfaceImplementations :: InterfaceImplementations
interfaceImplementations =
foldr (Map.unionWith (<>)) mempty $ flip mapMaybe tyDefs $ \case
G.TypeDefinitionObject objectDefinition ->
Just $ Map.fromList $ zip
(G._otdImplementsInterfaces objectDefinition)
(repeat $ Set.singleton $ G.NamedType $ G._otdName objectDefinition)
_ -> Nothing
validateTypeMap :: TypeMap -> Either Text ()
validateTypeMap tyMap = mapM_ validateTy $ Map.elems tyMap
@ -638,9 +662,6 @@ validateTypeMap tyMap = mapM_ validateTy $ Map.elems tyMap
validateTy (TIIFace i) = validateIFace i
validateTy _ = return ()
fromTyDefQ :: G.TypeDefinition -> TypeLoc -> TH.Q TH.Exp
fromTyDefQ tyDef loc = TH.lift $ fromTyDef tyDef loc
fromSchemaDocQ :: G.SchemaDocument -> TypeLoc -> TH.Q TH.Exp
fromSchemaDocQ sd loc = case fromSchemaDoc sd loc of
Left e -> fail $ T.unpack e
@ -681,64 +702,21 @@ defDirectivesMap = mapFromL _diName defaultDirectives
data FragDef
= FragDef
{ _fdName :: !G.Name
, _fdTyInfo :: !ObjTyInfo
, _fdTyInfo :: !FragmentTypeInfo
, _fdSelSet :: !G.SelectionSet
} deriving (Show, Eq)
data FragmentTypeInfo
= FragmentTyObject !ObjTyInfo
| FragmentTyInterface !IFaceTyInfo
| FragmentTyUnion !UnionTyInfo
deriving (Show, Eq)
type FragDefMap = Map.HashMap G.Name FragDef
type AnnVarVals =
Map.HashMap G.Variable AnnInpVal
-- TODO document me
data AnnInpVal
= AnnInpVal
{ _aivType :: !G.GType
, _aivVariable :: !(Maybe G.Variable)
, _aivValue :: !AnnGValue
} deriving (Show, Eq)
type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal
-- | See 'EnumValuesInfo' for information about what these cases mean.
data AnnGEnumValue
= AGESynthetic !(Maybe G.EnumValue)
| AGEReference !RQL.EnumReference !(Maybe RQL.EnumValue)
deriving (Show, Eq)
data AnnGValue
= AGScalar !PGScalarType !(Maybe PGScalarValue)
| AGEnum !G.NamedType !AnnGEnumValue
| AGObject !G.NamedType !(Maybe AnnGObject)
| AGArray !G.ListType !(Maybe [AnnInpVal])
deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 4 J.camelCase){J.omitNothingFields=True}
''AnnInpVal
)
instance J.ToJSON AnnGValue where
-- toJSON (AGScalar ty valM) =
toJSON = const J.Null
-- J.
-- J.toJSON [J.toJSON ty, J.toJSON valM]
hasNullVal :: AnnGValue -> Bool
hasNullVal = \case
AGScalar _ Nothing -> True
AGEnum _ (AGESynthetic Nothing) -> True
AGEnum _ (AGEReference _ Nothing) -> True
AGObject _ Nothing -> True
AGArray _ Nothing -> True
_ -> False
getAnnInpValKind :: AnnGValue -> Text
getAnnInpValKind = \case
AGScalar _ _ -> "scalar"
AGEnum _ _ -> "enum"
AGObject _ _ -> "object"
AGArray _ _ -> "array"
stripTypenames :: [G.ExecutableDefinition] -> [G.ExecutableDefinition]
stripTypenames = map filterExecDef
where
@ -810,6 +788,7 @@ class (Monad m) => MonadReusability m where
instance (MonadReusability m) => MonadReusability (ReaderT r m) where
recordVariableUse a b = lift $ recordVariableUse a b
markNotReusable = lift markNotReusable
instance (MonadReusability m) => MonadReusability (StateT s m) where
recordVariableUse a b = lift $ recordVariableUse a b
markNotReusable = lift markNotReusable

View File

@ -11,6 +11,7 @@ module Hasura.Prelude
, afold
, bsToTxt
, txtToBs
, base64Decode
, spanMaybeM
-- * Efficient coercions
, coerce
@ -66,6 +67,9 @@ import Test.QuickCheck.Arbitrary.Generic as M
import Text.Read as M (readEither, readMaybe)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Base64.Lazy as Base64
import Data.Coerce
import qualified Data.HashMap.Strict as Map
import qualified Data.Set as Set
@ -106,6 +110,11 @@ bsToTxt = TE.decodeUtf8With TE.lenientDecode
txtToBs :: Text -> B.ByteString
txtToBs = TE.encodeUtf8
base64Decode :: Text -> BL.ByteString
base64Decode =
Base64.decodeLenient . BL.fromStrict . txtToBs
-- Like 'span', but monadic and with a function that produces 'Maybe' instead of 'Bool'
spanMaybeM
:: (Foldable f, Monad m)

View File

@ -14,11 +14,11 @@ module Hasura.RQL.DDL.RemoteSchema
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as S
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import Hasura.EncJSON
import Hasura.GraphQL.NormalForm
import Hasura.GraphQL.RemoteServer
import Hasura.GraphQL.Schema.Merge
import Hasura.Prelude
@ -161,8 +161,9 @@ runIntrospectRemoteSchema (RemoteSchemaNameQuery rsName) = do
(rootSelSet, _) <- flip runReaderT rGCtx $ VT.runReusabilityT $ VQ.validateGQ queryParts
schemaField <-
case rootSelSet of
VQ.RQuery (Seq.viewl -> selSet) -> getSchemaField selSet
_ -> throw500 "expected query for introspection"
VQ.RQuery selSet -> getSchemaField $ toList $ unAliasedFields $
unObjectSelectionSet selSet
_ -> throw500 "expected query for introspection"
(introRes, _) <- flip runReaderT rGCtx $ VT.runReusabilityT $ RI.schemaR schemaField
pure $ wrapInSpecKeys introRes
where
@ -171,8 +172,7 @@ runIntrospectRemoteSchema (RemoteSchemaNameQuery rsName) = do
[ ( T.pack "data"
, encJFromAssocList [(T.pack "__schema", encJFromJValue introObj)])
]
getSchemaField =
\case
Seq.EmptyL -> throw500 "found empty when looking for __schema field"
(f Seq.:< Seq.Empty) -> pure f
_ -> throw500 "expected __schema field, found many fields"
getSchemaField = \case
[] -> throw500 "found empty when looking for __schema field"
[f] -> pure f
_ -> throw500 "expected __schema field, found many fields"

View File

@ -32,6 +32,7 @@ import Data.Aeson
import Data.List (nub)
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.RelaySchema as Relay
import qualified Hasura.GraphQL.Schema as GS
import qualified Hasura.GraphQL.Validate.Types as VT
import qualified Hasura.Incremental as Inc
@ -47,8 +48,8 @@ import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.CustomTypes
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.ScheduledTrigger
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.ScheduledTrigger
import Hasura.RQL.DDL.Schema.Cache.Common
import Hasura.RQL.DDL.Schema.Cache.Dependencies
import Hasura.RQL.DDL.Schema.Cache.Fields
@ -178,6 +179,9 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
, _boRemoteRelationshipTypes resolvedOutputs
)
-- Step 4: Build the relay GraphQL schema
relayGQLSchema <- bindA -< Relay.mkRelayGCtxMap (_boTables resolvedOutputs) (_boFunctions resolvedOutputs)
returnA -< SchemaCache
{ scTables = _boTables resolvedOutputs
, scActions = _boActions resolvedOutputs
@ -187,6 +191,7 @@ buildSchemaCacheRule = proc (catalogMetadata, invalidationKeys) -> do
, scCustomTypes = _boCustomTypes resolvedOutputs
, scGCtxMap = gqlSchema
, scDefaultRemoteGCtx = remoteGQLSchema
, scRelayGCtxMap = relayGQLSchema
, scDepMap = resolvedDependencies
, scInconsistentObjs =
inconsistentObjects <> dependencyInconsistentObjects <> toList gqlSchemaInconsistentObjects

View File

@ -111,7 +111,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
tabFrom = FromIden aliasIden
tabPerm = TablePerm annBoolExpTrue Nothing
selFlds = flip map cols $
\ci -> (fromPGCol $ pgiColumn ci, mkAnnColFieldAsText ci)
\ci -> (fromPGCol $ pgiColumn ci, mkAnnColumnFieldAsText ci)
sql = toSQL selectWith
selectWith = S.SelectWith [(S.Alias aliasIden, cte)] select
@ -127,7 +127,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
, S.selFrom = Just $ S.FromExp [S.FIIden aliasIden]
}
colSel = S.SESelect $ mkSQLSelect JASMultipleRows $
AnnSelG selFlds tabFrom tabPerm noTableArgs strfyNum
AnnSelectG selFlds tabFrom tabPerm noSelectArgs strfyNum
-- | Note:- Using sorted columns is necessary to enable casting the rows returned by VALUES expression to table type.
-- For example, let's consider the table, `CREATE TABLE test (id serial primary key, name text not null, age int)`.

View File

@ -2,16 +2,17 @@
module Hasura.RQL.DML.RemoteJoin
( executeQueryWithRemoteJoins
, getRemoteJoins
, getRemoteJoinsAggSel
, getRemoteJoinsAggregateSelect
, getRemoteJoinsMutationOutput
, getRemoteJoinsConnectionSelect
, RemoteJoins
) where
import Hasura.Prelude
import Control.Lens
import Data.Validation
import Data.List (nub)
import Data.Validation
import Hasura.EncJSON
import Hasura.GraphQL.RemoteServer (execRemoteGQ')
@ -25,6 +26,8 @@ import Hasura.Server.Version (HasVersion)
import Hasura.Session
import Hasura.SQL.Types ((<<>))
import qualified Hasura.SQL.DML as S
import qualified Data.Aeson as A
import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as Map
@ -95,7 +98,7 @@ data RemoteJoin
= RemoteJoin
{ _rjName :: !FieldName -- ^ The remote join field name.
, _rjArgs :: ![RemoteFieldArgument] -- ^ User-provided arguments with variables.
, _rjSelSet :: ![G.Field] -- ^ User-provided selection set of remote field.
, _rjSelSet :: !G.SelectionSet -- ^ User-provided selection set of remote field.
, _rjHasuraFields :: !(HashSet FieldName) -- ^ Table fields.
, _rjFieldCall :: !(NonEmpty FieldCall) -- ^ Remote server fields.
, _rjRemoteSchema :: !RemoteSchemaInfo -- ^ The remote schema server info.
@ -122,13 +125,16 @@ transformSelect path sel = do
transformedFields <- transformAnnFields path fields
pure sel{_asnFields = transformedFields}
-- | Traverse through 'AnnAggSel' and collect remote join fields (if any).
getRemoteJoinsAggSel :: AnnAggSel -> (AnnAggSel, Maybe RemoteJoins)
getRemoteJoinsAggSel =
second mapToNonEmpty . flip runState mempty . transformAggSelect mempty
-- | Traverse through @'AnnAggregateSelect' and collect remote join fields (if any).
getRemoteJoinsAggregateSelect :: AnnAggregateSelect -> (AnnAggregateSelect, Maybe RemoteJoins)
getRemoteJoinsAggregateSelect =
second mapToNonEmpty . flip runState mempty . transformAggregateSelect mempty
transformAggSelect :: FieldPath -> AnnAggSel -> State RemoteJoinMap AnnAggSel
transformAggSelect path sel = do
transformAggregateSelect
:: FieldPath
-> AnnAggregateSelect
-> State RemoteJoinMap AnnAggregateSelect
transformAggregateSelect path sel = do
let aggFields = _asnFields sel
transformedFields <- forM aggFields $ \(fieldName, aggField) ->
(fieldName,) <$> case aggField of
@ -137,6 +143,34 @@ transformAggSelect path sel = do
TAFExp t -> pure $ TAFExp t
pure sel{_asnFields = transformedFields}
-- | Traverse through @'ConnectionSelect' and collect remote join fields (if any).
getRemoteJoinsConnectionSelect :: ConnectionSelect S.SQLExp -> (ConnectionSelect S.SQLExp, Maybe RemoteJoins)
getRemoteJoinsConnectionSelect =
second mapToNonEmpty . flip runState mempty . transformConnectionSelect mempty
transformConnectionSelect
:: FieldPath
-> ConnectionSelect S.SQLExp
-> State RemoteJoinMap (ConnectionSelect S.SQLExp)
transformConnectionSelect path ConnectionSelect{..} = do
let connectionFields = _asnFields _csSelect
transformedFields <- forM connectionFields $ \(fieldName, field) ->
(fieldName,) <$> case field of
ConnectionTypename t -> pure $ ConnectionTypename t
ConnectionPageInfo p -> pure $ ConnectionPageInfo p
ConnectionEdges edges -> ConnectionEdges <$> transformEdges (appendPath fieldName path) edges
let select = _csSelect{_asnFields = transformedFields}
pure $ ConnectionSelect _csPrimaryKeyColumns _csSplit _csSlice select
where
transformEdges edgePath edgeFields =
forM edgeFields $ \(fieldName, edgeField) ->
(fieldName,) <$> case edgeField of
EdgeTypename t -> pure $ EdgeTypename t
EdgeCursor -> pure EdgeCursor
EdgeNode annFields ->
EdgeNode <$> transformAnnFields (appendPath fieldName edgePath) annFields
-- | Traverse through 'MutationOutput' and collect remote join fields (if any)
getRemoteJoinsMutationOutput :: MutationOutput -> (MutationOutput, Maybe RemoteJoins)
getRemoteJoinsMutationOutput =
@ -157,10 +191,10 @@ getRemoteJoinsMutationOutput =
MExp t -> pure $ MExp t
MRet annFields -> MRet <$> transformAnnFields fieldPath annFields
transformAnnFields :: FieldPath -> AnnFlds -> State RemoteJoinMap AnnFlds
transformAnnFields :: FieldPath -> AnnFields -> State RemoteJoinMap AnnFields
transformAnnFields path fields = do
let pgColumnFields = map fst $ getFields _FCol fields
remoteSelects = getFields _FRemote fields
let pgColumnFields = map fst $ getFields _AFColumn fields
remoteSelects = getFields _AFRemote fields
remoteJoins = flip map remoteSelects $ \(fieldName, remoteSelect) ->
let RemoteSelect argsMap selSet hasuraColumns remoteFields rsi = remoteSelect
hasuraColumnL = toList hasuraColumns
@ -171,35 +205,46 @@ transformAnnFields path fields = do
transformedFields <- forM fields $ \(fieldName, field) -> do
let fieldPath = appendPath fieldName path
(fieldName,) <$> case field of
FCol c -> pure $ FCol c
FObj annRel -> FObj <$> transformAnnRel fieldPath annRel
FArr (ASSimple annRel) -> FArr . ASSimple <$> transformAnnRel fieldPath annRel
FArr (ASAgg aggRel) -> FArr . ASAgg <$> transformAnnAggRel fieldPath aggRel
FComputedField computedField ->
FComputedField <$> case computedField of
AFNodeId qt pkeys -> pure $ AFNodeId qt pkeys
AFColumn c -> pure $ AFColumn c
AFObjectRelation annRel ->
AFObjectRelation <$> transformAnnRelation fieldPath annRel
AFArrayRelation (ASSimple annRel) ->
AFArrayRelation . ASSimple <$> transformAnnRelation fieldPath annRel
AFArrayRelation (ASAggregate aggRel) ->
AFArrayRelation . ASAggregate <$> transformAnnAggregateRelation fieldPath aggRel
AFArrayRelation (ASConnection annRel) ->
AFArrayRelation . ASConnection <$> transformArrayConnection fieldPath annRel
AFComputedField computedField ->
AFComputedField <$> case computedField of
CFSScalar _ -> pure computedField
CFSTable jas annSel -> CFSTable jas <$> transformSelect fieldPath annSel
FRemote rs -> pure $ FRemote rs
FExp t -> pure $ FExp t
AFRemote rs -> pure $ AFRemote rs
AFExpression t -> pure $ AFExpression t
case NE.nonEmpty remoteJoins of
Nothing -> pure transformedFields
Just nonEmptyRemoteJoins -> do
let phantomColumns = map (\ci -> (fromPGCol $ pgiColumn ci, FCol $ AnnColField ci False Nothing)) $
let phantomColumns = map (\ci -> (fromPGCol $ pgiColumn ci, AFColumn $ AnnColumnField ci False Nothing)) $
concatMap _rjPhantomFields remoteJoins
modify (Map.insert path nonEmptyRemoteJoins)
pure $ transformedFields <> phantomColumns
where
getFields f = mapMaybe (sequence . second (^? f))
transformAnnRel fieldPath annRel = do
let annSel = aarAnnSel annRel
transformAnnRelation fieldPath annRel = do
let annSel = aarAnnSelect annRel
transformedSel <- transformSelect fieldPath annSel
pure annRel{aarAnnSel = transformedSel}
pure annRel{aarAnnSelect = transformedSel}
transformAnnAggRel fieldPath annRel = do
let annSel = aarAnnSel annRel
transformedSel <- transformAggSelect fieldPath annSel
pure annRel{aarAnnSel = transformedSel}
transformAnnAggregateRelation fieldPath annRel = do
let annSel = aarAnnSelect annRel
transformedSel <- transformAggregateSelect fieldPath annSel
pure annRel{aarAnnSelect = transformedSel}
transformArrayConnection fieldPath annRel = do
let connectionSelect = aarAnnSelect annRel
transformedConnectionSelect <- transformConnectionSelect fieldPath connectionSelect
pure annRel{aarAnnSelect = transformedConnectionSelect}
type CompositeObject a = OMap.InsOrdHashMap Text (CompositeValue a)
@ -304,8 +349,8 @@ fetchRemoteJoinFields manager reqHdrs userInfo remoteJoins = do
results <- forM (Map.toList remoteSchemaBatch) $ \(rsi, batch) -> do
let batchList = toList batch
gqlReq = fieldsToRequest G.OperationTypeQuery
(map _rjfField $ batchList)
(concat (map _rjfVariables $ batchList))
(map _rjfField batchList)
(concatMap _rjfVariables batchList)
gqlReqUnparsed = (GQLQueryText . G.renderExecutableDoc . G.ExecutableDocument . unGQLExecDoc) <$> gqlReq
-- NOTE: discard remote headers (for now):
(_, _, respBody) <- execRemoteGQ' manager userInfo reqHdrs gqlReqUnparsed rsi G.OperationTypeQuery
@ -405,10 +450,10 @@ replaceRemoteFields compositeJson remoteServerResponse =
-- | Fold nested 'FieldCall's into a bare 'Field', inserting the passed
-- selection set at the leaf of the tree we construct.
fieldCallsToField
:: MonadError QErr m
:: forall m. MonadError QErr m
=> [G.Argument]
-> Map.HashMap G.Variable G.Value
-> [G.Field]
-> G.SelectionSet
-- ^ Inserted at leaf of nested FieldCalls
-> G.Alias
-- ^ Top-level name to set for this Field
@ -418,12 +463,13 @@ fieldCallsToField rrArguments variables finalSelSet topAlias =
fmap (\f -> f{G._fAlias = Just topAlias}) . nest
where
-- almost: `foldr nest finalSelSet`
nest :: NonEmpty FieldCall -> m G.Field
nest ((FieldCall name remoteArgs) :| rest) = do
templatedArguments <- createArguments variables remoteArgs
(args, selSet) <- case NE.nonEmpty rest of
Just f -> do
s <- nest f
pure (templatedArguments, pure s)
pure (templatedArguments, [G.SelectionField s])
Nothing ->
let argsToMap = Map.fromList . map (G._aName &&& G._aValue)
arguments = map (uncurry G.Argument) $ Map.toList $
@ -431,7 +477,7 @@ fieldCallsToField rrArguments variables finalSelSet topAlias =
(argsToMap rrArguments)
(argsToMap templatedArguments)
in pure (arguments, finalSelSet)
pure $ G.Field Nothing name args [] $ map G.SelectionField selSet
pure $ G.Field Nothing name args [] selSet
-- This is a kind of "deep merge".
-- For e.g. suppose the input argument of the remote field is something like:

View File

@ -12,7 +12,7 @@ import qualified Hasura.SQL.DML as S
data MutFldG v
= MCount
| MExp !T.Text
| MRet !(AnnFldsG v)
| MRet !(AnnFieldsG v)
deriving (Show, Eq)
traverseMutFld
@ -23,7 +23,7 @@ traverseMutFld
traverseMutFld f = \case
MCount -> pure MCount
MExp t -> pure $ MExp t
MRet flds -> MRet <$> traverse (traverse (traverseAnnFld f)) flds
MRet flds -> MRet <$> traverse (traverse (traverseAnnField f)) flds
type MutFld = MutFldG S.SQLExp
@ -31,7 +31,7 @@ type MutFldsG v = Fields (MutFldG v)
data MutationOutputG v
= MOutMultirowFields !(MutFldsG v)
| MOutSinglerowObject !(AnnFldsG v)
| MOutSinglerowObject !(AnnFieldsG v)
deriving (Show, Eq)
traverseMutationOutput
@ -42,7 +42,7 @@ traverseMutationOutput f = \case
MOutMultirowFields mutationFields ->
MOutMultirowFields <$> traverse (traverse (traverseMutFld f)) mutationFields
MOutSinglerowObject annFields ->
MOutSinglerowObject <$> traverseAnnFlds f annFields
MOutSinglerowObject <$> traverseAnnFields f annFields
type MutationOutput = MutationOutputG S.SQLExp
@ -59,15 +59,15 @@ type MutFlds = MutFldsG S.SQLExp
hasNestedFld :: MutationOutputG a -> Bool
hasNestedFld = \case
MOutMultirowFields flds -> any isNestedMutFld flds
MOutSinglerowObject annFlds -> any isNestedAnnFld annFlds
MOutSinglerowObject annFlds -> any isNestedAnnField annFlds
where
isNestedMutFld (_, mutFld) = case mutFld of
MRet annFlds -> any isNestedAnnFld annFlds
MRet annFlds -> any isNestedAnnField annFlds
_ -> False
isNestedAnnFld (_, annFld) = case annFld of
FObj _ -> True
FArr _ -> True
_ -> False
isNestedAnnField (_, annFld) = case annFld of
AFObjectRelation _ -> True
AFArrayRelation _ -> True
_ -> False
pgColsFromMutFld :: MutFld -> [(PGCol, PGColumnType)]
pgColsFromMutFld = \case
@ -75,16 +75,16 @@ pgColsFromMutFld = \case
MExp _ -> []
MRet selFlds ->
flip mapMaybe selFlds $ \(_, annFld) -> case annFld of
FCol (AnnColField (PGColumnInfo col _ _ colTy _ _) _ _) -> Just (col, colTy)
_ -> Nothing
AFColumn (AnnColumnField (PGColumnInfo col _ _ colTy _ _) _ _) -> Just (col, colTy)
_ -> Nothing
pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColumnType)]
pgColsFromMutFlds = concatMap (pgColsFromMutFld . snd)
pgColsToSelFlds :: [PGColumnInfo] -> [(FieldName, AnnFld)]
pgColsToSelFlds :: [PGColumnInfo] -> [(FieldName, AnnField)]
pgColsToSelFlds cols =
flip map cols $
\pgColInfo -> (fromPGCol $ pgiColumn pgColInfo, mkAnnColField pgColInfo Nothing)
\pgColInfo -> (fromPGCol $ pgiColumn pgColInfo, mkAnnColumnField pgColInfo Nothing)
mkDefaultMutFlds :: Maybe [PGColumnInfo] -> MutationOutput
mkDefaultMutFlds = MOutMultirowFields . \case
@ -107,7 +107,7 @@ mkMutFldExp cteAlias preCalAffRows strfyNum = \case
let tabFrom = FromIden cteAlias
tabPerm = TablePerm annBoolExpTrue Nothing
in S.SESelect $ mkSQLSelect JASMultipleRows $
AnnSelG selFlds tabFrom tabPerm noTableArgs strfyNum
AnnSelectG selFlds tabFrom tabPerm noSelectArgs strfyNum
{- Note [Mutation output expression]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -171,7 +171,7 @@ mkMutationOutputExp qt allCols preCalAffRows cte mutOutput strfyNum =
let tabFrom = FromIden allColumnsAlias
tabPerm = TablePerm annBoolExpTrue Nothing
in S.SESelect $ mkSQLSelect JASSingleObject $
AnnSelG annFlds tabFrom tabPerm noTableArgs strfyNum
AnnSelectG annFlds tabFrom tabPerm noSelectArgs strfyNum
checkRetCols

View File

@ -1,7 +1,5 @@
module Hasura.RQL.DML.Select
( selectP2
, selectQuerySQL
, selectAggQuerySQL
, convSelectQuery
, asSingleRowJsonResp
, module Hasura.RQL.DML.Select.Internal
@ -105,7 +103,7 @@ convOrderByElem
=> SessVarBldr m
-> (FieldInfoMap FieldInfo, SelPermInfo)
-> OrderByCol
-> m AnnObCol
-> m (AnnOrderByElement S.SQLExp)
convOrderByElem sessVarBldr (flds, spi) = \case
OCPG fldName -> do
fldInfo <- askFieldInfo flds fldName
@ -118,7 +116,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
[ fldName <<> " has type 'geometry'"
, " and cannot be used in order_by"
]
else return $ AOCPG $ pgiColumn colInfo
else return $ AOCColumn colInfo
FIRelationship _ -> throw400 UnexpectedPayload $ mconcat
[ fldName <<> " is a"
, " relationship and should be expanded"
@ -149,7 +147,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
]
(relFim, relSpi) <- fetchRelDet (riName relInfo) (riRTable relInfo)
resolvedSelFltr <- convAnnBoolExpPartialSQL sessVarBldr $ spiFilter relSpi
AOCObj relInfo resolvedSelFltr <$>
AOCObjectRelation relInfo resolvedSelFltr <$>
convOrderByElem sessVarBldr (relFim, relSpi) rest
FIRemoteRelationship {} ->
throw400 UnexpectedPayload (mconcat [ fldName <<> " is a remote field" ])
@ -168,12 +166,12 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
indexedForM (sqColumns selQ) $ \case
(ECSimple pgCol) -> do
colInfo <- convExtSimple fieldInfoMap selPermInfo pgCol
return (fromPGCol pgCol, mkAnnColField colInfo Nothing)
return (fromPGCol pgCol, mkAnnColumnField colInfo Nothing)
(ECRel relName mAlias relSelQ) -> do
annRel <- convExtRel fieldInfoMap relName mAlias
relSelQ sessVarBldr prepValBldr
return ( fromRel $ fromMaybe relName mAlias
, either FObj FArr annRel
, either AFObjectRelation AFArrayRelation annRel
)
-- let spiT = spiTable selPermInfo
@ -198,11 +196,11 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
let tabFrom = FromTable $ spiTable selPermInfo
tabPerm = TablePerm resolvedSelFltr mPermLimit
tabArgs = TableArgs wClause annOrdByM mQueryLimit
tabArgs = SelectArgs wClause annOrdByM mQueryLimit
(S.intToSQLExp <$> mQueryOffset) Nothing
strfyNum <- stringifyNum <$> askSQLGenCtx
return $ AnnSelG annFlds tabFrom tabPerm tabArgs strfyNum
return $ AnnSelectG annFlds tabFrom tabPerm tabArgs strfyNum
where
mQueryOffset = sqOffset selQ
@ -229,7 +227,7 @@ convExtRel
-> SelectQExt
-> SessVarBldr m
-> (PGColumnType -> Value -> m S.SQLExp)
-> m (Either ObjSel ArrSel)
-> m (Either ObjectRelationSelect ArraySelect)
convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
-- Point to the name key
relInfo <- withPathK "name" $
@ -240,9 +238,9 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
case relTy of
ObjRel -> do
when misused $ throw400 UnexpectedPayload objRelMisuseMsg
return $ Left $ AnnRelG (fromMaybe relName mAlias) colMapping annSel
return $ Left $ AnnRelationSelectG (fromMaybe relName mAlias) colMapping annSel
ArrRel ->
return $ Right $ ASSimple $ AnnRelG (fromMaybe relName mAlias)
return $ Right $ ASSimple $ AnnRelationSelectG (fromMaybe relName mAlias)
colMapping annSel
where
pgWhenRelErr = "only relationships can be expanded"
@ -270,16 +268,7 @@ convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do
let fieldInfo = _tciFieldInfoMap $ _tiCoreInfo tabInfo
extSelQ <- resolveStar fieldInfo selPermInfo selQ
validateHeaders $ spiRequiredHeaders selPermInfo
convSelectQ fieldInfo selPermInfo
extSelQ sessVarBldr prepArgBuilder
selectQuerySQL :: JsonAggSelect -> AnnSimpleSel -> Q.Query
selectQuerySQL jsonAggSelect sel =
Q.fromBuilder $ toSQL $ mkSQLSelect jsonAggSelect sel
selectAggQuerySQL :: AnnAggSel -> Q.Query
selectAggQuerySQL =
Q.fromBuilder . toSQL . mkAggSelect
convSelectQ fieldInfo selPermInfo extSelQ sessVarBldr prepArgBuilder
selectP2 :: JsonAggSelect -> (AnnSimpleSel, DS.Seq Q.PrepArg) -> Q.TxE QErr EncJSON
selectP2 jsonAggSelect (sel, p) =
@ -288,6 +277,14 @@ selectP2 jsonAggSelect (sel, p) =
where
selectSQL = toSQL $ mkSQLSelect jsonAggSelect sel
-- selectQuerySQL :: JsonAggSelect -> AnnSimpleSel -> Q.Query
-- selectQuerySQL jsonAggSelect sel =
-- Q.fromBuilder $ toSQL $ mkSQLSelect jsonAggSelect sel
-- selectAggQuerySQL :: AnnAggregateSelect -> Q.Query
-- selectAggQuerySQL =
-- Q.fromBuilder . toSQL . mkAggregateSelect
asSingleRowJsonResp :: Q.Query -> [Q.PrepArg] -> Q.TxE QErr EncJSON
asSingleRowJsonResp query args =
encJFromBS . runIdentity . Q.getRow

File diff suppressed because it is too large Load Diff

View File

@ -7,12 +7,12 @@ import Control.Lens hiding ((.=))
import Data.Aeson.Types
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson as J
import Hasura.Prelude
import Hasura.RQL.Types
@ -24,7 +24,8 @@ type SelectQExt = SelectG ExtCol BoolExp Int
data JsonAggSelect
= JASMultipleRows
| JASSingleObject
deriving (Show, Eq)
deriving (Show, Eq, Generic)
instance Hashable JsonAggSelect
-- Columns in RQL
data ExtCol
@ -53,174 +54,187 @@ instance FromJSON ExtCol where
, "object (relationship)"
]
data AnnAggOrdBy
data AnnAggregateOrderBy
= AAOCount
| AAOOp !T.Text !PGCol
deriving (Show, Eq)
| AAOOp !T.Text !PGColumnInfo
deriving (Show, Eq, Generic)
instance Hashable AnnAggregateOrderBy
data AnnObColG v
= AOCPG !PGCol
| AOCObj !RelInfo !(AnnBoolExp v) !(AnnObColG v)
| AOCAgg !RelInfo !(AnnBoolExp v) !AnnAggOrdBy
deriving (Show, Eq)
data AnnOrderByElementG v
= AOCColumn !PGColumnInfo
| AOCObjectRelation !RelInfo !v !(AnnOrderByElementG v)
| AOCArrayAggregation !RelInfo !v !AnnAggregateOrderBy
deriving (Show, Eq, Generic, Functor)
instance (Hashable v) => Hashable (AnnOrderByElementG v)
traverseAnnObCol
type AnnOrderByElement v = AnnOrderByElementG (AnnBoolExp v)
traverseAnnOrderByElement
:: (Applicative f)
=> (a -> f b) -> AnnObColG a -> f (AnnObColG b)
traverseAnnObCol f = \case
AOCPG pgColInfo -> pure $ AOCPG pgColInfo
AOCObj relInfo annBoolExp annObCol ->
AOCObj relInfo
=> (a -> f b) -> AnnOrderByElement a -> f (AnnOrderByElement b)
traverseAnnOrderByElement f = \case
AOCColumn pgColInfo -> pure $ AOCColumn pgColInfo
AOCObjectRelation relInfo annBoolExp annObCol ->
AOCObjectRelation relInfo
<$> traverseAnnBoolExp f annBoolExp
<*> traverseAnnObCol f annObCol
AOCAgg relInfo annBoolExp annAggOb ->
AOCAgg relInfo
<*> traverseAnnOrderByElement f annObCol
AOCArrayAggregation relInfo annBoolExp annAggOb ->
AOCArrayAggregation relInfo
<$> traverseAnnBoolExp f annBoolExp
<*> pure annAggOb
type AnnObCol = AnnObColG S.SQLExp
type AnnOrderByItemG v = OrderByItemG (AnnObColG v)
type AnnOrderByItemG v = OrderByItemG (AnnOrderByElement v)
traverseAnnOrderByItem
:: (Applicative f)
=> (a -> f b) -> AnnOrderByItemG a -> f (AnnOrderByItemG b)
traverseAnnOrderByItem f =
traverse (traverseAnnObCol f)
traverse (traverseAnnOrderByElement f)
type AnnOrderByItem = AnnOrderByItemG S.SQLExp
data AnnRelG a
= AnnRelG
{ aarName :: !RelName -- Relationship name
, aarMapping :: !(HashMap PGCol PGCol) -- Column of left table to join with
, aarAnnSel :: !a -- Current table. Almost ~ to SQL Select
type OrderByItemExp =
OrderByItemG (AnnOrderByElement S.SQLExp, (S.Alias, S.SQLExp))
data AnnRelationSelectG a
= AnnRelationSelectG
{ aarRelationshipName :: !RelName -- Relationship name
, aarColumnMapping :: !(HashMap PGCol PGCol) -- Column of left table to join with
, aarAnnSelect :: !a -- Current table. Almost ~ to SQL Select
} deriving (Show, Eq, Functor, Foldable, Traversable)
type ObjSelG v = AnnRelG (AnnSimpleSelG v)
type ObjSel = ObjSelG S.SQLExp
type ObjectRelationSelectG v = AnnRelationSelectG (AnnSimpleSelG v)
type ObjectRelationSelect = ObjectRelationSelectG S.SQLExp
type ArrRelG v = AnnRelG (AnnSimpleSelG v)
type ArrRelAggG v = AnnRelG (AnnAggSelG v)
type ArrRelAgg = ArrRelAggG S.SQLExp
type ArrayRelationSelectG v = AnnRelationSelectG (AnnSimpleSelG v)
type ArrayAggregateSelectG v = AnnRelationSelectG (AnnAggregateSelectG v)
type ArrayConnectionSelect v = AnnRelationSelectG (ConnectionSelect v)
type ArrayAggregateSelect = ArrayAggregateSelectG S.SQLExp
data ComputedFieldScalarSel v
= ComputedFieldScalarSel
data ComputedFieldScalarSelect v
= ComputedFieldScalarSelect
{ _cfssFunction :: !QualifiedFunction
, _cfssArguments :: !(FunctionArgsExpTableRow v)
, _cfssType :: !PGScalarType
, _cfssColumnOp :: !(Maybe ColOp)
, _cfssColumnOp :: !(Maybe ColumnOp)
} deriving (Show, Eq, Functor, Foldable, Traversable)
data ComputedFieldSel v
= CFSScalar !(ComputedFieldScalarSel v)
data ComputedFieldSelect v
= CFSScalar !(ComputedFieldScalarSelect v)
| CFSTable !JsonAggSelect !(AnnSimpleSelG v)
deriving (Show, Eq)
traverseComputedFieldSel
traverseComputedFieldSelect
:: (Applicative f)
=> (v -> f w)
-> ComputedFieldSel v -> f (ComputedFieldSel w)
traverseComputedFieldSel fv = \case
-> ComputedFieldSelect v -> f (ComputedFieldSelect w)
traverseComputedFieldSelect fv = \case
CFSScalar scalarSel -> CFSScalar <$> traverse fv scalarSel
CFSTable b tableSel -> CFSTable b <$> traverseAnnSimpleSel fv tableSel
CFSTable b tableSel -> CFSTable b <$> traverseAnnSimpleSelect fv tableSel
type Fields a = [(FieldName, a)]
data ArrSelG v
= ASSimple !(ArrRelG v)
| ASAgg !(ArrRelAggG v)
data ArraySelectG v
= ASSimple !(ArrayRelationSelectG v)
| ASAggregate !(ArrayAggregateSelectG v)
| ASConnection !(ArrayConnectionSelect v)
deriving (Show, Eq)
traverseArrSel
traverseArraySelect
:: (Applicative f)
=> (a -> f b)
-> ArrSelG a
-> f (ArrSelG b)
traverseArrSel f = \case
ASSimple arrRel -> ASSimple <$> traverse (traverseAnnSimpleSel f) arrRel
ASAgg arrRelAgg -> ASAgg <$> traverse (traverseAnnAggSel f) arrRelAgg
-> ArraySelectG a
-> f (ArraySelectG b)
traverseArraySelect f = \case
ASSimple arrRel ->
ASSimple <$> traverse (traverseAnnSimpleSelect f) arrRel
ASAggregate arrRelAgg ->
ASAggregate <$> traverse (traverseAnnAggregateSelect f) arrRelAgg
ASConnection relConnection ->
ASConnection <$> traverse (traverseConnectionSelect f) relConnection
type ArrSel = ArrSelG S.SQLExp
type ArraySelect = ArraySelectG S.SQLExp
type ArrSelFldsG v = Fields (ArrSelG v)
type ArraySelectFieldsG v = Fields (ArraySelectG v)
data ColOp
= ColOp
data ColumnOp
= ColumnOp
{ _colOp :: S.SQLOp
, _colExp :: S.SQLExp
} deriving (Show, Eq)
data AnnColField
= AnnColField
data AnnColumnField
= AnnColumnField
{ _acfInfo :: !PGColumnInfo
, _acfAsText :: !Bool
-- ^ If this field is 'True', columns are explicitly casted to @text@ when fetched, which avoids
-- an issue that occurs because we dont currently have proper support for array types. See
-- https://github.com/hasura/graphql-engine/pull/3198 for more details.
, _acfOp :: !(Maybe ColOp)
, _acfOp :: !(Maybe ColumnOp)
} deriving (Show, Eq)
data RemoteFieldArgument
= RemoteFieldArgument
{ _rfaArgument :: !G.Argument
, _rfaVariable :: !(Maybe [(G.VariableDefinition,J.Value)])
}
deriving (Eq,Show)
} deriving (Eq,Show)
data RemoteSelect
= RemoteSelect
{ _rselArgs :: ![RemoteFieldArgument]
, _rselSelection :: ![G.Field]
, _rselSelection :: !G.SelectionSet
, _rselHasuraColumns :: !(HashSet PGColumnInfo)
, _rselFieldCall :: !(NonEmpty FieldCall)
, _rselRemoteSchema :: !RemoteSchemaInfo
} deriving (Show, Eq)
data AnnFldG v
= FCol !AnnColField
| FObj !(ObjSelG v)
| FArr !(ArrSelG v)
| FComputedField !(ComputedFieldSel v)
| FRemote !RemoteSelect
| FExp !T.Text
data AnnFieldG v
= AFColumn !AnnColumnField
| AFObjectRelation !(ObjectRelationSelectG v)
| AFArrayRelation !(ArraySelectG v)
| AFComputedField !(ComputedFieldSelect v)
| AFRemote !RemoteSelect
| AFNodeId !QualifiedTable !(NonEmpty PGColumnInfo)
| AFExpression !T.Text
deriving (Show, Eq)
mkAnnColField :: PGColumnInfo -> Maybe ColOp -> AnnFldG v
mkAnnColField ci colOpM =
FCol $ AnnColField ci False colOpM
mkAnnColumnField :: PGColumnInfo -> Maybe ColumnOp -> AnnFieldG v
mkAnnColumnField ci colOpM =
AFColumn $ AnnColumnField ci False colOpM
mkAnnColFieldAsText :: PGColumnInfo -> AnnFldG v
mkAnnColFieldAsText ci =
FCol $ AnnColField ci True Nothing
mkAnnColumnFieldAsText :: PGColumnInfo -> AnnFieldG v
mkAnnColumnFieldAsText ci =
AFColumn $ AnnColumnField ci True Nothing
traverseAnnFld
traverseAnnField
:: (Applicative f)
=> (a -> f b) -> AnnFldG a -> f (AnnFldG b)
traverseAnnFld f = \case
FCol colFld -> pure $ FCol colFld
FObj sel -> FObj <$> traverse (traverseAnnSimpleSel f) sel
FArr sel -> FArr <$> traverseArrSel f sel
FComputedField sel -> FComputedField <$> traverseComputedFieldSel f sel
FExp t -> FExp <$> pure t
FRemote s -> pure $ FRemote s
=> (a -> f b) -> AnnFieldG a -> f (AnnFieldG b)
traverseAnnField f = \case
AFColumn colFld -> pure $ AFColumn colFld
AFObjectRelation sel -> AFObjectRelation <$> traverse (traverseAnnSimpleSelect f) sel
AFArrayRelation sel -> AFArrayRelation <$> traverseArraySelect f sel
AFComputedField sel -> AFComputedField <$> traverseComputedFieldSelect f sel
AFRemote s -> pure $ AFRemote s
AFNodeId qt pKeys -> pure $ AFNodeId qt pKeys
AFExpression t -> AFExpression <$> pure t
type AnnFld = AnnFldG S.SQLExp
type AnnField = AnnFieldG S.SQLExp
data TableArgsG v
= TableArgs
{ _taWhere :: !(Maybe (AnnBoolExp v))
, _taOrderBy :: !(Maybe (NE.NonEmpty (AnnOrderByItemG v)))
, _taLimit :: !(Maybe Int)
, _taOffset :: !(Maybe S.SQLExp)
, _taDistCols :: !(Maybe (NE.NonEmpty PGCol))
} deriving (Show, Eq)
data SelectArgsG v
= SelectArgs
{ _saWhere :: !(Maybe (AnnBoolExp v))
, _saOrderBy :: !(Maybe (NE.NonEmpty (AnnOrderByItemG v)))
, _saLimit :: !(Maybe Int)
, _saOffset :: !(Maybe S.SQLExp)
, _saDistinct :: !(Maybe (NE.NonEmpty PGCol))
} deriving (Show, Eq, Generic)
instance (Hashable v) => Hashable (SelectArgsG v)
traverseTableArgs
traverseSelectArgs
:: (Applicative f)
=> (a -> f b) -> TableArgsG a -> f (TableArgsG b)
traverseTableArgs f (TableArgs wh ordBy lmt ofst distCols) =
TableArgs
=> (a -> f b) -> SelectArgsG a -> f (SelectArgsG b)
traverseSelectArgs f (SelectArgs wh ordBy lmt ofst distCols) =
SelectArgs
<$> traverse (traverseAnnBoolExp f) wh
-- traversing through maybe -> nonempty -> annorderbyitem
<*> traverse (traverse (traverseAnnOrderByItem f)) ordBy
@ -228,63 +242,104 @@ traverseTableArgs f (TableArgs wh ordBy lmt ofst distCols) =
<*> pure ofst
<*> pure distCols
type TableArgs = TableArgsG S.SQLExp
type SelectArgs = SelectArgsG S.SQLExp
noTableArgs :: TableArgsG v
noTableArgs = TableArgs Nothing Nothing Nothing Nothing Nothing
noSelectArgs :: SelectArgsG v
noSelectArgs = SelectArgs Nothing Nothing Nothing Nothing Nothing
data PGColFld
= PCFCol !PGCol
| PCFExp !T.Text
deriving (Show, Eq)
type ColFlds = Fields PGColFld
type ColumnFields = Fields PGColFld
data AggOp
= AggOp
{ _aoOp :: !T.Text
, _aoFlds :: !ColFlds
data AggregateOp
= AggregateOp
{ _aoOp :: !T.Text
, _aoFields :: !ColumnFields
} deriving (Show, Eq)
data AggFld
data AggregateField
= AFCount !S.CountType
| AFOp !AggOp
| AFOp !AggregateOp
| AFExp !T.Text
deriving (Show, Eq)
type AggFlds = Fields AggFld
type AnnFldsG v = Fields (AnnFldG v)
type AggregateFields = Fields AggregateField
type AnnFieldsG v = Fields (AnnFieldG v)
traverseAnnFlds
traverseAnnFields
:: (Applicative f)
=> (a -> f b) -> AnnFldsG a -> f (AnnFldsG b)
traverseAnnFlds f = traverse (traverse (traverseAnnFld f))
=> (a -> f b) -> AnnFieldsG a -> f (AnnFieldsG b)
traverseAnnFields f = traverse (traverse (traverseAnnField f))
type AnnFlds = AnnFldsG S.SQLExp
type AnnFields = AnnFieldsG S.SQLExp
data TableAggFldG v
= TAFAgg !AggFlds
| TAFNodes !(AnnFldsG v)
data TableAggregateFieldG v
= TAFAgg !AggregateFields
| TAFNodes !(AnnFieldsG v)
| TAFExp !T.Text
deriving (Show, Eq)
traverseTableAggFld
data PageInfoField
= PageInfoTypename !Text
| PageInfoHasNextPage
| PageInfoHasPreviousPage
| PageInfoStartCursor
| PageInfoEndCursor
deriving (Show, Eq)
type PageInfoFields = Fields PageInfoField
data EdgeField v
= EdgeTypename !Text
| EdgeCursor
| EdgeNode !(AnnFieldsG v)
deriving (Show, Eq)
type EdgeFields v = Fields (EdgeField v)
traverseEdgeField
:: (Applicative f)
=> (a -> f b) -> TableAggFldG a -> f (TableAggFldG b)
traverseTableAggFld f = \case
=> (a -> f b) -> EdgeField a -> f (EdgeField b)
traverseEdgeField f = \case
EdgeTypename t -> pure $ EdgeTypename t
EdgeCursor -> pure EdgeCursor
EdgeNode fields -> EdgeNode <$> traverseAnnFields f fields
data ConnectionField v
= ConnectionTypename !Text
| ConnectionPageInfo !PageInfoFields
| ConnectionEdges !(EdgeFields v)
deriving (Show, Eq)
type ConnectionFields v = Fields (ConnectionField v)
traverseConnectionField
:: (Applicative f)
=> (a -> f b) -> ConnectionField a -> f (ConnectionField b)
traverseConnectionField f = \case
ConnectionTypename t -> pure $ ConnectionTypename t
ConnectionPageInfo fields -> pure $ ConnectionPageInfo fields
ConnectionEdges fields ->
ConnectionEdges <$> traverse (traverse (traverseEdgeField f)) fields
traverseTableAggregateField
:: (Applicative f)
=> (a -> f b) -> TableAggregateFieldG a -> f (TableAggregateFieldG b)
traverseTableAggregateField f = \case
TAFAgg aggFlds -> pure $ TAFAgg aggFlds
TAFNodes annFlds -> TAFNodes <$> traverseAnnFlds f annFlds
TAFNodes annFlds -> TAFNodes <$> traverseAnnFields f annFlds
TAFExp t -> pure $ TAFExp t
type TableAggFld = TableAggFldG S.SQLExp
type TableAggFldsG v = Fields (TableAggFldG v)
type TableAggFlds = TableAggFldsG S.SQLExp
type TableAggregateField = TableAggregateFieldG S.SQLExp
type TableAggregateFieldsG v = Fields (TableAggregateFieldG v)
type TableAggregateFields = TableAggregateFieldsG S.SQLExp
data ArgumentExp a
= AETableRow !(Maybe Iden) -- ^ table row accessor
| AESession !a -- ^ JSON/JSONB hasura session variable object
| AEInput !a
deriving (Show, Eq, Functor, Foldable, Traversable)
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (Hashable v) => Hashable (ArgumentExp v)
type FunctionArgsExpTableRow v = FunctionArgsExpG (ArgumentExp v)
@ -293,8 +348,10 @@ data SelectFromG v
| FromIden !Iden
| FromFunction !QualifiedFunction
!(FunctionArgsExpTableRow v)
-- a definition list
!(Maybe [(PGCol, PGScalarType)])
deriving (Show, Eq, Functor, Foldable, Traversable)
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (Hashable v) => Hashable (SelectFromG v)
type SelectFrom = SelectFromG S.SQLExp
@ -302,7 +359,8 @@ data TablePermG v
= TablePerm
{ _tpFilter :: !(AnnBoolExp v)
, _tpLimit :: !(Maybe Int)
} deriving (Eq, Show)
} deriving (Eq, Show, Generic)
instance (Hashable v) => Hashable (TablePermG v)
traverseTablePerm
:: (Applicative f)
@ -320,62 +378,105 @@ noTablePermissions =
type TablePerm = TablePermG S.SQLExp
data AnnSelG a v
= AnnSelG
data AnnSelectG a v
= AnnSelectG
{ _asnFields :: !a
, _asnFrom :: !(SelectFromG v)
, _asnPerm :: !(TablePermG v)
, _asnArgs :: !(TableArgsG v)
, _asnArgs :: !(SelectArgsG v)
, _asnStrfyNum :: !Bool
} deriving (Show, Eq)
getPermLimit :: AnnSelG a v -> Maybe Int
getPermLimit = _tpLimit . _asnPerm
traverseAnnSimpleSel
traverseAnnSimpleSelect
:: (Applicative f)
=> (a -> f b)
-> AnnSimpleSelG a -> f (AnnSimpleSelG b)
traverseAnnSimpleSel f = traverseAnnSel (traverseAnnFlds f) f
traverseAnnSimpleSelect f = traverseAnnSelect (traverseAnnFields f) f
traverseAnnAggSel
traverseAnnAggregateSelect
:: (Applicative f)
=> (a -> f b)
-> AnnAggSelG a -> f (AnnAggSelG b)
traverseAnnAggSel f =
traverseAnnSel (traverse (traverse (traverseTableAggFld f))) f
-> AnnAggregateSelectG a -> f (AnnAggregateSelectG b)
traverseAnnAggregateSelect f =
traverseAnnSelect (traverse (traverse (traverseTableAggregateField f))) f
traverseAnnSel
traverseAnnSelect
:: (Applicative f)
=> (a -> f b) -> (v -> f w)
-> AnnSelG a v -> f (AnnSelG b w)
traverseAnnSel f1 f2 (AnnSelG flds tabFrom perm args strfyNum) =
AnnSelG
-> AnnSelectG a v -> f (AnnSelectG b w)
traverseAnnSelect f1 f2 (AnnSelectG flds tabFrom perm args strfyNum) =
AnnSelectG
<$> f1 flds
<*> traverse f2 tabFrom
<*> traverseTablePerm f2 perm
<*> traverseTableArgs f2 args
<*> traverseSelectArgs f2 args
<*> pure strfyNum
type AnnSimpleSelG v = AnnSelG (AnnFldsG v) v
type AnnSimpleSelG v = AnnSelectG (AnnFieldsG v) v
type AnnSimpleSel = AnnSimpleSelG S.SQLExp
type AnnAggSelG v = AnnSelG (TableAggFldsG v) v
type AnnAggSel = AnnAggSelG S.SQLExp
type AnnAggregateSelectG v = AnnSelectG (TableAggregateFieldsG v) v
type AnnAggregateSelect = AnnAggregateSelectG S.SQLExp
data ConnectionSlice
= SliceFirst !Int
| SliceLast !Int
deriving (Show, Eq, Generic)
instance Hashable ConnectionSlice
data ConnectionSplitKind
= CSKBefore
| CSKAfter
deriving (Show, Eq, Generic)
instance Hashable ConnectionSplitKind
data ConnectionSplit v
= ConnectionSplit
{ _csKind :: !ConnectionSplitKind
, _csValue :: !v
, _csOrderBy :: !(OrderByItemG (AnnOrderByElementG ()))
} deriving (Show, Eq, Functor, Generic, Foldable, Traversable)
instance (Hashable v) => Hashable (ConnectionSplit v)
traverseConnectionSplit
:: (Applicative f)
=> (a -> f b) -> ConnectionSplit a -> f (ConnectionSplit b)
traverseConnectionSplit f (ConnectionSplit k v ob) =
ConnectionSplit k <$> f v <*> pure ob
data ConnectionSelect v
= ConnectionSelect
{ _csPrimaryKeyColumns :: !(NE.NonEmpty PGColumnInfo)
, _csSplit :: !(Maybe (NE.NonEmpty (ConnectionSplit v)))
, _csSlice :: !(Maybe ConnectionSlice)
, _csSelect :: !(AnnSelectG (ConnectionFields v) v)
} deriving (Show, Eq)
traverseConnectionSelect
:: (Applicative f)
=> (a -> f b)
-> ConnectionSelect a -> f (ConnectionSelect b)
traverseConnectionSelect f (ConnectionSelect pkCols cSplit cSlice sel) =
ConnectionSelect pkCols
<$> traverse (traverse (traverseConnectionSplit f)) cSplit
<*> pure cSlice
<*> traverseAnnSelect (traverse (traverse (traverseConnectionField f))) f sel
data FunctionArgsExpG a
= FunctionArgsExp
{ _faePositional :: ![a]
, _faeNamed :: !(HM.HashMap Text a)
} deriving (Show, Eq, Functor, Foldable, Traversable)
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (Hashable a) => Hashable (FunctionArgsExpG a)
emptyFunctionArgsExp :: FunctionArgsExpG a
emptyFunctionArgsExp = FunctionArgsExp [] HM.empty
type FunctionArgExp = FunctionArgsExpG S.SQLExp
-- | If argument positional index is less than or equal to length of 'positional' arguments then
-- insert the value in 'positional' arguments else insert the value with argument name in 'named' arguments
-- | If argument positional index is less than or equal to length of
-- 'positional' arguments then insert the value in 'positional' arguments else
-- insert the value with argument name in 'named' arguments
insertFunctionArg
:: FunctionArgName
-> Int
@ -390,114 +491,106 @@ insertFunctionArg argName idx value (FunctionArgsExp positional named) =
where
insertAt i a = toList . Seq.insertAt i a . Seq.fromList
data BaseNode
= BaseNode
{ _bnPrefix :: !Iden
, _bnDistinct :: !(Maybe S.DistinctExpr)
, _bnFrom :: !S.FromItem
, _bnWhere :: !S.BoolExp
, _bnOrderBy :: !(Maybe S.OrderByExp)
, _bnLimit :: !(Maybe Int)
, _bnOffset :: !(Maybe S.SQLExp)
data SourcePrefixes
= SourcePrefixes
{ _pfThis :: !Iden -- ^ Current source prefix
, _pfBase :: !Iden
-- ^ Base table source row identifier to generate
-- the table's column identifiers for computed field
-- function input parameters
} deriving (Show, Eq, Generic)
instance Hashable SourcePrefixes
, _bnExtrs :: !(HM.HashMap S.Alias S.SQLExp)
, _bnObjs :: !(HM.HashMap RelName ObjNode)
, _bnArrs :: !(HM.HashMap S.Alias ArrNode)
, _bnComputedFieldTables :: !(HM.HashMap FieldName CFTableNode)
data SelectSource
= SelectSource
{ _ssPrefix :: !Iden
, _ssFrom :: !S.FromItem
, _ssDistinct :: !(Maybe S.DistinctExpr)
, _ssWhere :: !S.BoolExp
, _ssOrderBy :: !(Maybe S.OrderByExp)
, _ssLimit :: !(Maybe Int)
, _ssOffset :: !(Maybe S.SQLExp)
} deriving (Show, Eq, Generic)
instance Hashable SelectSource
data SelectNode
= SelectNode
{ _snExtractors :: !(HM.HashMap S.Alias S.SQLExp)
, _snJoinTree :: !JoinTree
} deriving (Show, Eq)
mergeBaseNodes :: BaseNode -> BaseNode -> BaseNode
mergeBaseNodes lNodeDet rNodeDet =
BaseNode pfx dExp f whr ordBy limit offset
(HM.union lExtrs rExtrs)
(HM.unionWith mergeObjNodes lObjs rObjs)
(HM.unionWith mergeArrNodes lArrs rArrs)
(HM.unionWith mergeCFTableNodes lCFTables rCFTables)
where
BaseNode pfx dExp f whr ordBy limit offset lExtrs lObjs lArrs lCFTables
= lNodeDet
BaseNode _ _ _ _ _ _ _ rExtrs rObjs rArrs rCFTables
= rNodeDet
instance Semigroup SelectNode where
SelectNode lExtrs lJoinTree <> SelectNode rExtrs rJoinTree =
SelectNode (lExtrs <> rExtrs) (lJoinTree <> rJoinTree)
data OrderByNode
= OBNNothing
| OBNObjNode !RelName !ObjNode
| OBNArrNode !S.Alias !ArrNode
data ObjectRelationSource
= ObjectRelationSource
{ _orsRelationshipName :: !RelName
, _orsRelationMapping :: !(HM.HashMap PGCol PGCol)
, _orsSelectSource :: !SelectSource
} deriving (Show, Eq, Generic)
instance Hashable ObjectRelationSource
data ArrayRelationSource
= ArrayRelationSource
{ _arsAlias :: !S.Alias
, _arsRelationMapping :: !(HM.HashMap PGCol PGCol)
, _arsSelectSource :: !SelectSource
} deriving (Show, Eq, Generic)
instance Hashable ArrayRelationSource
data ArraySelectNode
= ArraySelectNode
{ _asnTopExtractors :: ![S.Extractor]
, _asnSelectNode :: !SelectNode
} deriving (Show, Eq)
instance Semigroup ArraySelectNode where
ArraySelectNode lTopExtrs lSelNode <> ArraySelectNode rTopExtrs rSelNode =
ArraySelectNode (lTopExtrs <> rTopExtrs) (lSelNode <> rSelNode)
data ComputedFieldTableSetSource
= ComputedFieldTableSetSource
{ _cftssFieldName :: !FieldName
, _cftssSelectType :: !JsonAggSelect
, _cftssSelectSource :: !SelectSource
} deriving (Show, Eq, Generic)
instance Hashable ComputedFieldTableSetSource
data ArrayConnectionSource
= ArrayConnectionSource
{ _acsAlias :: !S.Alias
, _acsRelationMapping :: !(HM.HashMap PGCol PGCol)
, _acsSplitFilter :: !(Maybe S.BoolExp)
, _acsSlice :: !(Maybe ConnectionSlice)
, _acsSource :: !SelectSource
} deriving (Show, Eq, Generic)
instance Hashable ArrayConnectionSource
data JoinTree
= JoinTree
{ _jtObjectRelations :: !(HM.HashMap ObjectRelationSource SelectNode)
, _jtArrayRelations :: !(HM.HashMap ArrayRelationSource ArraySelectNode)
, _jtArrayConnections :: !(HM.HashMap ArrayConnectionSource ArraySelectNode)
, _jtComputedFieldTableSets :: !(HM.HashMap ComputedFieldTableSetSource SelectNode)
} deriving (Show, Eq)
instance Semigroup JoinTree where
JoinTree lObjs lArrs lArrConns lCfts <> JoinTree rObjs rArrs rArrConns rCfts =
JoinTree (HM.unionWith (<>) lObjs rObjs)
(HM.unionWith (<>) lArrs rArrs)
(HM.unionWith (<>) lArrConns rArrConns)
(HM.unionWith (<>) lCfts rCfts)
instance Monoid JoinTree where
mempty = JoinTree mempty mempty mempty mempty
data PermissionLimitSubQuery
= PLSQRequired !Int -- ^ Permission limit
| PLSQNotRequired
deriving (Show, Eq)
data ArrRelCtxG v
= ArrRelCtx
{ aacFields :: !(ArrSelFldsG v)
, aacAggOrdBys :: ![RelName]
} deriving (Show, Eq)
type ArrRelCtx = ArrRelCtxG S.SQLExp
emptyArrRelCtx :: ArrRelCtxG a
emptyArrRelCtx = ArrRelCtx [] []
data ArrNodeItemG v
= ANIField !(FieldName, ArrSelG v)
| ANIAggOrdBy !RelName
deriving (Show, Eq)
type ArrNodeItem = ArrNodeItemG S.SQLExp
data ObjNode
= ObjNode
{ _rnRelMapping :: !(HashMap PGCol PGCol)
, _rnNodeDet :: !BaseNode
} deriving (Show, Eq)
mergeObjNodes :: ObjNode -> ObjNode -> ObjNode
mergeObjNodes lNode rNode =
ObjNode colMapping $ mergeBaseNodes lBN rBN
where
ObjNode colMapping lBN = lNode
ObjNode _ rBN = rNode
-- simple array select, aggregate select and order by
-- nodes differ in extractors
data ArrNode
= ArrNode
{ _anExtr :: ![S.Extractor]
, _anRelMapping :: !(HashMap PGCol PGCol)
, _anNodeDet :: !BaseNode
} deriving (Show, Eq)
mergeArrNodes :: ArrNode -> ArrNode -> ArrNode
mergeArrNodes lNode rNode =
ArrNode (lExtrs `union` rExtrs) colMapping $
mergeBaseNodes lBN rBN
where
ArrNode lExtrs colMapping lBN = lNode
ArrNode rExtrs _ rBN = rNode
data ArrNodeInfo
= ArrNodeInfo
{ _aniAlias :: !S.Alias
, _aniPrefix :: !Iden
, _aniSubQueryRequired :: !Bool
} deriving (Show, Eq)
-- | Node for computed field returning setof <table>
data CFTableNode
= CFTableNode
{ _ctnSelectType :: !JsonAggSelect
, _ctnNode :: !BaseNode
} deriving (Show, Eq)
mergeCFTableNodes :: CFTableNode -> CFTableNode -> CFTableNode
mergeCFTableNodes lNode rNode =
CFTableNode
(_ctnSelectType rNode)
(mergeBaseNodes (_ctnNode lNode) (_ctnNode rNode))
data Prefixes
= Prefixes
{ _pfThis :: !Iden -- Current node prefix
, _pfBase :: !Iden -- Base table row identifier for computed field function
} deriving (Show, Eq)
$(makeLenses ''AnnSelG)
$(makePrisms ''AnnFldG)
$(makeLenses ''AnnSelectG)
$(makePrisms ''AnnFieldG)
$(makePrisms ''AnnOrderByElementG)

View File

@ -65,6 +65,7 @@ data GExists a
instance (NFData a) => NFData (GExists a)
instance (Data a) => Plated (GExists a)
instance (Cacheable a) => Cacheable (GExists a)
instance (Hashable a) => Hashable (GExists a)
gExistsToJSON :: (a -> (Text, Value)) -> GExists a -> Value
gExistsToJSON f (GExists qt wh) =
@ -91,6 +92,7 @@ data GBoolExp a
instance (NFData a) => NFData (GBoolExp a)
instance (Data a) => Plated (GBoolExp a)
instance (Cacheable a) => Cacheable (GBoolExp a)
instance (Hashable a) => Hashable (GBoolExp a)
gBoolExpTrue :: GBoolExp a
gBoolExpTrue = BoolAnd []
@ -142,6 +144,7 @@ data DWithinGeomOp a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (DWithinGeomOp a)
instance (Cacheable a) => Cacheable (DWithinGeomOp a)
instance (Hashable a) => Hashable (DWithinGeomOp a)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeomOp)
data DWithinGeogOp a =
@ -152,6 +155,7 @@ data DWithinGeogOp a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (DWithinGeogOp a)
instance (Cacheable a) => Cacheable (DWithinGeogOp a)
instance (Hashable a) => Hashable (DWithinGeogOp a)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeogOp)
data STIntersectsNbandGeommin a =
@ -161,6 +165,7 @@ data STIntersectsNbandGeommin a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (STIntersectsNbandGeommin a)
instance (Cacheable a) => Cacheable (STIntersectsNbandGeommin a)
instance (Hashable a) => Hashable (STIntersectsNbandGeommin a)
$(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsNbandGeommin)
data STIntersectsGeomminNband a =
@ -170,6 +175,7 @@ data STIntersectsGeomminNband a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (STIntersectsGeomminNband a)
instance (Cacheable a) => Cacheable (STIntersectsGeomminNband a)
instance (Hashable a) => Hashable (STIntersectsGeomminNband a)
$(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsGeomminNband)
type CastExp a = M.HashMap PGScalarType [OpExpG a]
@ -229,6 +235,7 @@ data OpExpG a
deriving (Eq, Show, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (OpExpG a)
instance (Cacheable a) => Cacheable (OpExpG a)
instance (Hashable a) => Hashable (OpExpG a)
opExpDepCol :: OpExpG a -> Maybe PGCol
opExpDepCol = \case
@ -302,6 +309,7 @@ data AnnBoolExpFld a
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (NFData a) => NFData (AnnBoolExpFld a)
instance (Cacheable a) => Cacheable (AnnBoolExpFld a)
instance (Hashable a) => Hashable (AnnBoolExpFld a)
type AnnBoolExp a
= GBoolExp (AnnBoolExpFld a)

View File

@ -57,7 +57,7 @@ import Data.Aeson.Casing
import Data.Aeson.TH
import Data.URL.Template
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Q, TExp, Lift)
import Language.Haskell.TH.Syntax (Lift, Q, TExp)
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
@ -156,6 +156,7 @@ data RelInfo
} deriving (Show, Eq, Generic)
instance NFData RelInfo
instance Cacheable RelInfo
instance Hashable RelInfo
$(deriveToJSON (aesonDrop 2 snakeCase) ''RelInfo)
newtype FieldName
@ -163,6 +164,7 @@ newtype FieldName
deriving ( Show, Eq, Ord, Hashable, FromJSON, ToJSON
, FromJSONKey, ToJSONKey, Lift, Data, Generic
, IsString, Arbitrary, NFData, Cacheable
, Semigroup
)
instance IsIden FieldName where
@ -225,7 +227,7 @@ data PrimaryKey a
= PrimaryKey
{ _pkConstraint :: !Constraint
, _pkColumns :: !(NonEmpty a)
} deriving (Show, Eq, Generic)
} deriving (Show, Eq, Generic, Foldable)
instance (NFData a) => NFData (PrimaryKey a)
instance (Cacheable a) => Cacheable (PrimaryKey a)
$(makeLenses ''PrimaryKey)

View File

@ -101,6 +101,7 @@ instance (FromJSON a) => FromJSON (DMLQuery a) where
newtype OrderType
= OrderType { unOrderType :: S.OrderType }
deriving (Show, Eq, Lift, Generic)
instance Hashable OrderType
instance FromJSON OrderType where
parseJSON =
@ -112,6 +113,7 @@ instance FromJSON OrderType where
newtype NullsOrder
= NullsOrder { unNullsOrder :: S.NullsOrder }
deriving (Show, Eq, Lift, Generic)
instance Hashable NullsOrder
instance FromJSON NullsOrder where
parseJSON =
@ -176,7 +178,8 @@ data OrderByItemG a
{ obiType :: !(Maybe OrderType)
, obiColumn :: !a
, obiNulls :: !(Maybe NullsOrder)
} deriving (Show, Eq, Lift, Functor, Foldable, Traversable)
} deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Generic)
instance (Hashable a) => Hashable (OrderByItemG a)
type OrderByItem = OrderByItemG OrderByCol

View File

@ -217,6 +217,7 @@ data SchemaCache
, scCustomTypes :: !(NonObjectTypeMap, AnnotatedObjects)
, scGCtxMap :: !GC.GCtxMap
, scDefaultRemoteGCtx :: !GC.GCtx
, scRelayGCtxMap :: !GC.GCtxMap
, scDepMap :: !DepMap
, scInconsistentObjs :: ![InconsistentMetadata]
, scCronTriggers :: !(M.HashMap TriggerName CronTriggerInfo)

View File

@ -38,6 +38,7 @@ data Select
} deriving (Show, Eq, Generic, Data)
instance NFData Select
instance Cacheable Select
instance Hashable Select
mkSelect :: Select
mkSelect = Select [] Nothing [] Nothing
@ -46,7 +47,7 @@ mkSelect = Select [] Nothing [] Nothing
newtype LimitExp
= LimitExp SQLExp
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL LimitExp where
toSQL (LimitExp se) =
@ -54,15 +55,15 @@ instance ToSQL LimitExp where
newtype OffsetExp
= OffsetExp SQLExp
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL OffsetExp where
toSQL (OffsetExp se) =
"OFFSET" <-> toSQL se
newtype OrderByExp
= OrderByExp [OrderByItem]
deriving (Show, Eq, NFData, Data, Cacheable)
= OrderByExp (NonEmpty OrderByItem)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
data OrderByItem
= OrderByItem
@ -72,6 +73,7 @@ data OrderByItem
} deriving (Show, Eq, Generic, Data)
instance NFData OrderByItem
instance Cacheable OrderByItem
instance Hashable OrderByItem
instance ToSQL OrderByItem where
toSQL (OrderByItem e ot no) =
@ -81,6 +83,7 @@ data OrderType = OTAsc | OTDesc
deriving (Show, Eq, Lift, Generic, Data)
instance NFData OrderType
instance Cacheable OrderType
instance Hashable OrderType
instance ToSQL OrderType where
toSQL OTAsc = "ASC"
@ -92,6 +95,7 @@ data NullsOrder
deriving (Show, Eq, Lift, Generic, Data)
instance NFData NullsOrder
instance Cacheable NullsOrder
instance Hashable NullsOrder
instance ToSQL NullsOrder where
toSQL NFirst = "NULLS FIRST"
@ -99,11 +103,11 @@ instance ToSQL NullsOrder where
instance ToSQL OrderByExp where
toSQL (OrderByExp l) =
"ORDER BY" <-> (", " <+> l)
"ORDER BY" <-> (", " <+> toList l)
newtype GroupByExp
= GroupByExp [SQLExp]
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL GroupByExp where
toSQL (GroupByExp idens) =
@ -111,7 +115,7 @@ instance ToSQL GroupByExp where
newtype FromExp
= FromExp [FromItem]
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL FromExp where
toSQL (FromExp items) =
@ -151,7 +155,7 @@ mkRowExp extrs = let
newtype HavingExp
= HavingExp BoolExp
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL HavingExp where
toSQL (HavingExp be) =
@ -159,7 +163,7 @@ instance ToSQL HavingExp where
newtype WhereFrag
= WhereFrag { getWFBoolExp :: BoolExp }
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL WhereFrag where
toSQL (WhereFrag be) =
@ -194,6 +198,7 @@ data Qual
deriving (Show, Eq, Generic, Data)
instance NFData Qual
instance Cacheable Qual
instance Hashable Qual
mkQual :: QualifiedTable -> Qual
mkQual = QualTable
@ -211,6 +216,7 @@ data QIden
deriving (Show, Eq, Generic, Data)
instance NFData QIden
instance Cacheable QIden
instance Hashable QIden
instance ToSQL QIden where
toSQL (QIden qual iden) =
@ -218,7 +224,7 @@ instance ToSQL QIden where
newtype SQLOp
= SQLOp {sqlOpTxt :: T.Text}
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
incOp :: SQLOp
incOp = SQLOp "+"
@ -240,7 +246,7 @@ jsonbDeleteAtPathOp = SQLOp "#-"
newtype TypeAnn
= TypeAnn { unTypeAnn :: T.Text }
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL TypeAnn where
toSQL (TypeAnn ty) = "::" <> TB.text ty
@ -266,6 +272,9 @@ jsonTypeAnn = mkTypeAnn $ PGTypeScalar PGJSON
jsonbTypeAnn :: TypeAnn
jsonbTypeAnn = mkTypeAnn $ PGTypeScalar PGJSONB
boolTypeAnn :: TypeAnn
boolTypeAnn = mkTypeAnn $ PGTypeScalar PGBoolean
data CountType
= CTStar
| CTSimple ![PGCol]
@ -273,6 +282,7 @@ data CountType
deriving (Show, Eq, Generic, Data)
instance NFData CountType
instance Cacheable CountType
instance Hashable CountType
instance ToSQL CountType where
toSQL CTStar = "*"
@ -283,7 +293,7 @@ instance ToSQL CountType where
newtype TupleExp
= TupleExp [SQLExp]
deriving (Show, Eq, NFData, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance ToSQL TupleExp where
toSQL (TupleExp exps) =
@ -308,6 +318,7 @@ data SQLExp
| SEBool !BoolExp
| SEExcluded !Iden
| SEArray ![SQLExp]
| SEArrayIndex !SQLExp !SQLExp
| SETuple !TupleExp
| SECount !CountType
| SENamedArg !Iden !SQLExp
@ -315,6 +326,7 @@ data SQLExp
deriving (Show, Eq, Generic, Data)
instance NFData SQLExp
instance Cacheable SQLExp
instance Hashable SQLExp
withTyAnn :: PGScalarType -> SQLExp -> SQLExp
withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeScalar colTy
@ -324,7 +336,7 @@ instance J.ToJSON SQLExp where
newtype Alias
= Alias { getAlias :: Iden }
deriving (Show, Eq, NFData, Hashable, Data, Cacheable)
deriving (Show, Eq, NFData, Data, Cacheable, Hashable)
instance IsIden Alias where
toIden (Alias iden) = iden
@ -376,6 +388,9 @@ instance ToSQL SQLExp where
<> toSQL i
toSQL (SEArray exps) = "ARRAY" <> TB.char '['
<> (", " <+> exps) <> TB.char ']'
toSQL (SEArrayIndex arrayExp indexExp) =
paren (toSQL arrayExp)
<> TB.char '[' <> toSQL indexExp <> TB.char ']'
toSQL (SETuple tup) = toSQL tup
toSQL (SECount ty) = "COUNT" <> paren (toSQL ty)
-- https://www.postgresql.org/docs/current/sql-syntax-calling-funcs.html
@ -390,6 +405,7 @@ data Extractor = Extractor !SQLExp !(Maybe Alias)
deriving (Show, Eq, Generic, Data)
instance NFData Extractor
instance Cacheable Extractor
instance Hashable Extractor
mkSQLOpExp
:: SQLOp
@ -437,6 +453,7 @@ data DistinctExpr
deriving (Show, Eq, Generic, Data)
instance NFData DistinctExpr
instance Cacheable DistinctExpr
instance Hashable DistinctExpr
instance ToSQL DistinctExpr where
toSQL DistinctSimple = "DISTINCT"
@ -450,6 +467,7 @@ data FunctionArgs
} deriving (Show, Eq, Generic, Data)
instance NFData FunctionArgs
instance Cacheable FunctionArgs
instance Hashable FunctionArgs
instance ToSQL FunctionArgs where
toSQL (FunctionArgs positionalArgs namedArgsMap) =
@ -464,6 +482,7 @@ data DefinitionListItem
} deriving (Show, Eq, Data, Generic)
instance NFData DefinitionListItem
instance Cacheable DefinitionListItem
instance Hashable DefinitionListItem
instance ToSQL DefinitionListItem where
toSQL (DefinitionListItem column columnType) =
@ -476,6 +495,7 @@ data FunctionAlias
} deriving (Show, Eq, Data, Generic)
instance NFData FunctionAlias
instance Cacheable FunctionAlias
instance Hashable FunctionAlias
mkSimpleFunctionAlias :: Iden -> FunctionAlias
mkSimpleFunctionAlias identifier =
@ -500,6 +520,7 @@ data FunctionExp
} deriving (Show, Eq, Generic, Data)
instance NFData FunctionExp
instance Cacheable FunctionExp
instance Hashable FunctionExp
instance ToSQL FunctionExp where
toSQL (FunctionExp qf args alsM) =
@ -511,11 +532,13 @@ data FromItem
| FIFunc !FunctionExp
| FIUnnest ![SQLExp] !Alias ![SQLExp]
| FISelect !Lateral !Select !Alias
| FISelectWith !Lateral !(SelectWithG Select) !Alias
| FIValues !ValuesExp !Alias !(Maybe [PGCol])
| FIJoin !JoinExpr
deriving (Show, Eq, Generic, Data)
instance NFData FromItem
instance Cacheable FromItem
instance Hashable FromItem
mkSelFromItem :: Select -> Alias -> FromItem
mkSelFromItem = FISelect (Lateral False)
@ -538,6 +561,8 @@ instance ToSQL FromItem where
"UNNEST" <> paren (", " <+> args) <-> toSQL als <> paren (", " <+> cols)
toSQL (FISelect mla sel al) =
toSQL mla <-> paren (toSQL sel) <-> toSQL al
toSQL (FISelectWith mla selWith al) =
toSQL mla <-> paren (toSQL selWith) <-> toSQL al
toSQL (FIValues valsExp al mCols) =
paren (toSQL valsExp) <-> toSQL al
<-> toSQL (toColTupExp <$> mCols)
@ -545,7 +570,7 @@ instance ToSQL FromItem where
toSQL je
newtype Lateral = Lateral Bool
deriving (Show, Eq, Data, NFData, Cacheable)
deriving (Show, Eq, Data, NFData, Cacheable, Hashable)
instance ToSQL Lateral where
toSQL (Lateral True) = "LATERAL"
@ -560,6 +585,7 @@ data JoinExpr
} deriving (Show, Eq, Generic, Data)
instance NFData JoinExpr
instance Cacheable JoinExpr
instance Hashable JoinExpr
instance ToSQL JoinExpr where
toSQL je =
@ -576,6 +602,7 @@ data JoinType
deriving (Eq, Show, Generic, Data)
instance NFData JoinType
instance Cacheable JoinType
instance Hashable JoinType
instance ToSQL JoinType where
toSQL Inner = "INNER JOIN"
@ -589,6 +616,7 @@ data JoinCond
deriving (Show, Eq, Generic, Data)
instance NFData JoinCond
instance Cacheable JoinCond
instance Hashable JoinCond
instance ToSQL JoinCond where
toSQL (JoinOn be) =
@ -612,6 +640,7 @@ data BoolExp
deriving (Show, Eq, Generic, Data)
instance NFData BoolExp
instance Cacheable BoolExp
instance Hashable BoolExp
-- removes extraneous 'AND true's
simplifyBoolExp :: BoolExp -> BoolExp
@ -667,6 +696,7 @@ data BinOp = AndOp | OrOp
deriving (Show, Eq, Generic, Data)
instance NFData BinOp
instance Cacheable BinOp
instance Hashable BinOp
instance ToSQL BinOp where
toSQL AndOp = "AND"
@ -695,6 +725,7 @@ data CompareOp
deriving (Eq, Generic, Data)
instance NFData CompareOp
instance Cacheable CompareOp
instance Hashable CompareOp
instance Show CompareOp where
show = \case
@ -841,7 +872,7 @@ instance ToSQL SQLConflict where
newtype ValuesExp
= ValuesExp [TupleExp]
deriving (Show, Eq, Data, NFData, Cacheable)
deriving (Show, Eq, Data, NFData, Cacheable, Hashable)
instance ToSQL ValuesExp where
toSQL (ValuesExp tuples) =
@ -880,14 +911,20 @@ instance ToSQL CTE where
CTEUpdate q -> toSQL q
CTEDelete q -> toSQL q
data SelectWith
data SelectWithG v
= SelectWith
{ swCTEs :: [(Alias, CTE)]
{ swCTEs :: ![(Alias, v)]
, swSelect :: !Select
} deriving (Show, Eq)
} deriving (Show, Eq, Generic, Data)
instance ToSQL SelectWith where
instance (NFData v) => NFData (SelectWithG v)
instance (Cacheable v) => Cacheable (SelectWithG v)
instance (Hashable v) => Hashable (SelectWithG v)
instance (ToSQL v) => ToSQL (SelectWithG v) where
toSQL (SelectWith ctes sel) =
"WITH " <> (", " <+> map f ctes) <-> toSQL sel
where
f (Alias al, q) = toSQL al <-> "AS" <-> paren (toSQL q)
type SelectWith = SelectWithG CTE

View File

@ -1,5 +1,6 @@
module Hasura.SQL.Rewrite
( prefixNumToAliases
, prefixNumToAliasesSelectWith
) where
import qualified Data.HashMap.Strict as Map
@ -20,6 +21,11 @@ prefixNumToAliases :: S.Select -> S.Select
prefixNumToAliases s =
uSelect s `evalState` UniqSt 0 Map.empty
prefixNumToAliasesSelectWith
:: S.SelectWithG S.Select -> S.SelectWithG S.Select
prefixNumToAliasesSelectWith s =
uSelectWith s `evalState` UniqSt 0 Map.empty
type Rewrite a = State a
data UniqSt
@ -56,6 +62,12 @@ restoringIdens action = do
modify' $ \s -> s { _uqIdens = idens }
return res
uSelectWith :: S.SelectWithG S.Select -> Uniq (S.SelectWithG S.Select)
uSelectWith (S.SelectWith ctes baseSelect) =
S.SelectWith
<$> forM ctes (\(als, sel) -> (als,) <$> restoringIdens (uSelect sel))
<*> uSelect baseSelect
uSelect :: S.Select -> Uniq S.Select
uSelect sel = do
-- this has to be the first thing to process
@ -114,6 +126,10 @@ uFromItem fromItem = case fromItem of
newSel <- restoringIdens $ uSelect sel
newAls <- addAlias al
return $ S.FISelect isLateral newSel newAls
S.FISelectWith isLateral selectWith al -> do
newSelectWith <- uSelectWith selectWith
newAls <- addAlias al
return $ S.FISelectWith isLateral newSelectWith newAls
S.FIValues (S.ValuesExp tups) als mCols -> do
newValExp <- fmap S.ValuesExp $
forM tups $ \(S.TupleExp ts) ->
@ -197,8 +213,10 @@ uSqlExp = restoringIdens . \case
S.SEExcluded <$> return t
S.SEArray l ->
S.SEArray <$> mapM uSqlExp l
S.SEArrayIndex arrayExp indexExp ->
S.SEArrayIndex <$> uSqlExp arrayExp <*> uSqlExp indexExp
S.SETuple (S.TupleExp l) ->
S.SEArray <$> mapM uSqlExp l
S.SETuple . S.TupleExp <$> mapM uSqlExp l
S.SECount cty -> return $ S.SECount cty
S.SENamedArg arg val -> S.SENamedArg arg <$> uSqlExp val
S.SEFunction funcExp -> S.SEFunction <$> uFunctionExp funcExp

View File

@ -319,8 +319,8 @@ v1QueryHandler query = do
v1Alpha1GQHandler
:: (HasVersion, MonadIO m)
=> GH.GQLBatchedReqs GH.GQLQueryText -> Handler m (HttpResponse EncJSON)
v1Alpha1GQHandler query = do
=> E.GraphQLQueryType -> GH.GQLBatchedReqs GH.GQLQueryText -> Handler m (HttpResponse EncJSON)
v1Alpha1GQHandler queryType query = do
userInfo <- asks hcUser
reqHeaders <- asks hcReqHeaders
manager <- scManager . hcServerCtx <$> ask
@ -335,12 +335,19 @@ v1Alpha1GQHandler query = do
responseErrorsConfig <- scResponseInternalErrorsConfig . hcServerCtx <$> ask
let execCtx = E.ExecutionCtx logger sqlGenCtx pgExecCtx planCache
(lastBuiltSchemaCache sc) scVer manager enableAL
flip runReaderT execCtx $ GH.runGQBatched requestId responseErrorsConfig userInfo reqHeaders query
flip runReaderT execCtx $
GH.runGQBatched requestId responseErrorsConfig userInfo reqHeaders queryType query
v1GQHandler
:: (HasVersion, MonadIO m)
=> GH.GQLBatchedReqs GH.GQLQueryText
-> Handler m (HttpResponse EncJSON)
v1GQHandler = v1Alpha1GQHandler E.QueryHasura
v1GQRelayHandler
:: (HasVersion, MonadIO m)
=> GH.GQLBatchedReqs GH.GQLQueryText -> Handler m (HttpResponse EncJSON)
v1GQHandler = v1Alpha1GQHandler
v1GQRelayHandler = v1Alpha1GQHandler E.QueryRelay
gqlExplainHandler :: (HasVersion, MonadIO m) => GE.GQLExplain -> Handler m (HttpResponse EncJSON)
gqlExplainHandler query = do
@ -610,11 +617,14 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
when enableGraphQL $ do
Spock.post "v1alpha1/graphql" $ spockAction GH.encodeGQErr id $
mkPostHandler $ mkAPIRespHandler v1Alpha1GQHandler
mkPostHandler $ mkAPIRespHandler $ v1Alpha1GQHandler E.QueryHasura
Spock.post "v1/graphql" $ spockAction GH.encodeGQErr allMod200 $
mkPostHandler $ mkAPIRespHandler v1GQHandler
Spock.post "v1/relay" $ spockAction GH.encodeGQErr allMod200 $
mkPostHandler $ mkAPIRespHandler v1GQRelayHandler
when (isDeveloperAPIEnabled serverCtx) $ do
Spock.get "dev/ekg" $ spockAction encodeQErr id $
mkGetHandler $ do

View File

@ -5,12 +5,12 @@ import Hasura.Prelude
import Control.Lens ((^..))
import Data.Aeson
import Data.Aeson.Internal
import Data.Char
import Language.Haskell.TH.Syntax (Lift, Q, TExp)
import System.Environment
import System.Exit
import System.Process
import Data.Aeson.Internal
import qualified Data.ByteString as B
import qualified Data.CaseInsensitive as CI
@ -20,6 +20,7 @@ import qualified Data.Text as T
import qualified Data.Text.IO as TI
import qualified Data.UUID as UUID
import qualified Data.UUID.V4 as UUID
import qualified Data.Vector as V
import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.HTTP.Client as HC
import qualified Network.HTTP.Types as HTTP
@ -27,7 +28,6 @@ import qualified Network.Wreq as Wreq
import qualified Text.Regex.TDFA as TDFA
import qualified Text.Regex.TDFA.ReadRegex as TDFA
import qualified Text.Regex.TDFA.TDFA as TDFA
import qualified Data.Vector as V
import Hasura.RQL.Instances ()

View File

@ -25,6 +25,6 @@ query:
response:
errors:
- extensions:
path: $.selectionSet.author.selectionSet.authorFragment.selectionSet.articles.selectionSet.articleFragment.selectionSet.author.selectionSet.authorFragment
path: $.selectionSet.author.selectionSet.authorFragment.selectionSet.articles.selectionSet.articleFragment.selectionSet.author.selectionSet
code: validation-failed
message: cannot spread fragment "authorFragment" within itself via articleFragment,authorFragment

View File

@ -0,0 +1,49 @@
description: Get last page of articles with 3 items
url: /v1/relay
status: 200
query:
query: |
query {
article_connection(
last: 3
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
title
content
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiA0fQ==
endCursor: eyJpZCIgOiA2fQ==
hasPreviousPage: true
hasNextPage: false
edges:
- cursor: eyJpZCIgOiA0fQ==
node:
title: Article 4
content: Sample article content 4
author_id: 2
- cursor: eyJpZCIgOiA1fQ==
node:
title: Article 5
content: Sample article content 5
author_id: 2
- cursor: eyJpZCIgOiA2fQ==
node:
title: Article 6
content: Sample article content 6
author_id: 3

View File

@ -0,0 +1,45 @@
description: Get last page of articles with 2 items before 'Article 4'
url: /v1/relay
status: 200
query:
query: |
query {
article_connection(
last: 2
before: "eyJpZCIgOiA0fQ=="
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
title
content
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiAyfQ==
endCursor: eyJpZCIgOiAzfQ==
hasPreviousPage: true
hasNextPage: true
edges:
- cursor: eyJpZCIgOiAyfQ==
node:
title: Article 2
content: Sample article content 2
author_id: 1
- cursor: eyJpZCIgOiAzfQ==
node:
title: Article 3
content: Sample article content 3
author_id: 1

View File

@ -0,0 +1,40 @@
description: Get last page of articles before 'Article 2'
url: /v1/relay
status: 200
query:
query: |
query {
article_connection(
last: 2
before: "eyJpZCIgOiAyfQ=="
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
title
content
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiAxfQ==
endCursor: eyJpZCIgOiAxfQ==
hasPreviousPage: false
hasNextPage: true
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
title: Article 1
content: Sample article content 1
author_id: 1

View File

@ -0,0 +1,49 @@
description: Get 1st page of articles with 3 items
url: /v1/relay
status: 200
query:
query: |
query {
article_connection(
first: 3
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
title
content
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiAxfQ==
endCursor: eyJpZCIgOiAzfQ==
hasPreviousPage: false
hasNextPage: true
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
title: Article 1
content: Sample article content 1
author_id: 1
- cursor: eyJpZCIgOiAyfQ==
node:
title: Article 2
content: Sample article content 2
author_id: 1
- cursor: eyJpZCIgOiAzfQ==
node:
title: Article 3
content: Sample article content 3
author_id: 1

View File

@ -0,0 +1,45 @@
description: Get 2nd page of articles with 2 items
url: /v1/relay
status: 200
query:
query: |
query {
article_connection(
first: 2
after: "eyJpZCIgOiAzfQ=="
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
title
content
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiA0fQ==
endCursor: eyJpZCIgOiA1fQ==
hasPreviousPage: true
hasNextPage: true
edges:
- cursor: eyJpZCIgOiA0fQ==
node:
title: Article 4
content: Sample article content 4
author_id: 2
- cursor: eyJpZCIgOiA1fQ==
node:
title: Article 5
content: Sample article content 5
author_id: 2

View File

@ -0,0 +1,40 @@
description: Get 3rd page of articles
url: /v1/relay
status: 200
query:
query: |
query {
article_connection(
first: 3
after: "eyJpZCIgOiA1fQ=="
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
title
content
author_id
}
}
}
}
response:
data:
article_connection:
pageInfo:
startCursor: eyJpZCIgOiA2fQ==
endCursor: eyJpZCIgOiA2fQ==
hasPreviousPage: true
hasNextPage: false
edges:
- cursor: eyJpZCIgOiA2fQ==
node:
title: Article 6
content: Sample article content 6
author_id: 3

View File

@ -0,0 +1,44 @@
description: Fetch 1st page from last of articles ordered by their article count
url: /v1/relay
status: 200
query:
query: |
query {
author_connection(
last: 1
order_by: {articles_aggregate: {count: asc}}
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
name
articles_aggregate{
aggregate{
count
}
}
}
}
}
}
response:
data:
author_connection:
pageInfo:
startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
hasPreviousPage: true
hasNextPage: false
edges:
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
node:
name: Author 1
articles_aggregate:
aggregate:
count: 3

View File

@ -0,0 +1,51 @@
description: Fetch 2nd page from last of articles ordered by their article count
url: /v1/relay
status: 200
query:
query: |
query {
author_connection(
last: 2
order_by: {articles_aggregate: {count: asc}}
before: "eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9"
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
name
articles_aggregate{
aggregate{
count
}
}
}
}
}
}
response:
data:
author_connection:
pageInfo:
startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
hasPreviousPage: true
hasNextPage: true
edges:
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
node:
name: Author 3
articles_aggregate:
aggregate:
count: 1
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
node:
name: Author 2
articles_aggregate:
aggregate:
count: 2

View File

@ -0,0 +1,45 @@
description: Fetch 3rd page from last of articles ordered by their article count
url: /v1/relay
status: 200
query:
query: |
query {
author_connection(
last: 1
order_by: {articles_aggregate: {count: asc}}
before: "eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9"
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
name
articles_aggregate{
aggregate{
count
}
}
}
}
}
}
response:
data:
author_connection:
pageInfo:
startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
hasPreviousPage: false
hasNextPage: true
edges:
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
node:
name: Author 4
articles_aggregate:
aggregate:
count: 0

View File

@ -0,0 +1,50 @@
description: Fetch 1st page of articles ordered by their article count
url: /v1/relay
status: 200
query:
query: |
query {
author_connection(
first: 2
order_by: {articles_aggregate: {count: asc}}
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
name
articles_aggregate{
aggregate{
count
}
}
}
}
}
}
response:
data:
author_connection:
pageInfo:
startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
hasPreviousPage: false
hasNextPage: true
edges:
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAwfSwgImlkIiA6IDR9
node:
name: Author 4
articles_aggregate:
aggregate:
count: 0
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9
node:
name: Author 3
articles_aggregate:
aggregate:
count: 1

View File

@ -0,0 +1,51 @@
description: Fetch 2nd page of articles ordered by their article count
url: /v1/relay
status: 200
query:
query: |
query {
author_connection(
first: 2
order_by: {articles_aggregate: {count: asc}}
after: "eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAxfSwgImlkIiA6IDN9"
){
pageInfo{
startCursor
endCursor
hasPreviousPage
hasNextPage
}
edges{
cursor
node{
name
articles_aggregate{
aggregate{
count
}
}
}
}
}
}
response:
data:
author_connection:
pageInfo:
startCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
endCursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
hasPreviousPage: true
hasNextPage: false
edges:
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAyfSwgImlkIiA6IDJ9
node:
name: Author 2
articles_aggregate:
aggregate:
count: 2
- cursor: eyJhcnRpY2xlc19hZ2dyZWdhdGUiIDogeyJjb3VudCIgOiAzfSwgImlkIiA6IDF9
node:
name: Author 1
articles_aggregate:
aggregate:
count: 3

View File

@ -0,0 +1,86 @@
description: Query articles connection with pageInfo and edges
url: /v1/relay
status: 200
query:
query: |
query {
article_connection{
pageInfo{
hasNextPage
startCursor
endCursor
hasPreviousPage
}
edges{
cursor
node_id: node{
id
}
node{
title
content
author{
name
}
}
}
}
}
response:
data:
article_connection:
pageInfo:
hasNextPage: false
startCursor: eyJpZCIgOiAxfQ==
endCursor: eyJpZCIgOiA2fQ==
hasPreviousPage: false
edges:
- cursor: eyJpZCIgOiAxfQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAxfX0=
node:
title: Article 1
content: Sample article content 1
author:
name: Author 1
- cursor: eyJpZCIgOiAyfQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAyfX0=
node:
title: Article 2
content: Sample article content 2
author:
name: Author 1
- cursor: eyJpZCIgOiAzfQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
node:
title: Article 3
content: Sample article content 3
author:
name: Author 1
- cursor: eyJpZCIgOiA0fQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA0fX0=
node:
title: Article 4
content: Sample article content 4
author:
name: Author 2
- cursor: eyJpZCIgOiA1fQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA1fX0=
node:
title: Article 5
content: Sample article content 5
author:
name: Author 2
- cursor: eyJpZCIgOiA2fQ==
node_id:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA2fX0=
node:
title: Article 6
content: Sample article content 6
author:
name: Author 3

View File

@ -0,0 +1,128 @@
description: Query author connection with edges
url: /v1/relay
status: 200
query:
query: |
query {
author_connection{
edges{
cursor
node{
id
name
articles{
id
title
content
}
articles_aggregate{
aggregate{
count
}
}
articles_connection{
edges{
cursor
node{
id
title
content
}
}
}
}
}
}
}
response:
data:
author_connection:
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDF9fQ==
name: Author 1
articles:
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAxfX0=
title: Article 1
content: Sample article content 1
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAyfX0=
title: Article 2
content: Sample article content 2
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
title: Article 3
content: Sample article content 3
articles_aggregate:
aggregate:
count: 3
articles_connection:
edges:
- cursor: eyJpZCIgOiAxfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAxfX0=
title: Article 1
content: Sample article content 1
- cursor: eyJpZCIgOiAyfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAyfX0=
title: Article 2
content: Sample article content 2
- cursor: eyJpZCIgOiAzfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
title: Article 3
content: Sample article content 3
- cursor: eyJpZCIgOiAyfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDJ9fQ==
name: Author 2
articles:
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA0fX0=
title: Article 4
content: Sample article content 4
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA1fX0=
title: Article 5
content: Sample article content 5
articles_aggregate:
aggregate:
count: 2
articles_connection:
edges:
- cursor: eyJpZCIgOiA0fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA0fX0=
title: Article 4
content: Sample article content 4
- cursor: eyJpZCIgOiA1fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA1fX0=
title: Article 5
content: Sample article content 5
- cursor: eyJpZCIgOiAzfQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDN9fQ==
name: Author 3
articles:
- id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA2fX0=
title: Article 6
content: Sample article content 6
articles_aggregate:
aggregate:
count: 1
articles_connection:
edges:
- cursor: eyJpZCIgOiA2fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiA2fX0=
title: Article 6
content: Sample article content 6
- cursor: eyJpZCIgOiA0fQ==
node:
id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDR9fQ==
name: Author 4
articles: []
articles_aggregate:
aggregate:
count: 0
articles_connection:
edges: []

View File

@ -0,0 +1,19 @@
# Negative test: a Relay global ID must be the base64 encoding of a JSON
# object naming a table and its primary-key columns. "eyJpZCIgOiA0fQ=="
# decodes to {"id" : 4} only, so the server must reject it.
description: Query node interface with invalid node id
url: /v1/relay
status: 200
query:
  query: |
    query {
      node(id: "eyJpZCIgOiA0fQ=="){
        __typename
        ... on author{
          name
        }
      }
    }
# GraphQL validation errors are returned with HTTP 200 and an errors array.
response:
  errors:
  - extensions:
      path: "$.selectionSet.node"
      code: validation-failed
    message: the node id is invalid

View File

@ -0,0 +1,67 @@
# Positive test for the top-level Relay `node` field: the same global ID can
# be resolved under any field alias, and inline fragments select type-specific
# fields. The variables are base64-encoded {"table": ..., "columns": ...}
# payloads for author id=2 and article id=3.
description: Query the relay Node interface
url: /v1/relay
status: 200
query:
  variables:
    author_id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDJ9fQ==
    article_id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImFydGljbGUifSwgImNvbHVtbnMiIDogeyJpZCIgOiAzfX0=
  query: |
    query nodeQuery($author_id: ID!, $article_id: ID!){
      author_node: node(id: $author_id){
        __typename
        ... on author{
          name
          articles_connection{
            edges{
              cursor
              node{
                title
                content
              }
            }
          }
        }
      }
      article_node_with_author_id: node(id: $author_id){
        id
        ... on article{
          title
          content
        }
      }
      article_node: node(id: $article_id){
        __typename
        ... on article{
          title
          content
          author{
            name
          }
        }
      }
    }
response:
  data:
    author_node:
      __typename: author
      name: Author 2
      articles_connection:
        edges:
        - cursor: eyJpZCIgOiA0fQ==
          node:
            title: Article 4
            content: Sample article content 4
        - cursor: eyJpZCIgOiA1fQ==
          node:
            title: Article 5
            content: Sample article content 5
    # An author ID resolved through an `... on article` fragment matches no
    # fragment, so only the interface-level `id` field is returned.
    article_node_with_author_id:
      id: eyJ0YWJsZSIgOiB7InNjaGVtYSIgOiAicHVibGljIiwgIm5hbWUiIDogImF1dGhvciJ9LCAiY29sdW1ucyIgOiB7ImlkIiA6IDJ9fQ==
    article_node:
      __typename: article
      title: Article 3
      content: Sample article content 3
      author:
        name: Author 1

View File

@ -0,0 +1,46 @@
# Tests selecting pageInfo alone (no edges) on a connection, including the
# same pageInfo selection repeated under several field aliases — each alias
# must resolve independently to the same values.
description: Query the article connection with only pageInfo
url: /v1/relay
status: 200
query:
  query: |
    query {
      article_connection(
        order_by: {title: asc}
        last: 2
      ){
        pageInfo{
          startCursor
          endCursor
          hasPreviousPage
          hasNextPage
        }
        startCursor: pageInfo{
          startCursor
        }
        endCursor: pageInfo{
          endCursor
        }
        hasPreviousPage: pageInfo{
          hasPreviousPage
        }
        hasNextPage: pageInfo{
          hasNextPage
        }
      }
    }
response:
  data:
    article_connection:
      pageInfo:
        startCursor: eyJ0aXRsZSIgOiAiQXJ0aWNsZSA1IiwgImlkIiA6IDV9
        endCursor: eyJ0aXRsZSIgOiAiQXJ0aWNsZSA2IiwgImlkIiA6IDZ9
        hasPreviousPage: true
        hasNextPage: false
      startCursor:
        startCursor: eyJ0aXRsZSIgOiAiQXJ0aWNsZSA1IiwgImlkIiA6IDV9
      endCursor:
        endCursor: eyJ0aXRsZSIgOiAiQXJ0aWNsZSA2IiwgImlkIiA6IDZ9
      hasPreviousPage:
        hasPreviousPage: true
      hasNextPage:
        hasNextPage: false

View File

@ -0,0 +1,21 @@
# Negative test: the Relay connection spec forbids paginating forwards and
# backwards in the same request, so supplying both cursors must fail
# validation.
description: Use after and before arguments in the same query
url: /v1/relay
status: 200
query:
  query: |
    query {
      author_connection(
        after: "eyJpZCIgOiAyfQ=="
        before: "eyJpZCIgOiA0fQ=="
      ){
        edges{
          cursor
        }
      }
    }
response:
  errors:
  - extensions:
      path: "$.selectionSet.author_connection"
      code: validation-failed
    message: '"after" and "before" are not allowed at once'

View File

@ -0,0 +1,21 @@
# Negative test: `first` (forward slice) and `last` (backward slice) are
# mutually exclusive on a Relay connection, so supplying both must fail
# validation.
description: Use first and last arguments in the same query
url: /v1/relay
status: 200
query:
  query: |
    query {
      author_connection(
        first: 1
        last: 2
      ){
        edges{
          cursor
        }
      }
    }
response:
  errors:
  - extensions:
      path: "$.selectionSet.author_connection"
      code: validation-failed
    message: '"first" and "last" are not allowed at once'

View File

@ -0,0 +1,79 @@
# Per-class DB setup for the relay test suite: creates and seeds two tables
# (author 1-N article via article.author_id), then tracks them and defines
# the object/array relationships the relay schema exposes. Both tables have
# a SERIAL primary key, which is required for them to appear in the relay
# schema.
type: bulk
args:
- type: run_sql
  args:
    sql: |
      CREATE TABLE author(
          id SERIAL PRIMARY KEY,
          name TEXT UNIQUE NOT NULL
      );
      INSERT INTO author (name)
      VALUES ('Author 1'), ('Author 2'), ('Author 3'), ('Author 4');
      CREATE TABLE article (
        id SERIAL PRIMARY KEY,
        title TEXT,
        content TEXT,
        author_id INTEGER REFERENCES author(id)
      );
      INSERT INTO article (title, content, author_id)
      VALUES
        (
          'Article 1',
          'Sample article content 1',
          1
        ),
        (
          'Article 2',
          'Sample article content 2',
          1
        ),
        (
          'Article 3',
          'Sample article content 3',
          1
        ),
        (
          'Article 4',
          'Sample article content 4',
          2
        ),
        (
          'Article 5',
          'Sample article content 5',
          2
        ),
        (
          'Article 6',
          'Sample article content 6',
          3
        );
# Track tables and define relationships
- type: track_table
  args:
    name: author
    schema: public
- type: track_table
  args:
    name: article
    schema: public
- type: create_object_relationship
  args:
    table: article
    name: author
    using:
      foreign_key_constraint_on: author_id
- type: create_array_relationship
  args:
    table: author
    name: articles
    using:
      foreign_key_constraint_on:
        table: article
        column: author_id
View File

@ -0,0 +1,8 @@
# Per-class DB teardown: drop the tables created in setup.yaml. `cascade`
# removes the Hasura metadata (tracking + relationships) along with the
# tables.
type: bulk
args:
- type: run_sql
  args:
    cascade: true
    sql: |
      DROP TABLE article;
      DROP TABLE author;

View File

@ -650,3 +650,56 @@ class TestGraphQLExplain:
resp_sql = resp_json[0]['sql']
exp_sql = conf['response'][0]['sql']
assert resp_sql == exp_sql, resp_json
@pytest.mark.parametrize('transport', ['http', 'websocket'])
@usefixtures('per_class_tests_db_state')
class TestRelayQueries:
    """End-to-end tests for the /v1/relay endpoint.

    Each test replays a YAML fixture (request + expected response) against
    the server via check_query_f, parametrized over both the HTTP and
    websocket transports. Database state is created once per class by the
    per_class_tests_db_state fixture (setup.yaml / teardown.yaml under dir()).
    """

    @classmethod
    def dir(cls):
        # Root directory of this class's YAML fixtures.
        return 'queries/graphql_query/relay'

    # Basic queries
    def test_article_connection(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/basic/article_connection.yaml', transport)

    def test_author_connection(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/basic/author_connection.yaml', transport)

    def test_node(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/basic/node.yaml', transport)

    def test_invalid_node(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/basic/invalid_node_id.yaml', transport)

    def test_only_pageinfo(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/basic/only_pageinfo.yaml', transport)

    # Articles forward pagination (3 fixture pages)
    def test_article_no_orderby_forward_pagination(self, hge_ctx, transport):
        _test_relay_pagination(hge_ctx, transport, self.dir() + "/article_pagination_no_orderby/forward", 3)

    # Articles backward pagination (3 fixture pages)
    def test_article_no_orderby_backward_pagination(self, hge_ctx, transport):
        _test_relay_pagination(hge_ctx, transport, self.dir() + "/article_pagination_no_orderby/backward", 3)

    # Authors forward pagination, ordered by an articles aggregate
    def test_author_orderby_articles_aggregate_orderby_forward_pagination(self, hge_ctx, transport):
        _test_relay_pagination(hge_ctx, transport, self.dir() + "/author_pagination_articles_aggregate_orderby/forward", 2)

    # Authors backward pagination, ordered by an articles aggregate
    def test_author_orderby_articles_aggregate_orderby_backward_pagination(self, hge_ctx, transport):
        _test_relay_pagination(hge_ctx, transport, self.dir() + "/author_pagination_articles_aggregate_orderby/backward", 3)

    # Pagination errors
    def test_first_and_last_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/pagination_errors/first_and_last.yaml", transport)

    def test_after_and_before_fail(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + "/pagination_errors/after_and_before.yaml", transport)
def _test_relay_pagination(hge_ctx, transport, test_file_prefix, no_of_pages):
    """Replay the per-page pagination fixtures page_1.yaml .. page_<n>.yaml.

    Each fixture under ``test_file_prefix`` carries the cursor produced by the
    previous page, so they must run in order.
    """
    # range(1, n + 1) yields the 1-based page numbers directly, avoiding the
    # manual `i + 1` offset of the original loop.
    for page_no in range(1, no_of_pages + 1):
        test_file = "page_" + str(page_no) + ".yaml"
        check_query_f(hge_ctx, test_file_prefix + "/" + test_file, transport)

View File

@ -65,7 +65,7 @@ def check_event(hge_ctx, evts_webhook, trig_name, table, operation, exp_ev_data,
def test_forbidden_when_admin_secret_reqd(hge_ctx, conf):
if conf['url'] == '/v1/graphql':
if conf['url'] == '/v1/graphql' or conf['url'] == '/v1/relay':
if conf['status'] == 404:
status = [404]
else:
@ -104,7 +104,7 @@ def test_forbidden_when_admin_secret_reqd(hge_ctx, conf):
def test_forbidden_webhook(hge_ctx, conf):
if conf['url'] == '/v1/graphql':
if conf['url'] == '/v1/graphql' or conf['url'] == '/v1/relay':
if conf['status'] == 404:
status = [404]
else:
@ -211,7 +211,7 @@ def check_query(hge_ctx, conf, transport='http', add_auth=True, claims_namespace
def validate_gql_ws_q(hge_ctx, conf, headers, retry=False, via_subscription=False):
assert 'response' in conf
assert conf['url'].endswith('/graphql')
assert conf['url'].endswith('/graphql') or conf['url'].endswith('/relay')
endpoint = conf['url']
query = conf['query']
exp_http_response = conf['response']
@ -226,6 +226,8 @@ def validate_gql_ws_q(hge_ctx, conf, headers, retry=False, via_subscription=Fals
if endpoint == '/v1alpha1/graphql':
ws_client = GQLWsClient(hge_ctx, '/v1alpha1/graphql')
elif endpoint == '/v1/relay':
ws_client = GQLWsClient(hge_ctx, '/v1/relay')
else:
ws_client = hge_ctx.ws_client
print(ws_client.ws_url)