Adding UDF (user-defined-functions) support to Data Connectors - GDC-820

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/8121
GitOrigin-RevId: ceb3e29e330bba294061f85c1f75700974d01452
This commit is contained in:
Lyndon Maydwell 2023-05-19 14:47:12 +10:00 committed by hasura-bot
parent 6caf33bf27
commit cb8e6feb2e
114 changed files with 3652 additions and 1333 deletions

View File

@ -139,7 +139,8 @@ The `GET /capabilities` endpoint is used by `graphql-engine` to discover the cap
"relationships": {},
"scalar_types": {
"DateTime": {"comparison_operators": {"DateTime": {"in_year": "Number"}}}
}
},
"user_defined_functions": {}
},
"config_schemas": {
"config_schema": {
@ -2425,6 +2426,181 @@ Breaking down the properties in the `delete`-typed mutation operation:
Delete operations return responses that are the same as insert and update operations, except the affected rows in `returning` are the deleted rows instead.
### User Defined Functions (UDFs)
Agents can implement user-defined functions for root-field queries by:
* Including the `user_defined_functions: {}` capability (see the sketch below)
* Handling `"type": "function"` QueryRequests
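In a TypeScript agent built on `@hasura/dc-api-types`, declaring the capability is a one-line addition to the agent's `Capabilities` value. A minimal sketch, mirroring the reference agent (other capabilities omitted for brevity):
```typescript
import { Capabilities } from "@hasura/dc-api-types";

// Opt in to user-defined functions; the empty object is the entire capability,
// as it currently has no sub-options. Other capabilities are omitted here.
const capabilities: Capabilities = {
  datasets: {},
  user_defined_functions: {},
};
```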
The format of function query requests closely matches that of table query requests (which were the only type of query request prior to the UDF implementation).
Differences include:
* The `type` field is set to `function` (see the dispatch sketch after this list)
  * This field was previously omitted, but is now either `table` or `function`
  * The change should not break older agents: they will ignore the `type` field, and since they do not expose the function capability they will only ever receive table queries, so their assumption that every query is a table query remains correct
* The name of the function is specified in the `function` field of the query
* Arguments are provided in the `function_arguments` field
  * This can include a session JSON argument, as configured in metadata
* Items in `relationships` can now contain a `source_function` field instead of a `source_table` field
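In a TypeScript agent this amounts to switching on the new discriminant. A minimal sketch, assuming hypothetical `handleTableQuery`/`handleFunctionQuery` helpers (these names are illustrative, not part of the API):
```typescript
import { FunctionRequest, QueryRequest, TableRequest } from "@hasura/dc-api-types";

// Illustrative handlers; a real agent would evaluate the query against its data source.
function handleTableQuery(request: TableRequest): void {
  console.log(`table query against ${JSON.stringify(request.table)}`);
}

function handleFunctionQuery(request: FunctionRequest): void {
  console.log(`function query against ${JSON.stringify(request.function)}`);
}

// QueryRequest is now a discriminated union, so the `type` field selects the case.
export function handleQueryRequest(request: QueryRequest): void {
  switch (request.type) {
    case "table":
      return handleTableQuery(request);
    case "function":
      return handleFunctionQuery(request);
  }
}
```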
#### Example
Schema:
```json
{
"tables": [
{
"name": [
"Artist"
],
"type": "table",
"primary_key": [
"ArtistId"
],
"description": "Collection of artists of music",
"columns": [
{
"name": "ArtistId",
"type": "number",
"nullable": false,
"description": "Artist primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "Name",
"type": "string",
"nullable": true,
"description": "The name of the artist",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
}
],
"functions": [
{
"name": ["fibonacci"],
"type": "read",
"returns": {
"type": "table",
"table": ["Artist"]
},
"arity": "many",
"args": [
{ "name": "take", "type": "Integer" },
{ "name": "__hasura_session", "type": "JSON" }
],
"description": "Fibonacci function - Take N Fibonacci numbers!"
}
]
}
```
GraphQL:
```graphql
{
fibonacci(args: {take: 9}) {
ArtistId
Name
myself {
Name
}
}
}
```
Query Sent to Agent:
```json
{
"type": "function",
"function": [
"fibonacci"
],
"function_arguments": {
"upto": 9,
"__hasura_session": {
"x-hasura-artist-name": "patricia",
"x-hasura-role": "admin"
}
},
"relationships": [
{
"type": "function",
"source_function": [
"fibonacci"
],
"relationships": {
"myself": {
"target_table": [
"Artist"
],
"relationship_type": "object",
"column_mapping": {
"ArtistId": "ArtistId"
}
}
}
},
{
"type": "table",
"source_table": [
"Artist"
],
"relationships": {
"myself": {
"target_table": [
"Artist"
],
"relationship_type": "object",
"column_mapping": {
"ArtistId": "ArtistId"
}
}
}
}
],
"query": {
"fields": {
"Name": {
"type": "column",
"column": "Name",
"column_type": "string"
},
"myself": {
"type": "relationship",
"relationship": "myself",
"query": {
"fields": {
"Name": {
"type": "column",
"column": "Name",
"column_type": "string"
}
}
}
},
"ArtistId": {
"type": "column",
"column": "ArtistId",
"column_type": "number"
}
}
}
}
```
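The response to a function request is not shown here, but it follows the same shape as an ordinary query response, with relationship fields nesting further `rows`. A sketch with purely illustrative values:
```json
{
  "rows": [
    {
      "ArtistId": 1,
      "Name": "AC/DC",
      "myself": {
        "rows": [
          { "Name": "AC/DC" }
        ]
      }
    }
  ]
}
```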
### Datasets
The `/datasets` resource can be used to create new databases/schemas from templates.

View File

@ -1,6 +1,6 @@
{
"name": "@hasura/dc-api-types",
"version": "0.30.0",
"version": "0.31.0",
"description": "Hasura GraphQL Engine Data Connector Agent API types",
"author": "Hasura (https://github.com/hasura/graphql-engine)",
"license": "Apache-2.0",

View File

@ -530,6 +530,9 @@
},
"subscriptions": {
"$ref": "#/components/schemas/SubscriptionCapabilities"
},
"user_defined_functions": {
"$ref": "#/components/schemas/UserDefinedFunctionCapabilities"
}
},
"type": "object"
@ -706,6 +709,7 @@
"ExplainCapabilities": {},
"RawCapabilities": {},
"DatasetCapabilities": {},
"UserDefinedFunctionCapabilities": {},
"Licensing": {},
"ConfigSchemaResponse": {
"nullable": false,
@ -1035,6 +1039,14 @@
},
"SchemaResponse": {
"properties": {
"functions": {
"default": [],
"description": "Available functions",
"items": {
"$ref": "#/components/schemas/FunctionInfo"
},
"type": "array"
},
"objectTypes": {
"description": "Object type definitions referenced in this schema",
"items": {
@ -1248,6 +1260,131 @@
],
"type": "object"
},
"FunctionName": {
"description": "The fully qualified name of a function, where the last item in the array is the function name and any earlier items represent the namespacing of the function name",
"items": {
"type": "string"
},
"type": "array"
},
"FunctionType": {
"enum": [
"read",
"write"
],
"type": "string"
},
"FunctionReturnsTable": {
"properties": {
"table": {
"$ref": "#/components/schemas/TableName"
},
"type": {
"enum": [
"table"
],
"type": "string"
}
},
"required": [
"table",
"type"
],
"type": "object"
},
"FunctionReturnsUnknown": {
"properties": {
"type": {
"enum": [
"unknown"
],
"type": "string"
}
},
"required": [
"type"
],
"type": "object"
},
"FunctionReturnType": {
"discriminator": {
"mapping": {
"table": "FunctionReturnsTable",
"unknown": "FunctionReturnsUnknown"
},
"propertyName": "type"
},
"oneOf": [
{
"$ref": "#/components/schemas/FunctionReturnsTable"
},
{
"$ref": "#/components/schemas/FunctionReturnsUnknown"
}
]
},
"FunctionInformationArgument": {
"properties": {
"name": {
"description": "The name of the argument",
"type": "string"
},
"optional": {
"default": false,
"description": "If the argument can be omitted",
"type": "boolean"
},
"type": {
"$ref": "#/components/schemas/ScalarType"
}
},
"required": [
"name",
"type"
],
"type": "object"
},
"FunctionInfo": {
"properties": {
"args": {
"description": "argument info - name/types",
"items": {
"$ref": "#/components/schemas/FunctionInformationArgument"
},
"type": "array"
},
"description": {
"description": "Description of the table",
"nullable": true,
"type": "string"
},
"name": {
"$ref": "#/components/schemas/FunctionName"
},
"response_cardinality": {
"description": "object response if false, rows if true",
"enum": [
"one",
"many"
],
"type": "string"
},
"returns": {
"$ref": "#/components/schemas/FunctionReturnType"
},
"type": {
"$ref": "#/components/schemas/FunctionType"
}
},
"required": [
"name",
"type",
"returns",
"response_cardinality",
"args"
],
"type": "object"
},
"ObjectTypeDefinition": {
"properties": {
"columns": {
@ -1314,39 +1451,93 @@
"type": "null"
},
"QueryRequest": {
"discriminator": {
"mapping": {
"function": "FunctionRequest",
"table": "TableRequest"
},
"propertyName": "type"
},
"oneOf": [
{
"$ref": "#/components/schemas/FunctionRequest"
},
{
"$ref": "#/components/schemas/TableRequest"
}
]
},
"ScalarArgumentValue": {
"properties": {
"foreach": {
"description": "If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query.",
"items": {
"additionalProperties": {
"$ref": "#/components/schemas/ScalarValue"
},
"type": "object"
},
"nullable": true,
"type": "array"
"type": {
"enum": [
"scalar"
],
"type": "string"
},
"query": {
"$ref": "#/components/schemas/Query"
"value": {
"additionalProperties": true
},
"table": {
"$ref": "#/components/schemas/TableName"
},
"table_relationships": {
"description": "The relationships between tables involved in the entire query request",
"items": {
"$ref": "#/components/schemas/TableRelationships"
},
"type": "array"
"value_type": {
"$ref": "#/components/schemas/ScalarType"
}
},
"required": [
"table",
"table_relationships",
"query"
"value",
"value_type",
"type"
],
"type": "object"
},
"ArgumentValue": {
"discriminator": {
"mapping": {
"scalar": "ScalarArgumentValue"
},
"propertyName": "type"
},
"oneOf": [
{
"$ref": "#/components/schemas/ScalarArgumentValue"
}
]
},
"NamedArgument": {
"properties": {
"name": {
"description": "The name of the named argument",
"type": "string"
},
"type": {
"enum": [
"named"
],
"type": "string"
},
"value": {
"$ref": "#/components/schemas/ArgumentValue"
}
},
"required": [
"name",
"value",
"type"
],
"type": "object"
},
"FunctionRequestArgument": {
"discriminator": {
"mapping": {
"named": "NamedArgument"
},
"propertyName": "type"
},
"oneOf": [
{
"$ref": "#/components/schemas/NamedArgument"
}
]
},
"RelationshipType": {
"enum": [
"object",
@ -1377,6 +1568,32 @@
],
"type": "object"
},
"FunctionRelationships": {
"properties": {
"relationships": {
"additionalProperties": {
"$ref": "#/components/schemas/Relationship"
},
"description": "A map of relationships from the source table to target tables. The key of the map is the relationship name",
"type": "object"
},
"source_function": {
"$ref": "#/components/schemas/FunctionName"
},
"type": {
"enum": [
"function"
],
"type": "string"
}
},
"required": [
"source_function",
"relationships",
"type"
],
"type": "object"
},
"TableRelationships": {
"properties": {
"relationships": {
@ -1388,14 +1605,38 @@
},
"source_table": {
"$ref": "#/components/schemas/TableName"
},
"type": {
"enum": [
"table"
],
"type": "string"
}
},
"required": [
"type",
"source_table",
"relationships"
],
"type": "object"
},
"Relationships": {
"discriminator": {
"mapping": {
"function": "FunctionRelationships",
"table": "TableRelationships"
},
"propertyName": "type"
},
"oneOf": [
{
"$ref": "#/components/schemas/FunctionRelationships"
},
{
"$ref": "#/components/schemas/TableRelationships"
}
]
},
"Query": {
"properties": {
"aggregates": {
@ -2154,6 +2395,44 @@
],
"type": "object"
},
"FunctionRequest": {
"properties": {
"function": {
"$ref": "#/components/schemas/FunctionName"
},
"function_arguments": {
"default": [],
"description": "Function Arguments. TODO. Improve this.",
"items": {
"$ref": "#/components/schemas/FunctionRequestArgument"
},
"type": "array"
},
"query": {
"$ref": "#/components/schemas/Query"
},
"relationships": {
"description": "The relationships between entities involved in the entire query request",
"items": {
"$ref": "#/components/schemas/Relationships"
},
"type": "array"
},
"type": {
"enum": [
"function"
],
"type": "string"
}
},
"required": [
"function",
"relationships",
"query",
"type"
],
"type": "object"
},
"ScalarValue": {
"properties": {
"value": {
@ -2169,6 +2448,47 @@
],
"type": "object"
},
"TableRequest": {
"properties": {
"foreach": {
"description": "If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query.",
"items": {
"additionalProperties": {
"$ref": "#/components/schemas/ScalarValue"
},
"type": "object"
},
"nullable": true,
"type": "array"
},
"query": {
"$ref": "#/components/schemas/Query"
},
"table": {
"$ref": "#/components/schemas/TableName"
},
"table_relationships": {
"description": "The relationships between tables involved in the entire query request",
"items": {
"$ref": "#/components/schemas/Relationships"
},
"type": "array"
},
"type": {
"enum": [
"table"
],
"type": "string"
}
},
"required": [
"table",
"table_relationships",
"query",
"type"
],
"type": "object"
},
"ExplainResponse": {
"properties": {
"lines": {

View File

@ -9,6 +9,7 @@ export type { AnotherColumnComparison } from './models/AnotherColumnComparison';
export type { ApplyBinaryArrayComparisonOperator } from './models/ApplyBinaryArrayComparisonOperator';
export type { ApplyBinaryComparisonOperator } from './models/ApplyBinaryComparisonOperator';
export type { ApplyUnaryComparisonOperator } from './models/ApplyUnaryComparisonOperator';
export type { ArgumentValue } from './models/ArgumentValue';
export type { ArrayRelationInsertFieldValue } from './models/ArrayRelationInsertFieldValue';
export type { ArrayRelationInsertSchema } from './models/ArrayRelationInsertSchema';
export type { AtomicitySupportLevel } from './models/AtomicitySupportLevel';
@ -52,6 +53,16 @@ export type { ExplainResponse } from './models/ExplainResponse';
export type { Expression } from './models/Expression';
export type { Field } from './models/Field';
export type { ForeachCapabilities } from './models/ForeachCapabilities';
export type { FunctionInfo } from './models/FunctionInfo';
export type { FunctionInformationArgument } from './models/FunctionInformationArgument';
export type { FunctionName } from './models/FunctionName';
export type { FunctionRelationships } from './models/FunctionRelationships';
export type { FunctionRequest } from './models/FunctionRequest';
export type { FunctionRequestArgument } from './models/FunctionRequestArgument';
export type { FunctionReturnsTable } from './models/FunctionReturnsTable';
export type { FunctionReturnsUnknown } from './models/FunctionReturnsUnknown';
export type { FunctionReturnType } from './models/FunctionReturnType';
export type { FunctionType } from './models/FunctionType';
export type { GraphQLType } from './models/GraphQLType';
export type { InsertCapabilities } from './models/InsertCapabilities';
export type { InsertFieldSchema } from './models/InsertFieldSchema';
@ -63,6 +74,7 @@ export type { MutationOperation } from './models/MutationOperation';
export type { MutationOperationResults } from './models/MutationOperationResults';
export type { MutationRequest } from './models/MutationRequest';
export type { MutationResponse } from './models/MutationResponse';
export type { NamedArgument } from './models/NamedArgument';
export type { NestedObjectField } from './models/NestedObjectField';
export type { NotExpression } from './models/NotExpression';
export type { NullColumnFieldValue } from './models/NullColumnFieldValue';
@ -96,10 +108,12 @@ export type { RelatedTable } from './models/RelatedTable';
export type { Relationship } from './models/Relationship';
export type { RelationshipCapabilities } from './models/RelationshipCapabilities';
export type { RelationshipField } from './models/RelationshipField';
export type { Relationships } from './models/Relationships';
export type { RelationshipType } from './models/RelationshipType';
export type { ReturningCapabilities } from './models/ReturningCapabilities';
export type { RowObject } from './models/RowObject';
export type { RowUpdate } from './models/RowUpdate';
export type { ScalarArgumentValue } from './models/ScalarArgumentValue';
export type { ScalarType } from './models/ScalarType';
export type { ScalarTypeCapabilities } from './models/ScalarTypeCapabilities';
export type { ScalarTypesCapabilities } from './models/ScalarTypesCapabilities';
@ -116,6 +130,7 @@ export type { TableInfo } from './models/TableInfo';
export type { TableInsertSchema } from './models/TableInsertSchema';
export type { TableName } from './models/TableName';
export type { TableRelationships } from './models/TableRelationships';
export type { TableRequest } from './models/TableRequest';
export type { TableType } from './models/TableType';
export type { UnaryComparisonOperator } from './models/UnaryComparisonOperator';
export type { UniqueIdentifierGenerationStrategy } from './models/UniqueIdentifierGenerationStrategy';
@ -125,3 +140,4 @@ export type { UpdateColumnOperatorDefinition } from './models/UpdateColumnOperat
export type { UpdateColumnOperatorName } from './models/UpdateColumnOperatorName';
export type { UpdateColumnOperators } from './models/UpdateColumnOperators';
export type { UpdateMutationOperation } from './models/UpdateMutationOperation';
export type { UserDefinedFunctionCapabilities } from './models/UserDefinedFunctionCapabilities';

View File

@ -0,0 +1,8 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { ScalarArgumentValue } from './ScalarArgumentValue';
export type ArgumentValue = ScalarArgumentValue;

View File

@ -14,6 +14,7 @@ import type { RawCapabilities } from './RawCapabilities';
import type { RelationshipCapabilities } from './RelationshipCapabilities';
import type { ScalarTypesCapabilities } from './ScalarTypesCapabilities';
import type { SubscriptionCapabilities } from './SubscriptionCapabilities';
import type { UserDefinedFunctionCapabilities } from './UserDefinedFunctionCapabilities';
export type Capabilities = {
comparisons?: ComparisonCapabilities;
@ -28,5 +29,6 @@ export type Capabilities = {
relationships?: RelationshipCapabilities;
scalar_types?: ScalarTypesCapabilities;
subscriptions?: SubscriptionCapabilities;
user_defined_functions?: UserDefinedFunctionCapabilities;
};

View File

@ -0,0 +1,27 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { FunctionInformationArgument } from './FunctionInformationArgument';
import type { FunctionName } from './FunctionName';
import type { FunctionReturnType } from './FunctionReturnType';
import type { FunctionType } from './FunctionType';
export type FunctionInfo = {
/**
* argument info - name/types
*/
args: Array<FunctionInformationArgument>;
/**
* Description of the function
*/
description?: string | null;
name: FunctionName;
/**
* object response if 'one', rows if 'many'
*/
response_cardinality: 'one' | 'many';
returns: FunctionReturnType;
type: FunctionType;
};

View File

@ -0,0 +1,18 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { ScalarType } from './ScalarType';
export type FunctionInformationArgument = {
/**
* The name of the argument
*/
name: string;
/**
* If the argument can be omitted
*/
optional?: boolean;
type: ScalarType;
};

View File

@ -0,0 +1,8 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
/**
* The fully qualified name of a function, where the last item in the array is the function name and any earlier items represent the namespacing of the function name
*/
export type FunctionName = Array<string>;

View File

@ -0,0 +1,16 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { FunctionName } from './FunctionName';
import type { Relationship } from './Relationship';
export type FunctionRelationships = {
/**
* A map of relationships from the source table to target tables. The key of the map is the relationship name
*/
relationships: Record<string, Relationship>;
source_function: FunctionName;
type: 'function';
};

View File

@ -0,0 +1,23 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { FunctionName } from './FunctionName';
import type { FunctionRequestArgument } from './FunctionRequestArgument';
import type { Query } from './Query';
import type { Relationships } from './Relationships';
export type FunctionRequest = {
function: FunctionName;
/**
* Function Arguments. TODO. Improve this.
*/
function_arguments?: Array<FunctionRequestArgument>;
query: Query;
/**
* The relationships between entities involved in the entire query request
*/
relationships: Array<Relationships>;
type: 'function';
};

View File

@ -0,0 +1,8 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { NamedArgument } from './NamedArgument';
export type FunctionRequestArgument = NamedArgument;

View File

@ -0,0 +1,9 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { FunctionReturnsTable } from './FunctionReturnsTable';
import type { FunctionReturnsUnknown } from './FunctionReturnsUnknown';
export type FunctionReturnType = (FunctionReturnsTable | FunctionReturnsUnknown);

View File

@ -0,0 +1,11 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { TableName } from './TableName';
export type FunctionReturnsTable = {
table: TableName;
type: 'table';
};

View File

@ -0,0 +1,8 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export type FunctionReturnsUnknown = {
type: 'unknown';
};

View File

@ -0,0 +1,5 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export type FunctionType = 'read' | 'write';

View File

@ -0,0 +1,15 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { ArgumentValue } from './ArgumentValue';
export type NamedArgument = {
/**
* The name of the named argument
*/
name: string;
type: 'named';
value: ArgumentValue;
};

View File

@ -2,21 +2,8 @@
/* tslint:disable */
/* eslint-disable */
import type { Query } from './Query';
import type { ScalarValue } from './ScalarValue';
import type { TableName } from './TableName';
import type { TableRelationships } from './TableRelationships';
import type { FunctionRequest } from './FunctionRequest';
import type { TableRequest } from './TableRequest';
export type QueryRequest = {
/**
* If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query.
*/
foreach?: Array<Record<string, ScalarValue>> | null;
query: Query;
table: TableName;
/**
* The relationships between tables involved in the entire query request
*/
table_relationships: Array<TableRelationships>;
};
export type QueryRequest = (FunctionRequest | TableRequest);

View File

@ -0,0 +1,9 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { FunctionRelationships } from './FunctionRelationships';
import type { TableRelationships } from './TableRelationships';
export type Relationships = (FunctionRelationships | TableRelationships);

View File

@ -0,0 +1,12 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { ScalarType } from './ScalarType';
export type ScalarArgumentValue = {
type: 'scalar';
value: any;
value_type: ScalarType;
};

View File

@ -2,10 +2,15 @@
/* tslint:disable */
/* eslint-disable */
import type { FunctionInfo } from './FunctionInfo';
import type { ObjectTypeDefinition } from './ObjectTypeDefinition';
import type { TableInfo } from './TableInfo';
export type SchemaResponse = {
/**
* Available functions
*/
functions?: Array<FunctionInfo>;
/**
* Object type definitions referenced in this schema
*/

View File

@ -11,5 +11,6 @@ export type TableRelationships = {
*/
relationships: Record<string, Relationship>;
source_table: TableName;
type: 'table';
};

View File

@ -0,0 +1,23 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { Query } from './Query';
import type { Relationships } from './Relationships';
import type { ScalarValue } from './ScalarValue';
import type { TableName } from './TableName';
export type TableRequest = {
/**
* If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query.
*/
foreach?: Array<Record<string, ScalarValue>> | null;
query: Query;
table: TableName;
/**
* The relationships between tables involved in the entire query request
*/
table_relationships: Array<Relationships>;
type: 'table';
};

View File

@ -0,0 +1,7 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
export type UserDefinedFunctionCapabilities = {
};

View File

@ -24,7 +24,7 @@
},
"dc-api-types": {
"name": "@hasura/dc-api-types",
"version": "0.30.0",
"version": "0.31.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",
@ -2227,7 +2227,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"fastify": "^4.13.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -2547,7 +2547,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"fastify": "^4.13.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
@ -2868,7 +2868,7 @@
"version": "file:reference",
"requires": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/xml2js": "^0.4.11",
@ -3080,7 +3080,7 @@
"version": "file:sqlite",
"requires": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/sqlite3": "^3.1.8",

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"fastify": "^4.13.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -52,7 +52,7 @@
"integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
},
"node_modules/@hasura/dc-api-types": {
"version": "0.30.0",
"version": "0.31.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"fastify": "^4.13.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",

View File

@ -65,7 +65,8 @@ const capabilities: Capabilities = {
}
},
scalar_types: scalarTypes,
datasets: {}
datasets: {},
user_defined_functions: {}
}
export const capabilitiesResponse: CapabilitiesResponse = {

View File

@ -0,0 +1,717 @@
{
"tables": [
{
"name": ["Artist"],
"type": "table",
"primary_key": ["ArtistId"],
"description": "Collection of artists of music",
"columns": [
{
"name": "ArtistId",
"type": "number",
"nullable": false,
"description": "Artist primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "Name",
"type": "string",
"nullable": true,
"description": "The name of the artist",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Album"],
"type": "table",
"primary_key": ["AlbumId"],
"foreign_keys": {
"Artist": {
"column_mapping": {
"ArtistId": "ArtistId"
},
"foreign_table": ["Artist"]
}
},
"description": "Collection of music albums created by artists",
"columns": [
{
"name": "AlbumId",
"type": "number",
"nullable": false,
"description": "Album primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "Title",
"type": "string",
"nullable": false,
"description": "The title of the album",
"insertable": false,
"updatable": false
},
{
"name": "ArtistId",
"type": "number",
"nullable": false,
"description": "The ID of the artist that created this album",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Customer"],
"type": "table",
"primary_key": ["CustomerId"],
"foreign_keys": {
"CustomerSupportRep": {
"column_mapping": {
"SupportRepId": "EmployeeId"
},
"foreign_table": ["Employee"]
}
},
"description": "Collection of customers who can buy tracks",
"columns": [
{
"name": "CustomerId",
"type": "number",
"nullable": false,
"description": "Customer primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "FirstName",
"type": "string",
"nullable": false,
"description": "The customer's first name",
"insertable": false,
"updatable": false
},
{
"name": "LastName",
"type": "string",
"nullable": false,
"description": "The customer's last name",
"insertable": false,
"updatable": false
},
{
"name": "Company",
"type": "string",
"nullable": true,
"description": "The customer's company name",
"insertable": false,
"updatable": false
},
{
"name": "Address",
"type": "string",
"nullable": true,
"description": "The customer's address line (street number, street)",
"insertable": false,
"updatable": false
},
{
"name": "City",
"type": "string",
"nullable": true,
"description": "The customer's address city",
"insertable": false,
"updatable": false
},
{
"name": "State",
"type": "string",
"nullable": true,
"description": "The customer's address state",
"insertable": false,
"updatable": false
},
{
"name": "Country",
"type": "string",
"nullable": true,
"description": "The customer's address country",
"insertable": false,
"updatable": false
},
{
"name": "PostalCode",
"type": "string",
"nullable": true,
"description": "The customer's address postal code",
"insertable": false,
"updatable": false
},
{
"name": "Phone",
"type": "string",
"nullable": true,
"description": "The customer's phone number",
"insertable": false,
"updatable": false
},
{
"name": "Fax",
"type": "string",
"nullable": true,
"description": "The customer's fax number",
"insertable": false,
"updatable": false
},
{
"name": "Email",
"type": "string",
"nullable": false,
"description": "The customer's email address",
"insertable": false,
"updatable": false
},
{
"name": "SupportRepId",
"type": "number",
"nullable": true,
"description": "The ID of the Employee who is this customer's support representative",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Employee"],
"type": "table",
"primary_key": ["EmployeeId"],
"foreign_keys": {
"EmployeeReportsTo": {
"column_mapping": {
"ReportsTo": "EmployeeId"
},
"foreign_table": ["Employee"]
}
},
"description": "Collection of employees who work for the business",
"columns": [
{
"name": "EmployeeId",
"type": "number",
"nullable": false,
"description": "Employee primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "LastName",
"type": "string",
"nullable": false,
"description": "The employee's last name",
"insertable": false,
"updatable": false
},
{
"name": "FirstName",
"type": "string",
"nullable": false,
"description": "The employee's first name",
"insertable": false,
"updatable": false
},
{
"name": "Title",
"type": "string",
"nullable": true,
"description": "The employee's job title",
"insertable": false,
"updatable": false
},
{
"name": "ReportsTo",
"type": "number",
"nullable": true,
"description": "The employee's manager",
"insertable": false,
"updatable": false
},
{
"name": "BirthDate",
"type": "DateTime",
"nullable": true,
"description": "The employee's birth date",
"insertable": false,
"updatable": false
},
{
"name": "HireDate",
"type": "DateTime",
"nullable": true,
"description": "The employee's hire date",
"insertable": false,
"updatable": false
},
{
"name": "Address",
"type": "string",
"nullable": true,
"description": "The employee's address line (street number, street)",
"insertable": false,
"updatable": false
},
{
"name": "City",
"type": "string",
"nullable": true,
"description": "The employee's address city",
"insertable": false,
"updatable": false
},
{
"name": "State",
"type": "string",
"nullable": true,
"description": "The employee's address state",
"insertable": false,
"updatable": false
},
{
"name": "Country",
"type": "string",
"nullable": true,
"description": "The employee's address country",
"insertable": false,
"updatable": false
},
{
"name": "PostalCode",
"type": "string",
"nullable": true,
"description": "The employee's address postal code",
"insertable": false,
"updatable": false
},
{
"name": "Phone",
"type": "string",
"nullable": true,
"description": "The employee's phone number",
"insertable": false,
"updatable": false
},
{
"name": "Fax",
"type": "string",
"nullable": true,
"description": "The employee's fax number",
"insertable": false,
"updatable": false
},
{
"name": "Email",
"type": "string",
"nullable": true,
"description": "The employee's email address",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Genre"],
"type": "table",
"primary_key": ["GenreId"],
"description": "Genres of music",
"columns": [
{
"name": "GenreId",
"type": "number",
"nullable": false,
"description": "Genre primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "Name",
"type": "string",
"nullable": true,
"description": "The name of the genre",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Invoice"],
"type": "table",
"primary_key": ["InvoiceId"],
"foreign_keys": {
"InvoiceCustomer": {
"column_mapping": {
"CustomerId": "CustomerId"
},
"foreign_table": ["Customer"]
}
},
"description": "Collection of invoices of music purchases by a customer",
"columns": [
{
"name": "InvoiceId",
"type": "number",
"nullable": false,
"description": "Invoice primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "CustomerId",
"type": "number",
"nullable": false,
"description": "ID of the customer who bought the music",
"insertable": false,
"updatable": false
},
{
"name": "InvoiceDate",
"type": "DateTime",
"nullable": false,
"description": "Date of the invoice",
"insertable": false,
"updatable": false
},
{
"name": "BillingAddress",
"type": "string",
"nullable": true,
"description": "The invoice's billing address line (street number, street)",
"insertable": false,
"updatable": false
},
{
"name": "BillingCity",
"type": "string",
"nullable": true,
"description": "The invoice's billing address city",
"insertable": false,
"updatable": false
},
{
"name": "BillingState",
"type": "string",
"nullable": true,
"description": "The invoice's billing address state",
"insertable": false,
"updatable": false
},
{
"name": "BillingCountry",
"type": "string",
"nullable": true,
"description": "The invoice's billing address country",
"insertable": false,
"updatable": false
},
{
"name": "BillingPostalCode",
"type": "string",
"nullable": true,
"description": "The invoice's billing address postal code",
"insertable": false,
"updatable": false
},
{
"name": "Total",
"type": "number",
"nullable": false,
"description": "The total amount due on the invoice",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["InvoiceLine"],
"type": "table",
"primary_key": ["InvoiceLineId"],
"foreign_keys": {
"Invoice": {
"column_mapping": {
"InvoiceId": "InvoiceId"
},
"foreign_table": ["Invoice"]
},
"Track": {
"column_mapping": {
"TrackId": "TrackId"
},
"foreign_table": ["Track"]
}
},
"description": "Collection of track purchasing line items of invoices",
"columns": [
{
"name": "InvoiceLineId",
"type": "number",
"nullable": false,
"description": "Invoice Line primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "InvoiceId",
"type": "number",
"nullable": false,
"description": "ID of the invoice the line belongs to",
"insertable": false,
"updatable": false
},
{
"name": "TrackId",
"type": "number",
"nullable": false,
"description": "ID of the music track being purchased",
"insertable": false,
"updatable": false
},
{
"name": "UnitPrice",
"type": "number",
"nullable": false,
"description": "Price of each individual track unit",
"insertable": false,
"updatable": false
},
{
"name": "Quantity",
"type": "number",
"nullable": false,
"description": "Quantity of the track purchased",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["MediaType"],
"type": "table",
"primary_key": ["MediaTypeId"],
"description": "Collection of media types that tracks can be encoded in",
"columns": [
{
"name": "MediaTypeId",
"type": "number",
"nullable": false,
"description": "Media Type primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "Name",
"type": "string",
"nullable": true,
"description": "The name of the media type format",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Playlist"],
"type": "table",
"primary_key": ["PlaylistId"],
"description": "Collection of playlists",
"columns": [
{
"name": "PlaylistId",
"type": "number",
"nullable": false,
"description": "Playlist primary key identifier",
"insertable": false,
"updatable": false
},
{
"name": "Name",
"type": "string",
"nullable": true,
"description": "The name of the playlist",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["PlaylistTrack"],
"type": "table",
"primary_key": ["PlaylistId", "TrackId"],
"foreign_keys": {
"Playlist": {
"column_mapping": {
"PlaylistId": "PlaylistId"
},
"foreign_table": ["Playlist"]
},
"Track": {
"column_mapping": {
"TrackId": "TrackId"
},
"foreign_table": ["Track"]
}
},
"description": "Associations between playlists and tracks",
"columns": [
{
"name": "PlaylistId",
"type": "number",
"nullable": false,
"description": "The ID of the playlist",
"insertable": false,
"updatable": false
},
{
"name": "TrackId",
"type": "number",
"nullable": false,
"description": "The ID of the track",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Track"],
"type": "table",
"primary_key": ["TrackId"],
"foreign_keys": {
"Album": {
"column_mapping": {
"AlbumId": "AlbumId"
},
"foreign_table": ["Album"]
},
"Genre": {
"column_mapping": {
"GenreId": "GenreId"
},
"foreign_table": ["Genre"]
},
"MediaType": {
"column_mapping": {
"MediaTypeId": "MediaTypeId"
},
"foreign_table": ["MediaType"]
}
},
"description": "Collection of music tracks",
"columns": [
{
"name": "TrackId",
"type": "number",
"nullable": false,
"description": "The ID of the track",
"insertable": false,
"updatable": false
},
{
"name": "Name",
"type": "string",
"nullable": false,
"description": "The name of the track",
"insertable": false,
"updatable": false
},
{
"name": "AlbumId",
"type": "number",
"nullable": true,
"description": "The ID of the album the track belongs to",
"insertable": false,
"updatable": false
},
{
"name": "MediaTypeId",
"type": "number",
"nullable": false,
"description": "The ID of the media type the track is encoded with",
"insertable": false,
"updatable": false
},
{
"name": "GenreId",
"type": "number",
"nullable": true,
"description": "The ID of the genre of the track",
"insertable": false,
"updatable": false
},
{
"name": "Composer",
"type": "string",
"nullable": true,
"description": "The name of the composer of the track",
"insertable": false,
"updatable": false
},
{
"name": "Milliseconds",
"type": "number",
"nullable": false,
"description": "The length of the track in milliseconds",
"insertable": false,
"updatable": false
},
{
"name": "Bytes",
"type": "number",
"nullable": true,
"description": "The size of the track in bytes",
"insertable": false,
"updatable": false
},
{
"name": "UnitPrice",
"type": "number",
"nullable": false,
"description": "The price of the track",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
}
]
}

View File

@ -0,0 +1,108 @@
{
"tables": [
{
"name": ["Result"],
"type": "table",
"primary_key": ["Value"],
"description": "Collection of function results",
"columns": [
{
"name": "Value",
"type": "number",
"nullable": false,
"description": "Result primary key identifier and representation of function results",
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Authors"],
"type": "table",
"primary_key": ["id"],
"description": "Publication Authors",
"columns": [
{
"name": "id",
"type": "number",
"description": "primary key for Authors",
"nullable": false,
"insertable": false,
"updatable": false
},
{
"name": "name",
"type": "string",
"description": "Author name",
"nullable": false,
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
},
{
"name": ["Articles"],
"type": "table",
"primary_key": ["id"],
"description": "Published Articles",
"columns": [
{
"name": "id",
"type": "number",
"description": "primary key for Articles",
"nullable": false,
"insertable": false,
"updatable": false
},
{
"name": "title",
"type": "string",
"description": "Article name",
"nullable": false,
"insertable": false,
"updatable": false
},
{
"name": "author_id",
"type": "number",
"description": "Reference to Authors by ID",
"nullable": false,
"insertable": false,
"updatable": false
}
],
"insertable": false,
"updatable": false,
"deletable": false
}
],
"functions": [
{
"name": ["Fibonacci"],
"type": "read",
"returns": {"type": "table", "table": ["Result"]},
"response_cardinality": "many",
"args": [
{ "name": "take", "type": "Integer" },
{ "name": "__hasura_session", "type": "JSON" }
],
"description": "Fibonacci function - Take N Fibonacci numbers!"
},
{
"name": ["SearchArticles"],
"type": "read",
"returns": {"type": "table", "table": ["Articles"]},
"response_cardinality": "many",
"args": [
{ "name": "query", "type": "string" }
],
"description": "Search Articles for query."
}
]
}

Binary file not shown.

View File

@ -1,14 +1,18 @@
import { SchemaResponse, TableName } from "@hasura/dc-api-types"
import { FunctionInfo, SchemaResponse, TableName } from "@hasura/dc-api-types"
import { Casing, Config } from "../config";
import xml2js from "xml2js"
import fs from "fs"
import stream from "stream"
import zlib from "zlib"
import { parseNumbers } from "xml2js/lib/processors";
import { mapObject, mapObjectValues, tableNameEquals, unreachable } from "../util";
import { mapObject, mapObjectValues, nameEquals, unreachable } from "../util";
import { defaultDbStoreName, getDbStoreName } from "../datasets";
export type StaticData = {
[tableName: string]: Record<string, string | number | boolean | null>[]
schema: SchemaResponse,
data: {
[tableName: string]: Record<string, string | number | boolean | null>[]
}
}
const streamToBuffer = async (stream: stream.Readable): Promise<Buffer> => {
@ -32,38 +36,63 @@ const parseNumbersInNumericColumns = (schema: SchemaResponse) => {
}
export const staticDataExists = async(name: string): Promise<boolean> => {
return new Promise((resolve) => {
fs.access(__dirname + "/" + name, fs.constants.R_OK, err => err ? resolve(false) : resolve(true));
const dataOK = await new Promise((resolve) => {
const dataPath = mkDataPath(name);
fs.access(dataPath, fs.constants.R_OK, err => err ? resolve(false) : resolve(true));
});
const schemaOK = await new Promise((resolve) => {
const schemaPath = mkSchemaPath(name);
fs.access(schemaPath, fs.constants.R_OK, err => err ? resolve(false) : resolve(true));
});
return dataOK as boolean && schemaOK as boolean;
}
const mkDataPath = (name: string): string => {
return `${__dirname}/${name}.xml.gz`;
}
const mkSchemaPath = (name: string): string => {
return `${__dirname}/${name}.schema.json`;
}
export const loadStaticData = async (name: string): Promise<StaticData> => {
const gzipReadStream = fs.createReadStream(__dirname + "/" + name);
// Schema
const schemaPath = `${__dirname}/${name}.schema.json`;
const schema: SchemaResponse = JSON.parse(fs.readFileSync(schemaPath, 'utf-8'));
// Data
const dataPath = mkDataPath(name);
const gzipReadStream = fs.createReadStream(dataPath);
const unzipStream = stream.pipeline(gzipReadStream, zlib.createGunzip(), () => { });
const xmlStr = (await streamToBuffer(unzipStream)).toString("utf16le");
const xml = await xml2js.parseStringPromise(xmlStr, { explicitArray: false, emptyTag: () => null, valueProcessors: [parseNumbersInNumericColumns(schema)] });
const data = xml.ChinookDataSet;
const data = xml[`${name}DataSet`];
delete data["$"]; // Remove XML namespace property
return await data as StaticData;
// return await data as StaticData;
return {
data,
schema
} as StaticData;
}
export const filterAvailableTables = (staticData: StaticData, config: Config): StaticData => {
return Object.fromEntries(
Object.entries(staticData).filter(([name, _]) => config.tables === null ? true : config.tables.indexOf(name) >= 0)
const data = Object.fromEntries(
Object.entries(staticData.data).filter(([name, _]) => config.tables === null ? true : config.tables.indexOf(name) >= 0)
);
return { ...staticData, data };
}
export const getTable = (staticData: StaticData, config: Config): ((tableName: TableName) => Record<string, string | number | boolean | null>[] | undefined) => {
const cachedTransformedData: StaticData = {};
const cachedTransformedData: { [tableName: string]: Record<string, string | number | boolean | null>[]; } = {};
const lookupOriginalTable = (tableName: string): Record<string, string | number | boolean | null>[] => {
switch (config.table_name_casing) {
case "pascal_case":
return staticData[tableName];
return staticData.data[tableName];
case "lowercase":
const name = Object.keys(staticData).find(originalTableName => originalTableName.toLowerCase() === tableName);
if (name == undefined) throw new Error(`Unknown table name: ${tableName}`);
return staticData[name];
return staticData.data[name];
default:
return unreachable(config.table_name_casing);
}
@ -102,724 +131,6 @@ export const getTable = (staticData: StaticData, config: Config): ((tableName: T
};
}
const schema: SchemaResponse = {
tables: [
{
name: ["Artist"],
type: "table",
primary_key: ["ArtistId"],
description: "Collection of artists of music",
columns: [
{
name: "ArtistId",
type: "number",
nullable: false,
description: "Artist primary key identifier",
insertable: false,
updatable: false,
},
{
name: "Name",
type: "string",
nullable: true,
description: "The name of the artist",
insertable: false,
updatable: false,
}
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Album"],
type: "table",
primary_key: ["AlbumId"],
foreign_keys: {
"Artist": {
column_mapping: {
"ArtistId": "ArtistId"
},
foreign_table: ["Artist"],
}
},
description: "Collection of music albums created by artists",
columns: [
{
name: "AlbumId",
type: "number",
nullable: false,
description: "Album primary key identifier",
insertable: false,
updatable: false,
},
{
name: "Title",
type: "string",
nullable: false,
description: "The title of the album",
insertable: false,
updatable: false,
},
{
name: "ArtistId",
type: "number",
nullable: false,
description: "The ID of the artist that created this album",
insertable: false,
updatable: false,
}
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Customer"],
type: "table",
primary_key: ["CustomerId"],
foreign_keys: {
"CustomerSupportRep": {
column_mapping: {
"SupportRepId": "EmployeeId"
},
foreign_table: ["Employee"],
}
},
description: "Collection of customers who can buy tracks",
columns: [
{
name: "CustomerId",
type: "number",
nullable: false,
description: "Customer primary key identifier",
insertable: false,
updatable: false,
},
{
name: "FirstName",
type: "string",
nullable: false,
description: "The customer's first name",
insertable: false,
updatable: false,
},
{
name: "LastName",
type: "string",
nullable: false,
description: "The customer's last name",
insertable: false,
updatable: false,
},
{
name: "Company",
type: "string",
nullable: true,
description: "The customer's company name",
insertable: false,
updatable: false,
},
{
name: "Address",
type: "string",
nullable: true,
description: "The customer's address line (street number, street)",
insertable: false,
updatable: false,
},
{
name: "City",
type: "string",
nullable: true,
description: "The customer's address city",
insertable: false,
updatable: false,
},
{
name: "State",
type: "string",
nullable: true,
description: "The customer's address state",
insertable: false,
updatable: false,
},
{
name: "Country",
type: "string",
nullable: true,
description: "The customer's address country",
insertable: false,
updatable: false,
},
{
name: "PostalCode",
type: "string",
nullable: true,
description: "The customer's address postal code",
insertable: false,
updatable: false,
},
{
name: "Phone",
type: "string",
nullable: true,
description: "The customer's phone number",
insertable: false,
updatable: false,
},
{
name: "Fax",
type: "string",
nullable: true,
description: "The customer's fax number",
insertable: false,
updatable: false,
},
{
name: "Email",
type: "string",
nullable: false,
description: "The customer's email address",
insertable: false,
updatable: false,
},
{
name: "SupportRepId",
type: "number",
nullable: true,
description: "The ID of the Employee who is this customer's support representative",
insertable: false,
updatable: false,
}
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Employee"],
type: "table",
primary_key: ["EmployeeId"],
foreign_keys: {
"EmployeeReportsTo": {
column_mapping: {
"ReportsTo": "EmployeeId"
},
foreign_table: ["Employee"],
}
},
description: "Collection of employees who work for the business",
columns: [
{
name: "EmployeeId",
type: "number",
nullable: false,
description: "Employee primary key identifier",
insertable: false,
updatable: false,
},
{
name: "LastName",
type: "string",
nullable: false,
description: "The employee's last name",
insertable: false,
updatable: false,
},
{
name: "FirstName",
type: "string",
nullable: false,
description: "The employee's first name",
insertable: false,
updatable: false,
},
{
name: "Title",
type: "string",
nullable: true,
description: "The employee's job title",
insertable: false,
updatable: false,
},
{
name: "ReportsTo",
type: "number",
nullable: true,
description: "The employee's manager",
insertable: false,
updatable: false,
},
{
name: "BirthDate",
type: "DateTime",
nullable: true,
description: "The employee's birth date",
insertable: false,
updatable: false,
},
{
name: "HireDate",
type: "DateTime",
nullable: true,
description: "The employee's hire date",
insertable: false,
updatable: false,
},
{
name: "Address",
type: "string",
nullable: true,
description: "The employee's address line (street number, street)",
insertable: false,
updatable: false,
},
{
name: "City",
type: "string",
nullable: true,
description: "The employee's address city",
insertable: false,
updatable: false,
},
{
name: "State",
type: "string",
nullable: true,
description: "The employee's address state",
insertable: false,
updatable: false,
},
{
name: "Country",
type: "string",
nullable: true,
description: "The employee's address country",
insertable: false,
updatable: false,
},
{
name: "PostalCode",
type: "string",
nullable: true,
description: "The employee's address postal code",
insertable: false,
updatable: false,
},
{
name: "Phone",
type: "string",
nullable: true,
description: "The employee's phone number",
insertable: false,
updatable: false,
},
{
name: "Fax",
type: "string",
nullable: true,
description: "The employee's fax number",
insertable: false,
updatable: false,
},
{
name: "Email",
type: "string",
nullable: true,
description: "The employee's email address",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Genre"],
type: "table",
primary_key: ["GenreId"],
description: "Genres of music",
columns: [
{
name: "GenreId",
type: "number",
nullable: false,
description: "Genre primary key identifier",
insertable: false,
updatable: false,
},
{
name: "Name",
type: "string",
nullable: true,
description: "The name of the genre",
insertable: false,
updatable: false,
}
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Invoice"],
type: "table",
primary_key: ["InvoiceId"],
foreign_keys: {
"InvoiceCustomer": {
column_mapping: {
"CustomerId": "CustomerId"
},
foreign_table: ["Customer"],
}
},
description: "Collection of invoices of music purchases by a customer",
columns: [
{
name: "InvoiceId",
type: "number",
nullable: false,
description: "Invoice primary key identifier",
insertable: false,
updatable: false,
},
{
name: "CustomerId",
type: "number",
nullable: false,
description: "ID of the customer who bought the music",
insertable: false,
updatable: false,
},
{
name: "InvoiceDate",
type: "DateTime",
nullable: false,
description: "Date of the invoice",
insertable: false,
updatable: false,
},
{
name: "BillingAddress",
type: "string",
nullable: true,
description: "The invoice's billing address line (street number, street)",
insertable: false,
updatable: false,
},
{
name: "BillingCity",
type: "string",
nullable: true,
description: "The invoice's billing address city",
insertable: false,
updatable: false,
},
{
name: "BillingState",
type: "string",
nullable: true,
description: "The invoice's billing address state",
insertable: false,
updatable: false,
},
{
name: "BillingCountry",
type: "string",
nullable: true,
description: "The invoice's billing address country",
insertable: false,
updatable: false,
},
{
name: "BillingPostalCode",
type: "string",
nullable: true,
description: "The invoice's billing address postal code",
insertable: false,
updatable: false,
},
{
name: "Total",
type: "number",
nullable: false,
description: "The total amount due on the invoice",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["InvoiceLine"],
type: "table",
primary_key: ["InvoiceLineId"],
foreign_keys: {
"Invoice": {
column_mapping: {
"InvoiceId": "InvoiceId"
},
foreign_table: ["Invoice"],
},
"Track": {
column_mapping: {
"TrackId": "TrackId"
},
foreign_table: ["Track"],
}
},
description: "Collection of track purchasing line items of invoices",
columns: [
{
name: "InvoiceLineId",
type: "number",
nullable: false,
description: "Invoice Line primary key identifier",
insertable: false,
updatable: false,
},
{
name: "InvoiceId",
type: "number",
nullable: false,
description: "ID of the invoice the line belongs to",
insertable: false,
updatable: false,
},
{
name: "TrackId",
type: "number",
nullable: false,
description: "ID of the music track being purchased",
insertable: false,
updatable: false,
},
{
name: "UnitPrice",
type: "number",
nullable: false,
description: "Price of each individual track unit",
insertable: false,
updatable: false,
},
{
name: "Quantity",
type: "number",
nullable: false,
description: "Quantity of the track purchased",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["MediaType"],
type: "table",
primary_key: ["MediaTypeId"],
description: "Collection of media types that tracks can be encoded in",
columns: [
{
name: "MediaTypeId",
type: "number",
nullable: false,
description: "Media Type primary key identifier",
insertable: false,
updatable: false,
},
{
name: "Name",
type: "string",
nullable: true,
description: "The name of the media type format",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Playlist"],
type: "table",
primary_key: ["PlaylistId"],
description: "Collection of playlists",
columns: [
{
name: "PlaylistId",
type: "number",
nullable: false,
description: "Playlist primary key identifier",
insertable: false,
updatable: false,
},
{
name: "Name",
type: "string",
nullable: true,
description: "The name of the playlist",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["PlaylistTrack"],
type: "table",
primary_key: ["PlaylistId", "TrackId"],
foreign_keys: {
"Playlist": {
column_mapping: {
"PlaylistId": "PlaylistId"
},
foreign_table: ["Playlist"],
},
"Track": {
column_mapping: {
"TrackId": "TrackId"
},
foreign_table: ["Track"],
}
},
description: "Associations between playlists and tracks",
columns: [
{
name: "PlaylistId",
type: "number",
nullable: false,
description: "The ID of the playlist",
insertable: false,
updatable: false,
},
{
name: "TrackId",
type: "number",
nullable: false,
description: "The ID of the track",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
{
name: ["Track"],
type: "table",
primary_key: ["TrackId"],
foreign_keys: {
"Album": {
column_mapping: {
"AlbumId": "AlbumId"
},
foreign_table: ["Album"],
},
"Genre": {
column_mapping: {
"GenreId": "GenreId"
},
foreign_table: ["Genre"],
},
"MediaType": {
column_mapping: {
"MediaTypeId": "MediaTypeId"
},
foreign_table: ["MediaType"],
}
},
description: "Collection of music tracks",
columns: [
{
name: "TrackId",
type: "number",
nullable: false,
description: "The ID of the track",
insertable: false,
updatable: false,
},
{
name: "Name",
type: "string",
nullable: false,
description: "The name of the track",
insertable: false,
updatable: false,
},
{
name: "AlbumId",
type: "number",
nullable: true,
description: "The ID of the album the track belongs to",
insertable: false,
updatable: false,
},
{
name: "MediaTypeId",
type: "number",
nullable: false,
description: "The ID of the media type the track is encoded with",
insertable: false,
updatable: false,
},
{
name: "GenreId",
type: "number",
nullable: true,
description: "The ID of the genre of the track",
insertable: false,
updatable: false,
},
{
name: "Composer",
type: "string",
nullable: true,
description: "The name of the composer of the track",
insertable: false,
updatable: false,
},
{
name: "Milliseconds",
type: "number",
nullable: false,
description: "The length of the track in milliseconds",
insertable: false,
updatable: false,
},
{
name: "Bytes",
type: "number",
nullable: true,
description: "The size of the track in bytes",
insertable: false,
updatable: false,
},
{
name: "UnitPrice",
type: "number",
nullable: false,
description: "The price of the track",
insertable: false,
updatable: false,
},
],
insertable: false,
updatable: false,
deletable: false,
},
]
};
const applyCasing = (casing: Casing) => (str: string): string => {
switch (casing) {
case "pascal_case": return str;
@ -828,7 +139,7 @@ const applyCasing = (casing: Casing) => (str: string): string => {
}
}
export const getSchema = (config: Config): SchemaResponse => {
export const getSchema = (store: Record<string, StaticData>, config: Config): SchemaResponse => {
const applyTableNameCasing = applyCasing(config.table_name_casing);
const applyColumnNameCasing = applyCasing(config.column_name_casing);
@ -837,8 +148,15 @@ export const getSchema = (config: Config): SchemaResponse => {
? [config.schema, ...tableName]
: tableName;
const dbName = config.db ? getDbStoreName(config.db) : defaultDbStoreName;
const schema = store[dbName]?.schema;
if(!schema) {
throw new Error(`Couldn't find db store for ${dbName}`);
}
const filteredTables = schema.tables.filter(table =>
config.tables === null ? true : config.tables.map(n => [n]).find(tableNameEquals(table.name)) !== undefined
config.tables === null ? true : config.tables.map(n => [n]).find(nameEquals(table.name)) !== undefined
);
const prefixedTables = filteredTables.map(table => ({
@ -858,8 +176,11 @@ export const getSchema = (config: Config): SchemaResponse => {
}))
}));
const prefixedFunctions = (schema.functions ?? []); // TODO: Put some real prefixes here
return {
...schema,
tables: prefixedTables
tables: prefixedTables,
functions: prefixedFunctions,
};
};
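A minimal usage sketch of the new `getSchema` signature, mirroring the server's `/schema` handler shown further below (the `staticData` and `config` names come from that handler):

```typescript
// The schema endpoint now reads from the in-memory dataset store rather than a
// module-level constant; the store key is derived from the configured db name.
const schemaResponse = getSchema(staticData, config);
console.log(schemaResponse.functions); // UDFs are now returned alongside tables
```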

View File

@ -2,17 +2,15 @@
import { loadStaticData, StaticData, staticDataExists } from './data';
export async function getDataset(name: string): Promise<DatasetGetTemplateResponse> {
const safePath = mkPath(name);
return {
exists: await staticDataExists(safePath)
exists: await staticDataExists(name)
};
}
export async function cloneDataset(store: Record<string, StaticData>, dbName: string, body: DatasetCreateCloneRequest): Promise<DatasetCreateCloneResponse> {
const storeName = getDbStoreName(dbName);
const safePathName = mkPath(body.from);
const data = await loadStaticData(safePathName);
store[storeName] = data;
const staticData = await loadStaticData(body.from);
store[storeName] = staticData;
return { config: { db: dbName } };
}
@ -31,8 +29,3 @@ export async function deleteDataset(store: Record<string, StaticData>, dbName: s
export const getDbStoreName = (dbName: string) => `$${dbName}`
export const defaultDbStoreName = "@default";
function mkPath(name: string): string {
const base = name.replace(/\//g,''); // TODO: Can this be made safer?
return `${base}.xml.gz`;
}
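A small sketch of how the store keys line up after this change (the `./dataset` import path and the clone request body shape are assumptions read off the code above):

```typescript
import { cloneDataset, defaultDbStoreName } from "./dataset"; // assumed module path
import { StaticData } from "./data";

const store: Record<string, StaticData> = {};

// Cloning "Chinook" into a named store places it under a "$"-prefixed key,
// so user-created stores can never collide with the "@default" store.
await cloneDataset(store, "TestDb", { from: "Chinook" });
console.log(Object.keys(store));  // ["$TestDb"]
console.log(defaultDbStoreName);  // "@default"
```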

View File

@ -0,0 +1,98 @@

/**
* This Module defines some mock functions that can be invoked.
*/
import { ArgumentValue, FunctionRequest, NamedArgument, TableName } from "@hasura/dc-api-types";
import { prettyPrintTableName, Rows } from "./query";
export function respondToFunction(queryRequest: FunctionRequest, tableName: TableName): Rows {
const t = prettyPrintTableName(tableName);
const f = functions[t];
if(! f) {
throw(Error(`Couldn't find function ${t}`));
}
return f(queryRequest);
}
const functions: Record<string, ((x: any) => Rows)> = {
'[Fibonacci]': fibonacci,
'[Fibbbbbbbs]': fibbbbbbbs,
'[Fibbbbbbbs2]': fibbbbbbbs2,
'[FunkyAdd]': funky_add,
}
function namedArguments(args: Array<NamedArgument>): Record<string, ArgumentValue> {
return Object.fromEntries(args.map(a => [a.name, a.value]));
}
function fibonacci(q: FunctionRequest): Rows {
const argzArray = q.function_arguments;
if(! argzArray) { throw(Error('Expecting function_arguments')); }
const argz = namedArguments(argzArray);
const n = argz['take'].value;
if(! n) { throw(Error('Expecting "take" argument')); }
let rows = [];
let x = 1;
let y = 1;
let z = 1;
for(let i = 0; i < n; i++) {
rows.push({ Value: x });
z = x + y;
x = y;
y = z;
}
return rows;
}
function fibbbbbbbs(): Rows {
return [
{ ArtistId: 1, Name: 'Joe' },
{ ArtistId: 1, Name: 'Jim' },
{ ArtistId: 2, Name: 'James' },
{ ArtistId: 3, Name: 'Jack' },
{ ArtistId: 5, Name: 'Joel' },
]
}
function fibbbbbbbs2(q: FunctionRequest): Rows {
const argzArray = q.function_arguments;
if(! argzArray) { throw(Error('Expecting function_arguments')); }
const argz = namedArguments(argzArray);
const n = argz['upto'].value;
if(! n) { throw(Error('Expecting "upto" argument')); }
let rows = [];
let x = 1;
let y = 1;
let z = 1;
for(let i = 0; i < n; i++) {
rows.push({ ArtistId: x, Name: `Artist ${x}` });
z = x + y;
x = y;
y = z;
}
return rows;
}
function funky_add(q: FunctionRequest): Rows {
const argzArray = q.function_arguments;
if(! argzArray) { throw(Error('Expecting function_arguments')); }
const argz = namedArguments(argzArray);
if(! argz) { throw(Error('Expecting function_arguments')); }
const a = argz['a'].value;
if(! a) { throw(Error('Expecting "a" arg')); }
const b = argz['b'].value;
if(! b) { throw(Error('Expecting "b" arg')); }
const c = a + b;
return [{ ArtistId: c, Name: `Artist ${c}`}];
}
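A rough invocation sketch for the mocks above. Dispatch goes through `prettyPrintTableName`, so `["Fibonacci"]` resolves to the `'[Fibonacci]'` entry in the `functions` record; the exact dc-api-types field names (`value_type` in particular) are assumptions, so the request literal is cast loosely:

```typescript
const rows = respondToFunction(
  {
    type: "function",
    function: ["Fibonacci"],
    function_arguments: [
      { type: "named", name: "take", value: { type: "scalar", value: 5, value_type: "number" } },
    ],
    relationships: [],
    query: { fields: {} },
  } as any,
  ["Fibonacci"]
);
// rows => [{ Value: 1 }, { Value: 1 }, { Value: 2 }, { Value: 3 }, { Value: 5 }]
```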

View File

@ -28,7 +28,7 @@ server.get<{ Reply: CapabilitiesResponse }>("/capabilities", async (request, _re
server.get<{ Reply: SchemaResponse }>("/schema", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "schema.request");
const config = getConfig(request);
return getSchema(config);
return getSchema(staticData, config);
});
server.post<{ Body: QueryRequest, Reply: QueryResponse }>("/query", async (request, _response) => {
@ -84,7 +84,7 @@ process.on('SIGINT', () => {
const start = async () => {
try {
staticData = {[defaultDbStoreName]: await loadStaticData("Chinook.xml.gz")};
staticData = {[defaultDbStoreName]: await loadStaticData("Chinook")};
await server.listen({port: port, host: "0.0.0.0"});
}
catch (err) {

View File

@ -1,6 +1,7 @@
import { QueryRequest, TableRelationships, Relationship, Query, Field, OrderBy, Expression, BinaryComparisonOperator, UnaryComparisonOperator, BinaryArrayComparisonOperator, ComparisonColumn, ComparisonValue, Aggregate, SingleColumnAggregate, ColumnCountAggregate, TableName, OrderByElement, OrderByRelation, ExistsInTable, ExistsExpression, ScalarValue } from "@hasura/dc-api-types";
import { coerceUndefinedToNull, filterIterable, mapIterable, reduceAndIterable, reduceOrIterable, skipIterable, tableNameEquals, takeIterable, unreachable } from "./util";
import { QueryRequest, TableRelationships, Relationship, Query, Field, OrderBy, Expression, BinaryComparisonOperator, UnaryComparisonOperator, BinaryArrayComparisonOperator, ComparisonColumn, ComparisonValue, Aggregate, SingleColumnAggregate, ColumnCountAggregate, TableName, OrderByElement, OrderByRelation, ExistsInTable, ExistsExpression, ScalarValue, FunctionName, FunctionRelationships } from "@hasura/dc-api-types";
import { coerceUndefinedToNull, filterIterable, mapIterable, nameEquals, reduceAndIterable, reduceOrIterable, skipIterable, takeIterable, unreachable } from "./util";
import * as math from "mathjs";
import { respondToFunction } from "./functions";
type RelationshipName = string
@ -109,7 +110,7 @@ const prettyPrintComparisonValue = (comparisonValue: ComparisonValue): string =>
}
};
const prettyPrintTableName = (tableName: TableName): string => {
export const prettyPrintTableName = (tableName: TableName): string => {
return tableName.map(t => `[${t}]`).join(".");
};
@ -370,12 +371,38 @@ const paginateRows = (rows: Iterable<Record<string, RawScalarValue>>, offset: nu
return limit !== null ? takeIterable(skipped, limit) : skipped;
};
const makeFindRelationship = (allTableRelationships: TableRelationships[], tableName: TableName) => (relationshipName: RelationshipName): Relationship => {
const relationship = allTableRelationships.find(r => tableNameEquals(r.source_table)(tableName))?.relationships?.[relationshipName];
if (relationship === undefined)
throw `No relationship named ${relationshipName} found for table ${tableName}`;
else
return relationship;
const makeFindRelationship = (type: 'table' | 'function', request: QueryRequest, name: TableName) => (relationshipName: RelationshipName): Relationship => {
const relationships = (() => {
switch(request.type) {
case 'table':
return request.table_relationships;
case 'function':
return request.relationships;
}})();
for(var r of relationships) {
switch(type) {
case 'table':
if(r.type === 'table') {
if(nameEquals(r.source_table)(name)) {
const relationship = r.relationships[relationshipName];
if(relationship) {
return relationship;
}
}
}
break;
case 'function':
if(r.type === 'function') {
if(nameEquals(r.source_function)(name)) {
const relationship = r.relationships[relationshipName];
if(relationship) {
return relationship;
}
}
}
}
}
throw `No relationship named ${relationshipName} found for ${type} ${name}`;
};
const createFilterExpressionForRelationshipJoin = (row: Record<string, RawScalarValue>, relationship: Relationship): Expression | null => {
@ -565,16 +592,18 @@ const makeForeachFilterExpression = (foreachFilterIds: Record<string, ScalarValu
: { type: "and", expressions };
}
export type Rows = Record<string, RawScalarValue>[]; // Record<string, ScalarValue>[];
export const queryData = (getTable: (tableName: TableName) => Record<string, RawScalarValue>[] | undefined, queryRequest: QueryRequest): QueryResponse => {
const performQuery = (parentQueryRowChain: Record<string, RawScalarValue>[], tableName: TableName, query: Query): QueryResponse => {
const rows = getTable(tableName);
const performQuery = (parentQueryRowChain: Record<string, RawScalarValue>[], tableName: TableName, query: Query, previousResults: Rows | null): QueryResponse => {
const rows = previousResults ?? getTable(tableName);
if (rows === undefined) {
throw `${tableName} is not a valid table`;
}
const performSubquery = (sourceRow: Record<string, RawScalarValue>, tableName: TableName, query: Query): QueryResponse => {
return performQuery([...parentQueryRowChain, sourceRow], tableName, query);
return performQuery([...parentQueryRowChain, sourceRow], tableName, query, null);
};
const findRelationship = makeFindRelationship(queryRequest.table_relationships, tableName);
const findRelationship = makeFindRelationship(previousResults ? 'function' : 'table', queryRequest, tableName);
const getComparisonColumnValue = makeGetComparisonColumnValue(parentQueryRowChain);
const performExistsSubquery = makePerformExistsSubquery(findRelationship, performSubquery);
const getOrderByElementValue = makeGetOrderByElementValue(findRelationship, performNewQuery);
@ -603,28 +632,37 @@ export const queryData = (getTable: (tableName: TableName) => Record<string, Raw
rows: projectedRows,
}
}
const performNewQuery = (tableName: TableName, query: Query): QueryResponse => performQuery([], tableName, query);
if (queryRequest.foreach) {
return {
rows: queryRequest.foreach.map(foreachFilterIds => {
const foreachFilter = makeForeachFilterExpression(foreachFilterIds);
const where: Expression = queryRequest.query.where
? { type: "and", expressions: [foreachFilter, queryRequest.query.where] }
: foreachFilter;
const performNewQuery = (tableName: TableName, query: Query, previousResults: Rows|null = null): QueryResponse => performQuery([], tableName, query, previousResults);
const filteredQuery = {
... queryRequest.query,
where
}
const queryResponse = performNewQuery(queryRequest.table, filteredQuery);
switch(queryRequest.type) {
case 'function':
const rows = respondToFunction(queryRequest, queryRequest.function);
const result = performNewQuery(queryRequest.function, queryRequest.query, rows);
return result;
case 'table':
if (queryRequest.foreach) {
return {
"query": queryResponse,
rows: queryRequest.foreach.map(foreachFilterIds => {
const foreachFilter = makeForeachFilterExpression(foreachFilterIds);
const where: Expression = queryRequest.query.where
? { type: "and", expressions: [foreachFilter, queryRequest.query.where] }
: foreachFilter;
const filteredQuery = {
... queryRequest.query,
where
}
const queryResponse = performNewQuery(queryRequest.table, filteredQuery);
return {
"query": queryResponse,
};
})
};
})
};
} else {
return performNewQuery(queryRequest.table, queryRequest.query);
} else {
return performNewQuery(queryRequest.table, queryRequest.query);
}
}
};

View File

@ -68,9 +68,9 @@ export const reduceOrIterable = (iterable: Iterable<boolean>): boolean => {
return false;
}
export const tableNameEquals = (tableName1: TableName) => (tableName2: TableName): boolean => {
if (tableName1.length !== tableName2.length)
export const nameEquals = (name1: TableName) => (name2: TableName): boolean => {
if (name1.length !== name2.length)
return false;
return zip(tableName1, tableName2).every(([n1, n2]) => n1 === n2);
return zip(name1, name2).every(([n1, n2]) => n1 === n2);
}
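A quick sketch of the generalised helper: it compares any segmented name (table or function) element-wise, so the same predicate serves both lookups.

```typescript
nameEquals(["dbo", "Artist"])(["dbo", "Artist"]); // true
nameEquals(["Artist"])(["dbo", "Artist"]);        // false (different lengths)
nameEquals(["fibonacci"])(["fibonacci"]);         // true, works for function names too
```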

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"fastify": "^4.13.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
@ -57,7 +57,7 @@
"integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
},
"node_modules/@hasura/dc-api-types": {
"version": "0.30.0",
"version": "0.31.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.30.0",
"@hasura/dc-api-types": "0.31.0",
"fastify-metrics": "^9.2.1",
"fastify": "^4.13.0",
"nanoid": "^3.3.4",

View File

@ -131,11 +131,21 @@ server.post<{ Body: QueryRequest, Reply: QueryResponse }>("/query", async (reque
server.log.info({ headers: request.headers, query: request.body, }, "query.request");
const end = queryHistogram.startTimer()
const config = getConfig(request);
try {
const result : QueryResponse = await queryData(config, sqlLogger, request.body);
return result;
} finally {
end();
const body = request.body;
switch(body.type) {
case 'function':
throw new ErrorWithStatusCode(
"User defined functions not supported in queries",
500,
{function: { name: body.function }}
);
case 'table':
try {
const result : QueryResponse = await queryData(config, sqlLogger, body);
return result;
} finally {
end();
}
}
});
@ -149,7 +159,17 @@ server.post<{ Body: RawRequest, Reply: RawResponse }>("/raw", async (request, _r
server.post<{ Body: QueryRequest, Reply: ExplainResponse}>("/explain", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "query.request");
const config = getConfig(request);
return explain(config, sqlLogger, request.body);
const body = request.body;
switch(body.type) {
case 'function':
throw new ErrorWithStatusCode(
"User defined functions not supported in queries",
500,
{function: { name: body.function }}
);
case 'table':
return explain(config, sqlLogger, body);
}
});
if(MUTATIONS) {

View File

@ -24,6 +24,8 @@ import {
OrderByElement,
OrderByTarget,
ScalarValue,
TableRequest,
FunctionRelationships,
} from "@hasura/dc-api-types";
import { customAlphabet } from "nanoid";
import { DEBUGGING_TAGS, QUERY_LENGTH_LIMIT } from "./environment";
@ -650,9 +652,10 @@ function offset(o: number | null): string {
}
}
function query(request: QueryRequest): string {
function query(request: TableRequest): string {
const tableRelationships = only_table_relationships(request.table_relationships);
const result = table_query(
request.table_relationships,
tableRelationships,
request.table,
null,
coerceUndefinedToNull(request.query.fields),
@ -709,11 +712,12 @@ function foreach_ids_table_value(foreachIds: Record<string, ScalarValue>[]): str
* SELECT table_subquery AS data
* ```
*/
function foreach_query(foreachIds: Record<string, ScalarValue>[], request: QueryRequest): string {
function foreach_query(foreachIds: Record<string, ScalarValue>[], request: TableRequest): string {
const randomSuffix = nanoid();
const foreachTableName: TableName = [`foreach_ids_${randomSuffix}`];
const foreachRelationshipName = "Foreach";
const foreachTableRelationship: TableRelationships = {
type: 'table',
source_table: foreachTableName,
relationships: {
[foreachRelationshipName]: {
@ -732,8 +736,9 @@ function foreach_query(foreachIds: Record<string, ScalarValue>[], request: Query
};
const foreachIdsTableValue = foreach_ids_table_value(foreachIds);
const tableRelationships = only_table_relationships(request.table_relationships);
const tableSubquery = table_query(
[foreachTableRelationship, ...request.table_relationships],
[foreachTableRelationship, ...(tableRelationships)],
foreachTableName,
null,
foreachQueryFields,
@ -747,6 +752,14 @@ function foreach_query(foreachIds: Record<string, ScalarValue>[], request: Query
return tag('foreach_query', `WITH ${escapeTableName(foreachTableName)} AS (${foreachIdsTableValue}) SELECT ${tableSubquery} AS data`);
}
function only_table_relationships(all: Array<TableRelationships | FunctionRelationships>): Array<TableRelationships> {
return all.filter(isTableRelationship);
}
function isTableRelationship(relationships: TableRelationships | FunctionRelationships,): relationships is TableRelationships {
return (relationships as TableRelationships).source_table !== undefined;
}
/** Function to add SQL comments to the generated SQL to tag which procedures generated what text.
*
* comment('a','b') => '/*\<a>\*\/ b /*\</a>*\/'
@ -805,12 +818,12 @@ function tag(t: string, s: string): string {
* ```
*
*/
export async function queryData(config: Config, sqlLogger: SqlLogger, queryRequest: QueryRequest): Promise<QueryResponse> {
export async function queryData(config: Config, sqlLogger: SqlLogger, request: TableRequest): Promise<QueryResponse> {
return await withConnection(config, defaultMode, sqlLogger, async db => {
const q =
queryRequest.foreach
? foreach_query(queryRequest.foreach, queryRequest)
: query(queryRequest);
request.foreach
? foreach_query(request.foreach, request)
: query(request);
if(q.length > QUERY_LENGTH_LIMIT) {
const error = new ErrorWithStatusCode(
@ -839,9 +852,9 @@ export async function queryData(config: Config, sqlLogger: SqlLogger, queryReque
* @param queryRequest
* @returns
*/
export async function explain(config: Config, sqlLogger: SqlLogger, queryRequest: QueryRequest): Promise<ExplainResponse> {
export async function explain(config: Config, sqlLogger: SqlLogger, request: TableRequest): Promise<ExplainResponse> {
return await withConnection(config, defaultMode, sqlLogger, async db => {
const q = query(queryRequest);
const q = query(request);
const result = await db.query(`EXPLAIN QUERY PLAN ${q}`);
return {
query: q,
@ -866,3 +879,4 @@ type AnalysisEntry = {
parent: number,
detail: string
}

View File

@ -4464,6 +4464,9 @@
],
"type": "string"
},
"response": {
"additionalProperties": true
},
"session_argument": {
"type": "string"
}
@ -4492,6 +4495,23 @@
],
"type": "object"
},
"FunctionReturnType": {
"discriminator": {
"mapping": {
"inferred": "InferredFunctionResponse",
"table": "TableFunctionResponse"
},
"propertyName": "type"
},
"oneOf": [
{
"$ref": "#/components/schemas/InferredFunctionResponse"
},
{
"$ref": "#/components/schemas/TableFunctionResponse"
}
]
},
"GraphQLName": {
"type": "string"
},
@ -4578,6 +4598,20 @@
},
"type": "object"
},
"InferredFunctionResponse": {
"properties": {
"type": {
"enum": [
"inferred"
],
"type": "string"
}
},
"required": [
"type"
],
"type": "object"
},
"InputObjectFieldDefinition": {
"properties": {
"description": {
@ -8789,6 +8823,27 @@
},
"type": "object"
},
"TableFunctionResponse": {
"properties": {
"table": {
"items": {
"type": "string"
},
"type": "array"
},
"type": {
"enum": [
"table"
],
"type": "string"
}
},
"required": [
"table",
"type"
],
"type": "object"
},
"TlsAllow": {
"properties": {
"host": {

View File

@ -145,7 +145,7 @@ tests = describe "Aggregate Query Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Artist")
( emptyQuery
& API.qFields
@ -164,21 +164,22 @@ tests = describe "Aggregate Query Tests" $ do
]
& API.qLimit ?~ 1
)
& API.qrTableRelationships
& API.qrRelationships
.~ Set.fromList
[ API.TableRelationships
{ _trSourceTable = mkTableName "Artist",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "Albums",
API.Relationship
{ _rTargetTable = mkTableName "Album",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
[ API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Artist",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Albums",
API.Relationship
{ _rTargetTable = mkTableName "Album",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
]
)
@ -260,7 +261,7 @@ tests = describe "Aggregate Query Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Invoice")
( emptyQuery
& API.qFields
@ -284,21 +285,22 @@ tests = describe "Aggregate Query Tests" $ do
& API.qLimit ?~ 2
& API.qAggregatesLimit ?~ 2
)
& API.qrTableRelationships
& API.qrRelationships
.~ Set.fromList
[ API.TableRelationships
{ _trSourceTable = mkTableName "Invoice",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "InvoiceLines",
API.Relationship
{ _rTargetTable = mkTableName "InvoiceLine",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "InvoiceId", API.ColumnName "InvoiceId")]
}
)
]
}
[ API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Invoice",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "InvoiceLines",
API.Relationship
{ _rTargetTable = mkTableName "InvoiceLine",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "InvoiceId", API.ColumnName "InvoiceId")]
}
)
]
}
]
)

View File

@ -139,7 +139,7 @@ tests = describe "Basic Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields
@ -193,7 +193,7 @@ tests = describe "Basic Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Artist")
( emptyQuery
& API.qFields
@ -243,7 +243,7 @@ tests = describe "Basic Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Customer")
( emptyQuery
& API.qFields

View File

@ -88,7 +88,7 @@ tests = describe "Custom scalar parsing tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "MyCustomScalarsTable")
( emptyQuery
& API.qFields
@ -145,7 +145,7 @@ tests = describe "Custom scalar parsing tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "MyCustomScalarsTable")
( emptyQuery
& API.qFields

View File

@ -181,8 +181,8 @@ tests = do
& API.mrTableRelationships
.~ Set.fromList
[ API.TableRelationships
{ API._trSourceTable = mkTableName "Album",
API._trRelationships =
{ API._trelSourceTable = mkTableName "Album",
API._trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Artist",
API.Relationship
@ -288,8 +288,8 @@ tests = do
& API.mrTableRelationships
.~ Set.fromList
[ API.TableRelationships
{ API._trSourceTable = mkTableName "Album",
API._trRelationships =
{ API._trelSourceTable = mkTableName "Album",
API._trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Artist",
API.Relationship

View File

@ -93,7 +93,7 @@ tests = describe "Error Protocol Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields

View File

@ -173,8 +173,8 @@ tests = do
& API.mrTableRelationships
.~ Set.fromList
[ API.TableRelationships
{ API._trSourceTable = mkTableName "Album",
API._trRelationships =
{ API._trelSourceTable = mkTableName "Album",
API._trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Artist",
API.Relationship

View File

@ -114,7 +114,7 @@ tests = describe "Order By Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields
@ -157,7 +157,7 @@ tests = describe "Order By Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Artist")
( emptyQuery
& API.qFields ?~ mkFieldsMap [("Name", API.ColumnField (API.ColumnName "Name") (API.ScalarType "string"))]
@ -184,21 +184,22 @@ tests = describe "Order By Tests" $ do
]
)
)
& API.qrTableRelationships
& API.qrRelationships
.~ Set.fromList
[ API.TableRelationships
{ _trSourceTable = mkTableName "Artist",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "Albums",
API.Relationship
{ _rTargetTable = mkTableName "Album",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
[ API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Artist",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Albums",
API.Relationship
{ _rTargetTable = mkTableName "Album",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
]
)

View File

@ -193,7 +193,7 @@ tests = describe "Object Relationships Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Track")
( emptyQuery
& API.qFields
@ -216,30 +216,31 @@ tests = describe "Object Relationships Tests" $ do
]
& API.qLimit ?~ 1
)
& API.qrTableRelationships
& API.qrRelationships
.~ Set.fromList
[ API.TableRelationships
{ _trSourceTable = mkTableName "Track",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "Genre",
API.Relationship
{ _rTargetTable = mkTableName "Genre",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")]
}
),
( API.RelationshipName "MediaType",
API.Relationship
{ _rTargetTable = mkTableName "MediaType",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping =
HashMap.fromList
[(API.ColumnName "MediaTypeId", API.ColumnName "MediaTypeId")]
}
)
]
}
[ API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Track",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Genre",
API.Relationship
{ _rTargetTable = mkTableName "Genre",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")]
}
),
( API.RelationshipName "MediaType",
API.Relationship
{ _rTargetTable = mkTableName "MediaType",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping =
HashMap.fromList
[(API.ColumnName "MediaTypeId", API.ColumnName "MediaTypeId")]
}
)
]
}
]
)
@ -291,7 +292,7 @@ tests = describe "Object Relationships Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Track")
( emptyQuery
& API.qFields
@ -340,34 +341,36 @@ tests = describe "Object Relationships Tests" $ do
]
)
)
& API.qrTableRelationships
& API.qrRelationships
.~ Set.fromList
[ API.TableRelationships
{ _trSourceTable = mkTableName "Track",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "Album",
API.Relationship
{ _rTargetTable = mkTableName "Album",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "AlbumId", API.ColumnName "AlbumId")]
}
)
]
},
API.TableRelationships
{ _trSourceTable = mkTableName "Album",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "Artist",
API.Relationship
{ _rTargetTable = mkTableName "Artist",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
[ API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Track",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Album",
API.Relationship
{ _rTargetTable = mkTableName "Album",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "AlbumId", API.ColumnName "AlbumId")]
}
)
]
},
API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Album",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Artist",
API.Relationship
{ _rTargetTable = mkTableName "Artist",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
]
)
@ -400,7 +403,7 @@ tests = describe "Object Relationships Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Employee")
( emptyQuery
& API.qFields ?~ mkFieldsMap [("EmployeeId", API.ColumnField (API.ColumnName "EmployeeId") $ API.ScalarType "number")]
@ -431,34 +434,36 @@ tests = describe "Object Relationships Tests" $ do
)
(API.OrderByElement [API.RelationshipName "SupportRepForCustomers"] API.OrderByStarCountAggregate API.Descending :| [])
)
& API.qrTableRelationships
& API.qrRelationships
.~ Set.fromList
[ API.TableRelationships
{ _trSourceTable = mkTableName "Customer",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "SupportRep",
API.Relationship
{ _rTargetTable = mkTableName "Employee",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "SupportRepId", API.ColumnName "EmployeeId")]
}
)
]
},
API.TableRelationships
{ _trSourceTable = mkTableName "Employee",
_trRelationships =
HashMap.fromList
[ ( API.RelationshipName "SupportRepForCustomers",
API.Relationship
{ _rTargetTable = mkTableName "Customer",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "EmployeeId", API.ColumnName "SupportRepId")]
}
)
]
}
[ API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Customer",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "SupportRep",
API.Relationship
{ _rTargetTable = mkTableName "Employee",
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "SupportRepId", API.ColumnName "EmployeeId")]
}
)
]
},
API.RTable
API.TableRelationships
{ _trelSourceTable = mkTableName "Employee",
_trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "SupportRepForCustomers",
API.Relationship
{ _rTargetTable = mkTableName "Customer",
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList [(API.ColumnName "EmployeeId", API.ColumnName "SupportRepId")]
}
)
]
}
]
)

View File

@ -265,7 +265,7 @@ tests = do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields
@ -274,7 +274,8 @@ tests = do
("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string")
]
)
& API.qrForeach
& API._QRTable
. API.trForeach
?~ NonEmpty.fromList
[ HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))],
HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 2) (API.ScalarType "number"))]
@ -355,7 +356,7 @@ tests = do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields
@ -364,7 +365,8 @@ tests = do
("Title", API.ColumnField (API.ColumnName "Title") $ API.ScalarType "string")
]
)
& API.qrForeach
& API._QRTable
. API.trForeach
?~ NonEmpty.fromList
[ HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 3) (API.ScalarType "number"))],
HashMap.fromList [(API.ColumnName "AlbumId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))],
@ -457,7 +459,7 @@ tests = do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields
@ -467,7 +469,8 @@ tests = do
]
& API.qAggregates ?~ mkFieldsMap [("aggregate_count", API.StarCount)]
)
& API.qrForeach
& API._QRTable
. API.trForeach
?~ NonEmpty.fromList
[ HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 1) (API.ScalarType "number"))],
HashMap.fromList [(API.ColumnName "ArtistId", API.ScalarValue (J.Number 2) (API.ScalarType "number"))]

View File

@ -1,6 +1,6 @@
module Test.DataConnector.MockAgent.TestHelpers
( mkTableName,
mkQueryRequest,
mkTableRequest,
emptyQuery,
emptyMutationRequest,
mkRowsQueryResponse,
@ -18,8 +18,8 @@ import Hasura.Prelude
mkTableName :: Text -> API.TableName
mkTableName name = API.TableName (name :| [])
mkQueryRequest :: API.TableName -> API.Query -> API.QueryRequest
mkQueryRequest tableName query = API.QueryRequest tableName mempty query Nothing
mkTableRequest :: API.TableName -> API.Query -> API.QueryRequest
mkTableRequest tableName query = API.QRTable $ API.TableRequest tableName mempty query Nothing
emptyQuery :: API.Query
emptyQuery = API.Query Nothing Nothing Nothing Nothing Nothing Nothing Nothing

View File

@ -134,7 +134,7 @@ tests = describe "Transformed Configuration Tests" $ do
_mrrRecordedRequest
`shouldBe` Just
( Query $
mkQueryRequest
mkTableRequest
(mkTableName "Album")
( emptyQuery
& API.qFields

View File

@ -195,8 +195,8 @@ tests = do
& API.mrTableRelationships
.~ Set.fromList
[ API.TableRelationships
{ API._trSourceTable = mkTableName "Track",
API._trRelationships =
{ API._trelSourceTable = mkTableName "Track",
API._trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Genre",
API.Relationship
@ -384,8 +384,8 @@ tests = do
& API.mrTableRelationships
.~ Set.fromList
[ API.TableRelationships
{ API._trSourceTable = mkTableName "Track",
API._trRelationships =
{ API._trelSourceTable = mkTableName "Track",
API._trelRelationships =
HashMap.fromList
[ ( API.RelationshipName "Genre",
API.Relationship

View File

@ -78,7 +78,8 @@ library
servant-openapi3,
text,
unordered-containers,
http-media
http-media,
witch
exposed-modules:
Hasura.Backends.DataConnector.API
@ -89,6 +90,7 @@ library
Hasura.Backends.DataConnector.API.V0.ConfigSchema
Hasura.Backends.DataConnector.API.V0.ErrorResponse
Hasura.Backends.DataConnector.API.V0.Expression
Hasura.Backends.DataConnector.API.V0.Function
Hasura.Backends.DataConnector.API.V0.Mutations
Hasura.Backends.DataConnector.API.V0.OrderBy
Hasura.Backends.DataConnector.API.V0.Query
@ -175,4 +177,5 @@ test-suite tests-dc-api
Test.Specs.QuerySpec.OrderBySpec
Test.Specs.QuerySpec.RelationshipsSpec
Test.Specs.SchemaSpec
Test.Specs.UDFSpec
Test.TestHelpers

View File

@ -5,6 +5,7 @@ module Hasura.Backends.DataConnector.API.V0
module ConfigSchema,
module Expression,
module ErrorResponse,
module Function,
module Mutations,
module OrderBy,
module Query,
@ -26,6 +27,7 @@ import Hasura.Backends.DataConnector.API.V0.Dataset as Dataset
import Hasura.Backends.DataConnector.API.V0.ErrorResponse as ErrorResponse
import Hasura.Backends.DataConnector.API.V0.Explain as Explain
import Hasura.Backends.DataConnector.API.V0.Expression as Expression
import Hasura.Backends.DataConnector.API.V0.Function as Function
import Hasura.Backends.DataConnector.API.V0.Mutations as Mutations
import Hasura.Backends.DataConnector.API.V0.OrderBy as OrderBy
import Hasura.Backends.DataConnector.API.V0.Query as Query

View File

@ -21,6 +21,7 @@ module Hasura.Backends.DataConnector.API.V0.Capabilities
cExplain,
cRaw,
cDatasets,
cUserDefinedFunctions,
defaultCapabilities,
DataSchemaCapabilities (..),
dscSupportsPrimaryKeys,
@ -34,6 +35,7 @@ module Hasura.Backends.DataConnector.API.V0.Capabilities
MutationCapabilities (..),
InsertCapabilities (..),
UpdateCapabilities (..),
UserDefinedFunctionCapabilities (..),
DeleteCapabilities (..),
AtomicitySupportLevel (..),
ReturningCapabilities (..),
@ -101,6 +103,7 @@ data Capabilities = Capabilities
_cExplain :: Maybe ExplainCapabilities,
_cRaw :: Maybe RawCapabilities,
_cDatasets :: Maybe DatasetCapabilities,
_cUserDefinedFunctions :: Maybe UserDefinedFunctionCapabilities,
_cLicensing :: Maybe Licensing
}
deriving stock (Eq, Show, Generic)
@ -108,7 +111,7 @@ data Capabilities = Capabilities
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec Capabilities
defaultCapabilities :: Capabilities
defaultCapabilities = Capabilities defaultDataSchemaCapabilities Nothing Nothing Nothing mempty Nothing Nothing Nothing Nothing Nothing Nothing Nothing
defaultCapabilities = Capabilities defaultDataSchemaCapabilities Nothing Nothing Nothing mempty Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
instance HasCodec Capabilities where
codec =
@ -125,6 +128,7 @@ instance HasCodec Capabilities where
<*> optionalField "explain" "The agent's explain capabilities" .= _cExplain
<*> optionalField "raw" "The agent's raw query capabilities" .= _cRaw
<*> optionalField "datasets" "The agent's dataset capabilities" .= _cDatasets
<*> optionalField "user_defined_functions" "The agent's UDF capabilities" .= _cUserDefinedFunctions
<*> optionalField "licensing" "The agent's licensing requirements" .= _cLicensing
--------------------------------------------------------------------------------
@ -510,6 +514,15 @@ instance HasCodec DatasetCapabilities where
codec =
object "DatasetCapabilities" $ pure DatasetCapabilities
data UserDefinedFunctionCapabilities = UserDefinedFunctionCapabilities {}
deriving stock (Eq, Ord, Show, Generic, Data)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec UserDefinedFunctionCapabilities
instance HasCodec UserDefinedFunctionCapabilities where
codec =
object "UserDefinedFunctionCapabilities" $ pure UserDefinedFunctionCapabilities
data CapabilitiesResponse = CapabilitiesResponse
{ _crCapabilities :: Capabilities,
_crConfigSchemaResponse :: ConfigSchemaResponse,

View File

@ -0,0 +1,174 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Core representation of FunctionRequests and related types.
module Hasura.Backends.DataConnector.API.V0.Function
( FunctionName (..),
FunctionInfo (..),
FunctionType (..),
FunctionArg (..),
FunctionReturnType (..),
FunctionArity (..),
functionNameToText,
fiDescription,
fiFunctionType,
fiInputArgs,
fiName,
fiReturns,
fiResponseCardinality,
faInputArgOptional,
faInputArgName,
faInputArgType,
_FunctionReturnsTable,
_FunctionReturnsUnknown,
)
where
--------------------------------------------------------------------------------
import Autodocodec
import Autodocodec.OpenAPI ()
import Control.DeepSeq (NFData)
import Control.Lens.TH (makeLenses, makePrisms)
import Data.Aeson (FromJSON, ToJSON)
import Data.Data (Data)
import Data.HashMap.Strict as HashMap
import Data.Hashable (Hashable ())
import Data.List.NonEmpty qualified as NonEmpty
import Data.OpenApi (ToSchema)
import Data.Text (Text, intercalate)
import GHC.Generics (Generic)
import Hasura.Backends.DataConnector.API.V0.Scalar qualified as API
import Hasura.Backends.DataConnector.API.V0.Table qualified as API
import Witch qualified
import Prelude
--------------------------------------------------------------------------------
-- | The fully qualified name of a function. The last element in the list is the function name
-- and all other elements represent namespacing of the function name.
-- Matches the structure of TableName.
newtype FunctionName = FunctionName {unFunctionName :: NonEmpty.NonEmpty Text}
deriving stock (Eq, Ord, Show, Generic, Data)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec FunctionName
-- | Encode a text representation of a function name for display purposes
-- Note, this loses some fidelity due to not being a bidirectional encoding
-- so should only be used for display/logging purposes, not transport.
functionNameToText :: FunctionName -> Text
functionNameToText (FunctionName tns) = intercalate "." (NonEmpty.toList tns)
instance Witch.From (NonEmpty.NonEmpty Text) FunctionName
instance HasCodec FunctionName where
codec =
named "FunctionName" $
dimapCodec FunctionName unFunctionName codec
<?> "The fully qualified name of a function, where the last item in the array is the function name and any earlier items represent the namespacing of the function name"
--------------------------------------------------------------------------------
data FunctionType = FRead | FWrite
deriving stock (Eq, Ord, Show, Generic)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec FunctionType
instance HasCodec FunctionType where
codec = named "FunctionType" $ stringConstCodec [(FRead, "read"), (FWrite, "write")]
-- TODO: This should be extended to support positional args, etc. in future
-- Example: `data FunctionArgIdentifier = NamedArg Text | PositionalArg Int`
-- Serialized: `{ "type": "name", "name": "arg1" }` or `{ "type": "positional", "index": 0 }`
--
data FunctionArg = FunctionArg
{ _faInputArgName :: Text,
_faInputArgType :: API.ScalarType,
_faInputArgOptional :: Bool
}
deriving stock (Eq, Ord, Show, Generic)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec FunctionArg
instance HasCodec FunctionArg where
codec =
object "FunctionInformationArgument" $
FunctionArg
<$> requiredField "name" "The name of the argument" .= _faInputArgName
<*> requiredField "type" "The type of the argument" .= _faInputArgType
<*> optionalFieldWithDefault "optional" False "If the argument can be omitted" .= _faInputArgOptional
data FunctionReturnType
= FunctionReturnsTable API.TableName
| FunctionReturnsUnknown
-- TODO: Integrate Logical Model support
-- TODO: Integrate Scalar support
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec FunctionReturnType
-- This is very similar to the definitions for tableFunctionResponseCodec and FunctionResponse
-- in server/src-lib/Hasura/Function/Cache.hs and server/src-lib/Hasura/RQL/Types/Backend.hs
instance HasCodec FunctionReturnType where
codec =
named "FunctionReturnType" $
object "FunctionReturnType" $
discriminatedUnionCodec "type" enc dec
where
enc = \case
FunctionReturnsTable rt -> ("table", mapToEncoder rt (FunctionReturnsTable <$> requiredField' "table"))
FunctionReturnsUnknown -> ("unknown", mapToEncoder () (pureCodec FunctionReturnsUnknown))
dec =
HashMap.fromList
[ ("table", ("FunctionReturnsTable", mapToDecoder FunctionReturnsTable (requiredField' "table"))),
("unknown", ("FunctionReturnsUnknown", pure FunctionReturnsUnknown))
]
data FunctionArity = FunctionArityOne | FunctionArityMany
deriving stock (Eq, Show, Ord, Generic)
deriving anyclass (NFData, Hashable)
instance HasCodec FunctionArity where
codec =
stringConstCodec
[ (FunctionArityOne, "one"),
(FunctionArityMany, "many")
]
-- | Function schema data from the 'SchemaResponse'. -- TODO: Adapt this to more closely match: PGRawFunctionInfo
-- Just mostly reuse PGRawFunctionInfo for now, and refine as we think of how to refine it.
-- May want to represent this much more like a GraphQL definition.
data FunctionInfo = FunctionInfo
{ -- NOTE: Some fields from PG are omitted here due to initial implementation, or non-generality.
_fiName :: FunctionName,
_fiFunctionType :: FunctionType,
_fiReturns :: FunctionReturnType, -- Functions must currently return tables as per PG.
_fiResponseCardinality :: FunctionArity,
_fiInputArgs :: [FunctionArg], -- Args info is listed grouped unlike PG.
_fiDescription :: Maybe Text
}
deriving stock (Eq, Ord, Show, Generic)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec FunctionInfo
instance HasCodec FunctionInfo where
codec =
object "FunctionInfo" $
FunctionInfo
<$> requiredField "name" "The name of the function" .= _fiName
<*> requiredField "type" "read/write classification of the function" .= _fiFunctionType
<*> requiredField "returns" "The table listed in the schema that matches the return type of the function - to be relaxed in future" .= _fiReturns
<*> requiredField "response_cardinality" "'one' for a single object response, 'many' for a row set" .= _fiResponseCardinality
<*> requiredField "args" "Argument information - names/types" .= _fiInputArgs
<*> optionalFieldOrNull "description" "Description of the function" .= _fiDescription
--------------------------------------------------------------------------------
$(makeLenses ''FunctionInfo)
$(makeLenses ''FunctionArg)
$(makePrisms ''FunctionReturnType)
$(makePrisms ''FunctionArity)
$(makePrisms ''FunctionType)
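For illustration, a sketch of what the `FunctionReturnType` codec above serialises to, written as TypeScript literals (shapes read off the discriminated-union codec, not taken from generated types):

```typescript
// A function whose rows match an existing table's shape:
const returnsTable = { type: "table", table: ["Artist"] };

// A function whose return shape is not described by the schema:
const returnsUnknown = { type: "unknown" };
```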

View File

@ -78,6 +78,8 @@ import Prelude
--
-- The table relationships and insert schema represent metadata that will be
-- used by agents interpreting the operations, and are shared across all operations.
--
-- TODO: Does this need to be enhanced ala. QueryRequest to support FunctionRequests?
data MutationRequest = MutationRequest
{ _mrTableRelationships :: Set API.V0.TableRelationships,
_mrInsertSchema :: Set TableInsertSchema,

View File

@ -1,12 +1,28 @@
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
module Hasura.Backends.DataConnector.API.V0.Query
( QueryRequest (..),
qrTable,
qrTableRelationships,
_QRTable,
_QRFunction,
TableRequest (..),
pattern TableQueryRequest,
FunctionRequest (..),
pattern FunctionQueryRequest,
FunctionArgument (..),
ArgumentValue (..),
qrRelationships,
qrQuery,
qrForeach,
trTable,
trRelationships,
trQuery,
trForeach,
frFunction,
frFunctionArguments,
frQuery,
frRelationships,
FieldName (..),
Query (..),
qFields,
@ -39,7 +55,7 @@ where
import Autodocodec.Extended
import Autodocodec.OpenAPI ()
import Control.Arrow (left)
import Control.Lens (Lens', Prism', lens, prism')
import Control.Lens (Lens', Prism', Traversal', lens, prism')
import Control.Lens.TH (makeLenses, makePrisms)
import Data.Aeson (FromJSON, ToJSON, Value)
import Data.Aeson qualified as J
@ -58,6 +74,7 @@ import GHC.Show (appPrec, appPrec1)
import Hasura.Backends.DataConnector.API.V0.Aggregate qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Column qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Expression qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Function qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.OrderBy qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Relationships qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Scalar qualified as API.V0
@ -67,27 +84,138 @@ import Prelude
-- | A serializable request to retrieve structured data from some
-- source.
data QueryRequest = QueryRequest
{ _qrTable :: API.V0.TableName,
_qrTableRelationships :: Set API.V0.TableRelationships,
_qrQuery :: Query,
_qrForeach :: Maybe (NonEmpty (HashMap API.V0.ColumnName API.V0.ScalarValue))
}
data QueryRequest
= QRTable TableRequest
| QRFunction FunctionRequest
deriving stock (Eq, Ord, Show, Generic)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec QueryRequest
-- Can we build this with existing traversals without a case?
qrRelationships :: Lens' QueryRequest (Set API.V0.Relationships)
qrRelationships = lens get set
where
get (QRTable (TableRequest {_trRelationships})) = _trRelationships
get (QRFunction (FunctionRequest {_frRelationships})) = _frRelationships
set (QRTable qrt) r = QRTable (qrt {_trRelationships = r})
set (QRFunction qrf) r = QRFunction (qrf {_frRelationships = r})
qrQuery :: Lens' QueryRequest Query
qrQuery = lens get set
where
get (QRTable (TableRequest {_trQuery})) = _trQuery
get (QRFunction (FunctionRequest {_frQuery})) = _frQuery
set (QRTable qrt) x = QRTable (qrt {_trQuery = x})
set (QRFunction qrf) x = QRFunction (qrf {_frQuery = x})
instance HasCodec QueryRequest where
codec =
object "QueryRequest" $
QueryRequest
<$> requiredField "table" "The name of the table to query"
.= _qrTable
<*> requiredField "table_relationships" "The relationships between tables involved in the entire query request"
.= _qrTableRelationships
<*> requiredField "query" "The details of the query against the table"
.= _qrQuery
<*> optionalFieldOrNull "foreach" "If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query."
.= _qrForeach
named "QueryRequest" $
object "QueryRequest" $
discriminatedUnionCodec "type" enc dec
where
enc = \case
QRTable qrt -> ("table", mapToEncoder qrt objectCodec)
QRFunction qrf -> ("function", mapToEncoder qrf objectCodec)
dec =
HashMap.fromList
[ ("table", ("TableRequest", mapToDecoder QRTable objectCodec)),
("function", ("FunctionRequest", mapToDecoder QRFunction objectCodec))
]
pattern TableQueryRequest :: API.V0.TableName -> Set API.V0.Relationships -> Query -> Maybe (NonEmpty (HashMap API.V0.ColumnName API.V0.ScalarValue)) -> QueryRequest
pattern TableQueryRequest table relationships query foreach = QRTable (TableRequest table relationships query foreach)
-- | A serializable request to retrieve structured data from tables.
data TableRequest = TableRequest
{ _trTable :: API.V0.TableName,
_trRelationships :: Set API.V0.Relationships,
_trQuery :: Query,
_trForeach :: Maybe (NonEmpty (HashMap API.V0.ColumnName API.V0.ScalarValue))
}
deriving stock (Eq, Ord, Show, Generic)
instance HasObjectCodec TableRequest where
objectCodec =
TableRequest
<$> requiredField "table" "The name of the table to query"
.= _trTable
-- TODO: Rename this field to "relationships" at some point in the future ala FunctionRequest.
-- NOTE: This can't be done immediately as it would break compatibility in agents.
<*> requiredField "table_relationships" "The relationships between tables involved in the entire query request"
.= _trRelationships
<*> requiredField "query" "The details of the query against the table"
.= _trQuery
<*> optionalFieldOrNull "foreach" "If present, a list of columns and values for the columns that the query must be repeated for, applying the column values as a filter for each query."
.= _trForeach
pattern FunctionQueryRequest :: API.V0.FunctionName -> [FunctionArgument] -> Set API.V0.Relationships -> Query -> QueryRequest
pattern FunctionQueryRequest function args relationships query = QRFunction (FunctionRequest function args relationships query)
-- | A serializable request to compute structured data from a function.
data FunctionRequest = FunctionRequest
{ _frFunction :: API.V0.FunctionName,
_frFunctionArguments :: [FunctionArgument],
_frRelationships :: Set API.V0.Relationships,
_frQuery :: Query
}
deriving stock (Eq, Ord, Show, Generic)
-- | Note: Only named arguments are currently supported,
-- however this is reified explicitly so that it can be extended to ordinal or other argument types in the future.
-- We reuse the same type for the Codec and ObjectCodec since we only have one constructor but still
-- wish to make the type explicit.
data FunctionArgument = NamedArgument
{ _faName :: Text,
_faValue :: ArgumentValue
}
deriving stock (Eq, Ord, Show, Generic)
newtype ArgumentValue = ScalarArgumentValue
{ _savValue :: API.V0.ScalarValue
}
deriving stock (Eq, Ord, Show, Generic)
instance HasCodec ArgumentValue where
codec =
object "ArgumentValue" $
discriminatedUnionCodec "type" enc dec
where
enc = \case
(ScalarArgumentValue n) -> ("scalar", mapToEncoder n objectCodec)
dec =
HashMap.fromList
[ ("scalar", ("ScalarArgumentValue", mapToDecoder ScalarArgumentValue objectCodec))
]
namedArgumentObjectCodec :: JSONObjectCodec FunctionArgument
namedArgumentObjectCodec =
NamedArgument
<$> requiredField "name" "The name of the named argument" .= _faName
<*> requiredField "value" "The value of the named argument" .= _faValue
instance HasCodec FunctionArgument where
codec =
object "FunctionRequestArgument" $
discriminatedUnionCodec "type" enc dec
where
enc = \case
n -> ("named", mapToEncoder n namedArgumentObjectCodec)
dec =
HashMap.fromList
[ ("named", ("NamedArgument", mapToDecoder id namedArgumentObjectCodec))
]
instance HasObjectCodec FunctionRequest where
objectCodec =
FunctionRequest
<$> requiredField "function" "The name of the function to query"
.= _frFunction
<*> optionalFieldWithDefault "function_arguments" mempty "Function Arguments. TODO. Improve this."
.= _frFunctionArguments
<*> requiredField "relationships" "The relationships between entities involved in the entire query request"
.= _frRelationships
<*> requiredField "query" "The details of the query to run over the function's result"
.= _frQuery
newtype FieldName = FieldName {unFieldName :: Text}
deriving stock (Eq, Ord, Show, Generic, Data)
@ -335,7 +463,13 @@ _NestedObjFieldValue = prism' mkNestedObjFieldValue (either (const Nothing) Just
_NestedArrayFieldValue :: Prism' FieldValue [FieldValue]
_NestedArrayFieldValue = prism' mkNestedArrayFieldValue (either (const Nothing) Just . deserializeAsNestedArrayFieldValue)
$(makePrisms ''QueryRequest)
$(makeLenses ''TableRequest)
$(makeLenses ''FunctionRequest)
$(makeLenses ''QueryRequest)
$(makeLenses ''Query)
$(makeLenses ''QueryResponse)
$(makePrisms ''FieldValue)
qrForeach :: Traversal' QueryRequest (Maybe (NonEmpty (HashMap API.V0.ColumnName API.V0.ScalarValue)))
qrForeach = _QRTable . trForeach
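For orientation, a sketch of the two request shapes these codecs accept, written as TypeScript literals (the `ScalarValue` field names are assumptions read off the codecs, not a definitive wire format):

```typescript
// Table requests keep their original fields and gain the "type" discriminator;
// "table_relationships" keeps its old name for backwards compatibility (see the TODO above).
const tableRequest = {
  type: "table",
  table: ["Album"],
  table_relationships: [],
  query: {},
};

// Function requests carry named, discriminated arguments and a "relationships" field
// whose entries are themselves tagged as "table" or "function".
const functionRequest = {
  type: "function",
  function: ["fibonacci"],
  function_arguments: [
    { type: "named", name: "take", value: { type: "scalar", value: 10, value_type: "number" } },
  ],
  relationships: [],
  query: {},
};
```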

View File

@ -1,10 +1,17 @@
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TemplateHaskell #-}
module Hasura.Backends.DataConnector.API.V0.Relationships
( TableRelationships (..),
trSourceTable,
trRelationships,
( Relationships (..),
pattern RTableRelationships,
pattern RFunctionRelationships,
FunctionRelationships (..),
TableRelationships (..),
trelSourceTable,
trelRelationships,
frelRelationships,
frelSourceFunction,
Relationship (..),
rTargetTable,
rRelationshipType,
@ -19,7 +26,7 @@ where
import Autodocodec.Extended
import Autodocodec.OpenAPI ()
import Control.DeepSeq (NFData)
import Control.Lens (makeLenses)
import Control.Lens (makeLenses, makePrisms)
import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey)
import Data.Data (Data)
import Data.HashMap.Strict qualified as HashMap
@ -28,23 +35,72 @@ import Data.OpenApi (ToSchema)
import Data.Text (Text)
import GHC.Generics (Generic)
import Hasura.Backends.DataConnector.API.V0.Column qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Function qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Table qualified as API.V0
import Prelude
data Relationships = RTable TableRelationships | RFunction FunctionRelationships
deriving stock (Eq, Ord, Show, Generic, Data)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec Relationships
pattern RTableRelationships :: API.V0.TableName -> HashMap.HashMap RelationshipName Relationship -> Relationships
pattern RTableRelationships source rels = RTable (TableRelationships source rels)
pattern RFunctionRelationships :: API.V0.FunctionName -> HashMap.HashMap RelationshipName Relationship -> Relationships
pattern RFunctionRelationships source rels = RFunction (FunctionRelationships source rels)
instance HasCodec Relationships where
codec =
named "Relationships" $
object "Relationships" $
discriminatedUnionCodec "type" enc dec
where
enc = \case
RTable rt -> ("table", mapToEncoder rt objectCodec)
RFunction rf -> ("function", mapToEncoder rf objectCodec)
dec =
HashMap.fromList
[ ("table", ("TableRelationships", mapToDecoder RTable objectCodec)),
("function", ("FunctionRelationships", mapToDecoder RFunction objectCodec))
]
-- NOTE: Prefix is `trel` due to TableRequest conflicting with `tr` prefix.
data TableRelationships = TableRelationships
{ _trSourceTable :: API.V0.TableName,
_trRelationships :: HashMap.HashMap RelationshipName Relationship
{ _trelSourceTable :: API.V0.TableName,
_trelRelationships :: HashMap.HashMap RelationshipName Relationship
}
deriving stock (Eq, Ord, Show, Generic, Data)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec TableRelationships
instance HasCodec TableRelationships where
codec =
object "TableRelationships" $
TableRelationships
<$> requiredField "source_table" "The name of the source table in the relationship" .= _trSourceTable
<*> requiredField "relationships" "A map of relationships from the source table to target tables. The key of the map is the relationship name" .= _trRelationships
instance HasObjectCodec TableRelationships where
objectCodec =
TableRelationships
<$> requiredField "source_table" "The name of the source table in the relationship" .= _trelSourceTable
<*> requiredField "relationships" "A map of relationships from the source table to target tables. The key of the map is the relationship name" .= _trelRelationships
-- Note: This instance is defined because MutationRequest uses TableRelationships directly without wrapping it in RTable.
instance HasCodec TableRelationships where
codec = object "TableRelationships" $ typeTag *> objectFields
where
typeTag = requiredFieldWith' "type" (literalTextCodec "table") .= const "table"
objectFields =
TableRelationships
<$> requiredField "source_table" "The name of the source table in the relationship" .= _trelSourceTable
<*> requiredField "relationships" "A map of relationships from the source table to target tables. The key of the map is the relationship name" .= _trelRelationships
data FunctionRelationships = FunctionRelationships
{ _frelSourceFunction :: API.V0.FunctionName,
_frelRelationships :: HashMap.HashMap RelationshipName Relationship
}
deriving stock (Eq, Ord, Show, Generic, Data)
instance HasObjectCodec FunctionRelationships where
objectCodec =
FunctionRelationships
<$> requiredField "source_function" "The name of the source function in the relationship" .= _frelSourceFunction
<*> requiredField "relationships" "A map of relationships from the source function to target tables. The key of the map is the relationship name" .= _frelRelationships
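For orientation (not part of the diff), a minimal sketch of how the two `Relationships` variants can be built with the new pattern synonyms; the discriminated-union codec above tags the encoded object with `"type": "table"` or `"type": "function"` and uses a `source_table` or `source_function` field respectively. The table and function names below are illustrative, and the sketch assumes the pattern synonyms are exported from this module.

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty (NonEmpty (..))
import Hasura.Backends.DataConnector.API.V0.Function qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Relationships
import Hasura.Backends.DataConnector.API.V0.Table qualified as API.V0

-- Encodes with "type": "table" and a "source_table" field.
exampleTableRels :: Relationships
exampleTableRels =
  RTableRelationships (API.V0.TableName ("Album" :| [])) HashMap.empty

-- Encodes with "type": "function" and a "source_function" field.
exampleFunctionRels :: Relationships
exampleFunctionRels =
  RFunctionRelationships (API.V0.FunctionName ("fibonacci" :| [])) HashMap.empty
```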
-- Top-level separation of tables and functions should be adopted here too.
data Relationship = Relationship
{ _rTargetTable :: API.V0.TableName,
_rRelationshipType :: RelationshipType,
@ -84,3 +140,5 @@ type TargetColumnName = API.V0.ColumnName
$(makeLenses 'TableRelationships)
$(makeLenses 'Relationship)
$(makeLenses 'FunctionRelationships)
$(makePrisms ''Relationships)

View File

@ -15,6 +15,7 @@ import Data.OpenApi (ToSchema)
import Data.Text (Text)
import GHC.Generics (Generic)
import Hasura.Backends.DataConnector.API.V0.Column qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Function qualified as API.V0
import Hasura.Backends.DataConnector.API.V0.Table qualified as API.V0
import Language.GraphQL.Draft.Syntax qualified as G
import Servant.API qualified as Servant
@ -27,6 +28,7 @@ import Prelude
-- 'Capabilities' supported by the service.
data SchemaResponse = SchemaResponse
{ _srTables :: [API.V0.TableInfo],
_srFunctions :: [API.V0.FunctionInfo],
_srObjectTypes :: Maybe (NonEmpty ObjectTypeDefinition)
}
deriving stock (Eq, Show, Generic)
@ -38,6 +40,7 @@ instance HasCodec SchemaResponse where
object "SchemaResponse" $
SchemaResponse
<$> requiredField "tables" "Available tables" .= _srTables
<*> optionalFieldWithOmittedDefault "functions" [] "Available functions" .= _srFunctions
<*> optionalField "objectTypes" "Object type definitions referenced in this schema" .= _srObjectTypes
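Because `functions` is declared with `optionalFieldWithOmittedDefault`, a schema response from an agent that predates UDF support (and therefore omits the key) still decodes, with `_srFunctions` defaulting to `[]`. A minimal sketch, assuming `SchemaResponse` and its Autodocodec-derived `FromJSON` instance are in scope:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson qualified as J

-- A schema response from a pre-UDF agent: no "functions" key at all.
legacySchema :: Maybe SchemaResponse
legacySchema = J.decode "{\"tables\": []}"
-- expected: Just (SchemaResponse {_srTables = [], _srFunctions = [], _srObjectTypes = Nothing})
```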
instance Servant.HasStatus SchemaResponse where

View File

@ -5,6 +5,7 @@
module Hasura.Backends.DataConnector.API.V0.Table
( TableName (..),
TableInfo (..),
singletonTableName,
tableNameToText,
tiName,
tiType,
@ -53,6 +54,9 @@ newtype TableName = TableName {unTableName :: NonEmpty.NonEmpty Text}
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec TableName
singletonTableName :: Text -> TableName
singletonTableName name = TableName (NonEmpty.singleton name)
tableNameToText :: TableName -> Text
tableNameToText (TableName tns) = intercalate "." (NonEmpty.toList tns)

View File

@ -47,6 +47,8 @@ newtype SandwichArguments = SandwichArguments [String]
data TestConfig = TestConfig
{ _tcTableNamePrefix :: [Text],
_tcTableNameCasing :: NameCasing,
_tcFunctionNamePrefix :: [Text],
_tcFunctionNameCasing :: NameCasing,
_tcColumnNameCasing :: NameCasing
}
@ -167,6 +169,21 @@ testConfigParser =
<> help ("The casing style to use for table names (" <> casingOptions <> "). Default: PascalCase")
<> value PascalCase
)
<*> option
jsonValue
( long "function-name-prefix"
<> short 'f'
<> metavar "FUNCTION_PREFIX"
<> help "The prefix to use for all function names, as a JSON array of strings"
<> value []
)
<*> option
auto
( long "function-name-casing"
<> metavar "FUNCTION_CASING"
<> help ("The casing style to use for function names (" <> casingOptions <> "). Default: PascalCase")
<> value PascalCase
)
<*> option
auto
( long "column-name-casing"

View File

@ -2,7 +2,7 @@ module Main (main) where
--------------------------------------------------------------------------------
import Command (AgentConfig (..), AgentOptions (..), Command (..), SandwichArguments (..), TestOptions (..), parseCommandLine)
import Command (AgentConfig (..), AgentOptions (..), Command (..), SandwichArguments (..), TestConfig, TestOptions (..), parseCommandLine)
import Control.Exception (bracket)
import Data.Aeson.Text (encodeToLazyText)
import Data.ByteString.Char8 qualified as Char8
@ -15,7 +15,7 @@ import Servant.Client ((//))
import System.Environment qualified as Env
import Test.AgentAPI (guardCapabilitiesResponse, guardSchemaResponse, mergeAgentConfig)
import Test.AgentClient (AgentAuthKey (..), AgentIOClient (..), introduceAgentClient, mkAgentClientConfig, mkAgentIOClient)
import Test.AgentDatasets (DatasetCloneInfo (..), chinookTemplate, createClone, deleteClone, testingEdgeCasesTemplate, usesDataset)
import Test.AgentDatasets (DatasetCloneInfo (..), chinookTemplate, createClone, deleteClone, functionsTemplate, testingEdgeCasesTemplate, usesDataset)
import Test.AgentTestContext (AgentTestContext (..), introduceAgentTestContext)
import Test.Data (EdgeCasesTestData, TestData, mkEdgeCasesTestData, mkTestData)
import Test.DataExport (exportData)
@ -29,6 +29,7 @@ import Test.Specs.MetricsSpec qualified
import Test.Specs.MutationSpec qualified
import Test.Specs.QuerySpec qualified
import Test.Specs.SchemaSpec qualified
import Test.Specs.UDFSpec qualified
import Test.TestHelpers (AgentTestSpec)
import Prelude
@ -37,8 +38,8 @@ import Prelude
testSourceName :: API.SourceName
testSourceName = "dc-api-tests"
tests :: TestData -> Maybe EdgeCasesTestData -> API.CapabilitiesResponse -> AgentTestSpec
tests testData edgeCasesTestData capabilitiesResponse@API.CapabilitiesResponse {..} = do
tests :: TestData -> TestConfig -> Maybe EdgeCasesTestData -> API.CapabilitiesResponse -> AgentTestSpec
tests testData testConfig edgeCasesTestData capabilitiesResponse@API.CapabilitiesResponse {..} = do
usesDataset chinookTemplate $ do
Test.Specs.HealthSpec.spec
Test.Specs.CapabilitiesSpec.spec capabilitiesResponse
@ -48,6 +49,9 @@ tests testData edgeCasesTestData capabilitiesResponse@API.CapabilitiesResponse {
for_ (API._cMetrics _crCapabilities) \m -> Test.Specs.MetricsSpec.spec m
for_ (API._cExplain _crCapabilities) \_ -> Test.Specs.ExplainSpec.spec testData _crCapabilities
for_ (API._cMutations _crCapabilities) \_ -> Test.Specs.MutationSpec.spec testData edgeCasesTestData _crCapabilities
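-- The UDF specs run only when the agent advertises the user_defined_functions capability,
-- and they operate against a clone of the Functions dataset template.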
for_ (API._cUserDefinedFunctions _crCapabilities) \_ -> do
usesDataset functionsTemplate do
Test.Specs.UDFSpec.spec testConfig _crCapabilities
getCloneSchema :: Maybe API.Config -> API.DatasetTemplateName -> AgentIOClient -> IO API.SchemaResponse
getCloneSchema mergeConfig datasetTemplate (AgentIOClient agentClient) =
@ -100,7 +104,7 @@ main = do
let testContext = AgentTestContext testSourceName agentCapabilities (_aoAgentConfig _toAgentOptions)
runSandwichWithCommandLineArgs Sandwich.defaultOptions $
introduceAgentTestContext testContext . introduceAgentClient agentClientConfig $
tests testData edgeCasesTestData agentCapabilities
tests testData _toTestConfig edgeCasesTestData agentCapabilities
pure ()
ExportOpenAPISpec ->
Text.putStrLn $ encodeToLazyText openApiSchema

View File

@ -5,6 +5,7 @@ module Test.AgentDatasets
DatasetCloneInfo (..),
usesDataset,
chinookTemplate,
functionsTemplate,
testingEdgeCasesTemplate,
HasDatasetContext,
getDatasetContext,
@ -34,6 +35,9 @@ import Prelude
chinookTemplate :: API.DatasetTemplateName
chinookTemplate = API.DatasetTemplateName "Chinook"
functionsTemplate :: API.DatasetTemplateName
functionsTemplate = API.DatasetTemplateName "Functions"
testingEdgeCasesTemplate :: API.DatasetTemplateName
testingEdgeCasesTemplate = API.DatasetTemplateName "TestingEdgeCases"

View File

@ -10,6 +10,9 @@ module Test.Data
-- = TestingEdgeCases Test Data
EdgeCasesTestData (..),
mkEdgeCasesTestData,
-- = Functions Test Data
FunctionsTestData (..),
mkFunctionsTestData,
-- = Utilities
emptyQuery,
emptyMutationRequest,
@ -339,6 +342,12 @@ genresTableName = mkTableName "Genre"
genresRows :: [HashMap API.FieldName API.FieldValue]
genresRows = sortBy (API.FieldName "GenreId") $ readTableFromXmlIntoRows genresTableName
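-- Expected rows for the Fibonacci UDF in the UDF specs: a single "Value" column
-- containing 1, 1, 2, 3, 5, ...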
mkFibonacciRows :: Int -> [HashMap API.FieldName API.FieldValue]
mkFibonacciRows n = take n $ fibonacciRow <$> fibs
where
fibs = 1 : 1 : zipWith (+) fibs (tail fibs)
fibonacciRow x = HashMap.singleton (API.FieldName "Value") (API.mkColumnFieldValue (J.Number x))
genresTableRelationships :: API.TableRelationships
genresTableRelationships =
let joinFieldMapping = HashMap.fromList [(API.ColumnName "GenreId", API.ColumnName "GenreId")]
@ -516,12 +525,12 @@ mkTestData schemaResponse testConfig =
formatTableRelationships :: API.TableRelationships -> API.TableRelationships
formatTableRelationships =
prefixTableRelationships
>>> API.trRelationships . traverse . API.rColumnMapping %~ (HashMap.toList >>> fmap (bimap (formatColumnName testConfig) (formatColumnName testConfig)) >>> HashMap.fromList)
>>> API.trelRelationships . traverse . API.rColumnMapping %~ (HashMap.toList >>> fmap (bimap (formatColumnName testConfig) (formatColumnName testConfig)) >>> HashMap.fromList)
prefixTableRelationships :: API.TableRelationships -> API.TableRelationships
prefixTableRelationships =
API.trSourceTable %~ formatTableName testConfig
>>> API.trRelationships . traverse . API.rTargetTable %~ formatTableName testConfig
API.trelSourceTable %~ formatTableName testConfig
>>> API.trelRelationships . traverse . API.rTargetTable %~ formatTableName testConfig
-- | Test data from the TestingEdgeCases dataset template
data EdgeCasesTestData = EdgeCasesTestData
@ -562,9 +571,30 @@ mkEdgeCasesTestData testConfig schemaResponse =
defaultedPrimaryKeyTableName = formatTableName testConfig (API.TableName $ "DefaultedPrimaryKey" :| [])
allColumnsDefaultableTableName = formatTableName testConfig (API.TableName $ "AllColumnsDefaultable" :| [])
-- | Test data from the FunctionsTestData dataset template
data FunctionsTestData = FunctionsTestData
{ -- = Functions
_ftdFunctionField :: API.FunctionName -> Text -> API.Field,
_ftdFibonacciRows :: Int -> [HashMap API.FieldName API.FieldValue],
_ftdFibonacciFunctionName :: API.FunctionName,
_ftdSearchArticlesFunctionName :: API.FunctionName
}
mkFunctionsTestData :: API.SchemaResponse -> TestConfig -> FunctionsTestData
mkFunctionsTestData schemaResponse testConfig =
FunctionsTestData
{ _ftdFunctionField = functionField schemaResponse testConfig,
_ftdFibonacciRows = mkFibonacciRows,
_ftdFibonacciFunctionName = formatFunctionName testConfig (API.FunctionName (NonEmpty.singleton "Fibonacci")),
_ftdSearchArticlesFunctionName = formatFunctionName testConfig (API.FunctionName (NonEmpty.singleton "SearchArticles"))
}
formatTableName :: TestConfig -> API.TableName -> API.TableName
formatTableName TestConfig {..} = applyTableNamePrefix _tcTableNamePrefix . API.TableName . fmap (applyNameCasing _tcTableNameCasing) . API.unTableName
formatFunctionName :: TestConfig -> API.FunctionName -> API.FunctionName
formatFunctionName TestConfig {..} = applyFunctionNamePrefix _tcFunctionNamePrefix . API.FunctionName . fmap (applyNameCasing _tcFunctionNameCasing) . API.unFunctionName
formatTableInfo :: TestConfig -> API.TableInfo -> API.TableInfo
formatTableInfo testConfig =
API.tiName %~ formatTableName testConfig
@ -581,6 +611,12 @@ applyTableNamePrefix prefix tableName@(API.TableName rawTableName) =
Just prefix' -> API.TableName (prefix' <> rawTableName)
Nothing -> tableName
applyFunctionNamePrefix :: [Text] -> API.FunctionName -> API.FunctionName
applyFunctionNamePrefix prefix functionName@(API.FunctionName rawFunctionName) =
case NonEmpty.nonEmpty prefix of
Just prefix' -> API.FunctionName (prefix' <> rawFunctionName)
Nothing -> functionName
applyNameCasing :: NameCasing -> Text -> Text
applyNameCasing casing text = case casing of
PascalCase -> text
@ -597,6 +633,14 @@ columnField schemaResponse testConfig tableName columnName =
columnName' = formatColumnName testConfig $ API.ColumnName columnName
scalarType = findColumnScalarType schemaResponse tableName columnName'
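-- | Build a column field for a function's result set by resolving the function's
-- table return type from the schema and delegating to 'columnField'. Errors if the
-- function is not in the schema or does not return a table.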
functionField :: API.SchemaResponse -> TestConfig -> API.FunctionName -> Text -> API.Field
functionField schemaResponse@API.SchemaResponse {..} testConfig functionName columnName =
columnField schemaResponse testConfig tableName columnName
where
tableName = fromMaybe (error $ "Return type of function " <> show functionName <> " not supported") (functionReturnType ^? API._FunctionReturnsTable)
functionReturnType = maybe (error $ "Can't find the function " <> show functionName <> " in " <> show (API._fiName <$> _srFunctions)) API._fiReturns functionInfo
functionInfo = find (\API.FunctionInfo {..} -> _fiName == functionName) _srFunctions
mkDefaultTableInsertSchema :: API.SchemaResponse -> TestConfig -> [API.TableInfo] -> API.TableName -> API.TableInsertSchema
mkDefaultTableInsertSchema schemaResponse testConfig expectedSchemaTables tableName =
API.TableInsertSchema
@ -681,7 +725,7 @@ renameColumns columns =
onlyKeepRelationships :: [API.RelationshipName] -> API.TableRelationships -> API.TableRelationships
onlyKeepRelationships names tableRels =
tableRels & API.trRelationships %~ HashMap.filterWithKey (\relName _ -> relName `elem` names)
tableRels & API.trelRelationships %~ HashMap.filterWithKey (\relName _ -> relName `elem` names)
queryFields :: API.Query -> HashMap API.FieldName API.Field
queryFields = fromMaybe mempty . API._qFields

View File

@ -27,6 +27,6 @@ spec TestData {..} = describe "Error Protocol" do
(CustomBinaryComparisonOperator "FOOBAR")
(_tdCurrentComparisonColumn "ArtistId" artistIdScalarType)
(Data.scalarValueComparison (Number 1) $ artistIdScalarType)
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
artistIdScalarType = _tdFindColumnScalarType _tdArtistsTableName "ArtistId"

View File

@ -1,7 +1,9 @@
{-# LANGUAGE PatternSynonyms #-}
module Test.Specs.ExplainSpec (spec) where
import Control.Lens ((&), (?~))
import Hasura.Backends.DataConnector.API (Capabilities (..), ExplainResponse (..), QueryRequest (..), qFields)
import Hasura.Backends.DataConnector.API (Capabilities (..), ExplainResponse (..), QueryRequest (..), qFields, pattern TableQueryRequest)
import Test.AgentAPI (explain)
import Test.Data (TestData (..))
import Test.Data qualified as Data
@ -26,4 +28,4 @@ spec TestData {..} _ = do
artistsQueryRequest =
let fields = Data.mkFieldsMap [("ArtistId", _tdColumnField _tdArtistsTableName "ArtistId"), ("Name", _tdColumnField _tdArtistsTableName "Name")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing

View File

@ -12,6 +12,7 @@ import Data.List.NonEmpty qualified as NonEmpty
import Data.Maybe (fromMaybe, maybeToList)
import Data.Set qualified as Set
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Test.AgentAPI (mutationGuarded, queryGuarded)
import Test.AgentDatasets (chinookTemplate, usesDataset)
import Test.Data (EdgeCasesTestData (..), TestData (..))
@ -138,7 +139,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Delete Mutati
pure $ track ^? Data.field "Composer" . Data._ColumnFieldString /= Just "Eric Clapton"
)
receivedInvoiceLines <- Data.sortResponseRowsBy "InvoiceLineId" <$> queryGuarded (invoiceLinesQueryRequest & qrTableRelationships .~ tableRelationships)
receivedInvoiceLines <- Data.sortResponseRowsBy "InvoiceLineId" <$> queryGuarded (invoiceLinesQueryRequest & qrRelationships .~ Set.map API.RTable tableRelationships)
Data.responseRows receivedInvoiceLines `rowsShouldBe` expectedRemainingRows
for_ (_cMutations >>= _mcReturningCapabilities) $ \_returningCapabilities -> describe "returning" $ do
@ -452,7 +453,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Delete Mutati
invoiceLinesQueryRequest :: QueryRequest
invoiceLinesQueryRequest =
let query = Data.emptyQuery & qFields ?~ invoiceLinesFields & qOrderBy ?~ OrderBy mempty (_tdOrderByColumn [] "InvoiceId" Ascending :| [])
in QueryRequest _tdInvoiceLinesTableName mempty query Nothing
in TableQueryRequest _tdInvoiceLinesTableName mempty query Nothing
invoiceIdScalarType = _tdFindColumnScalarType _tdInvoiceLinesTableName "InvoiceId"
invoiceLineIdScalarType = _tdFindColumnScalarType _tdInvoiceLinesTableName "InvoiceLineId"

View File

@ -717,7 +717,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Insert Mutati
Data.emptyQuery
& qFields ?~ mkFieldsFromExpectedData _tdArtistsTableName (expectedInsertedArtists artistsStartingId)
& qWhere ?~ ApplyBinaryArrayComparisonOperator In (_tdCurrentComparisonColumn "ArtistId" artistIdScalarType) (J.Number . fromInteger <$> artistIds) artistIdScalarType
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
albumsQueryRequest :: [Integer] -> QueryRequest
albumsQueryRequest albumIds =
@ -725,7 +725,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Insert Mutati
Data.emptyQuery
& qFields ?~ mkFieldsFromExpectedData _tdAlbumsTableName (expectedInsertedAcdcAlbums albumsStartingId)
& qWhere ?~ ApplyBinaryArrayComparisonOperator In (_tdCurrentComparisonColumn "AlbumId" albumIdScalarType) (J.Number . fromInteger <$> albumIds) albumIdScalarType
in QueryRequest _tdAlbumsTableName mempty query Nothing
in TableQueryRequest _tdAlbumsTableName mempty query Nothing
employeesQueryRequest :: [Integer] -> QueryRequest
employeesQueryRequest employeeIds =
@ -733,7 +733,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Insert Mutati
Data.emptyQuery
& qFields ?~ mkFieldsFromExpectedData _tdEmployeesTableName (expectedInsertedEmployees employeesStartingId)
& qWhere ?~ ApplyBinaryArrayComparisonOperator In (_tdCurrentComparisonColumn "EmployeeId" albumIdScalarType) (J.Number . fromInteger <$> employeeIds) employeeIdScalarType
in QueryRequest _tdEmployeesTableName mempty query Nothing
in TableQueryRequest _tdEmployeesTableName mempty query Nothing
artistsInsertSchema :: TableInsertSchema
artistsInsertSchema = _tdMkDefaultTableInsertSchema _tdArtistsTableName

View File

@ -13,6 +13,7 @@ import Data.List.NonEmpty (NonEmpty (..))
import Data.Maybe (catMaybes, mapMaybe, maybeToList)
import Data.Set qualified as Set
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Language.GraphQL.Draft.Syntax.QQ qualified as G
import Test.AgentAPI (mutationExpectError, mutationGuarded, queryGuarded)
import Test.AgentDatasets (chinookTemplate, usesDataset)
@ -124,7 +125,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Update Mutati
)
& fmap (\artist -> artist & Data.field "Name" . Data._ColumnFieldString .~ "Metalika")
receivedArtists <- Data.sortResponseRowsBy "ArtistId" <$> queryGuarded (artistsQueryRequest whereExp & qrTableRelationships .~ tableRelationships)
receivedArtists <- Data.sortResponseRowsBy "ArtistId" <$> queryGuarded (artistsQueryRequest whereExp & qrRelationships .~ Set.map API.RTable tableRelationships)
Data.responseRows receivedArtists `rowsShouldBe` expectedModifiedRows
usesDataset chinookTemplate $ it "can set the value of a column differently using multiple operations" $ do
@ -385,7 +386,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Update Mutati
let invoiceLineIds = expectedModifiedRows & mapMaybe (^? Data.field "InvoiceLineId" . Data._ColumnFieldNumber) & fmap J.Number
let alternateWhereExp = ApplyBinaryArrayComparisonOperator In (_tdCurrentComparisonColumn "InvoiceLineId" invoiceLineIdScalarType) invoiceLineIds invoiceLineIdScalarType
receivedInvoiceLines <- Data.sortResponseRowsBy "InvoiceLineId" <$> queryGuarded (invoiceLinesQueryRequest alternateWhereExp & qrTableRelationships .~ tableRelationships)
receivedInvoiceLines <- Data.sortResponseRowsBy "InvoiceLineId" <$> queryGuarded (invoiceLinesQueryRequest alternateWhereExp & qrRelationships .~ Set.map API.RTable tableRelationships)
Data.responseRows receivedInvoiceLines `rowsShouldBe` expectedModifiedRows
usesDataset chinookTemplate $ it "fails to update when post update check against related table fails" $ do
@ -718,7 +719,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Update Mutati
artistsQueryRequest :: Expression -> QueryRequest
artistsQueryRequest whereExp =
let query = Data.emptyQuery & qFields ?~ artistsFields & qWhere ?~ whereExp
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
invoiceLinesFields :: HashMap FieldName Field
invoiceLinesFields =
@ -733,7 +734,7 @@ spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Update Mutati
invoiceLinesQueryRequest :: Expression -> QueryRequest
invoiceLinesQueryRequest whereExp =
let query = Data.emptyQuery & qFields ?~ invoiceLinesFields & qWhere ?~ whereExp
in QueryRequest _tdInvoiceLinesTableName mempty query Nothing
in TableQueryRequest _tdInvoiceLinesTableName mempty query Nothing
incOperator :: UpdateColumnOperatorName
incOperator = UpdateColumnOperatorName $ [G.name|inc|]

View File

@ -3,7 +3,7 @@
module Test.Specs.QuerySpec.AggregatesSpec (spec) where
import Control.Arrow ((>>>))
import Control.Lens (ix, (%~), (&), (.~), (?~), (^?), _Just)
import Control.Lens (ix, (%~), (&), (.~), (?~), (^.), (^?), _Just)
import Control.Monad (when)
import Data.Aeson (Value (..))
import Data.HashMap.Strict (HashMap)
@ -16,6 +16,7 @@ import Data.Maybe (fromMaybe, isJust, mapMaybe)
import Data.Ord (Down (..))
import Data.Set qualified as Set
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Language.GraphQL.Draft.Syntax.QQ qualified as G
import Test.AgentAPI (queryGuarded)
import Test.Data (TestData (..))
@ -306,7 +307,7 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
& aggregate (Number . minimum)
let expectedAggregates = Data.mkFieldsMap [("min", maxTotal)]
let expectedRows = Data.filterColumnsByQueryFields (_qrQuery queryRequest) <$> invoiceRows
let expectedRows = Data.filterColumnsByQueryFields (queryRequest ^. qrQuery) <$> invoiceRows
Data.responseRows response `rowsShouldBe` expectedRows
Data.responseAggregates response `jsonShouldBe` expectedAggregates
@ -321,7 +322,7 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
let invoiceCount = length _tdInvoicesRows
let expectedAggregates = Data.mkFieldsMap [("count_all", Number $ fromIntegral invoiceCount)]
let expectedRows = take limit $ Data.filterColumnsByQueryFields (_qrQuery queryRequest) <$> _tdInvoicesRows
let expectedRows = take limit $ Data.filterColumnsByQueryFields (queryRequest ^. qrQuery) <$> _tdInvoicesRows
Data.responseAggregates response `jsonShouldBe` expectedAggregates
Data.responseRows response `rowsShouldBe` expectedRows
@ -356,7 +357,7 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
& aggregate (Number . minimum)
let expectedAggregates = Data.mkFieldsMap [("min", maxTotal)]
let expectedRows = Data.filterColumnsByQueryFields (_qrQuery queryRequest) <$> invoiceRows
let expectedRows = Data.filterColumnsByQueryFields (queryRequest ^. qrQuery) <$> invoiceRows
Data.responseRows response `rowsShouldBe` expectedRows
Data.responseAggregates response `jsonShouldBe` expectedAggregates
@ -521,7 +522,7 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
let aggregates = Data.mkFieldsMap [("max", singleColumnAggregateMax (_tdColumnName "Title") albumTitleScalarType)]
let queryRequest =
albumsQueryRequest
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrQuery
%~ ( qAggregates ?~ aggregates
>>> qOrderBy ?~ orderBy
@ -558,7 +559,7 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
let aggregates = Data.mkFieldsMap [("max", singleColumnAggregateMax (_tdColumnName "Title") albumTitleScalarType)]
let queryRequest =
albumsQueryRequest
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships]
& qrQuery
%~ ( qAggregates ?~ aggregates
>>> qOrderBy ?~ orderBy
@ -596,7 +597,7 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
artistOrderBy = OrderBy mempty $ _tdOrderByColumn [] "ArtistId" Ascending :| []
artistQuery = Data.emptyQuery & qFields ?~ artistFields & qOrderBy ?~ artistOrderBy
artistsTableRelationships = Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships
in QueryRequest _tdArtistsTableName (Set.fromList [artistsTableRelationships]) artistQuery Nothing
in QRTable $ TableRequest _tdArtistsTableName (Set.fromList [API.RTable artistsTableRelationships]) artistQuery Nothing
-- This query is basically what would be generated by this complex HGE GraphQL query
-- @
@ -664,12 +665,12 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
]
artistOrderBy = OrderBy mempty $ _tdOrderByColumn [] "Name" Descending :| []
artistQuery = Query (Just artistFields) Nothing Nothing (Just 3) (Just 1) (Just artistWhere) (Just artistOrderBy)
in QueryRequest
in TableQueryRequest
_tdArtistsTableName
( Set.fromList
[ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships,
Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships,
Data.onlyKeepRelationships [_tdInvoiceLinesRelationshipName, _tdMediaTypeRelationshipName] _tdTracksTableRelationships
[ API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdInvoiceLinesRelationshipName, _tdMediaTypeRelationshipName] _tdTracksTableRelationships
]
)
artistQuery
@ -678,16 +679,16 @@ spec TestData {..} relationshipCapabilities = describe "Aggregate Queries" $ do
artistsQueryRequest :: HashMap FieldName Aggregate -> QueryRequest
artistsQueryRequest aggregates =
let query = Data.emptyQuery & qAggregates ?~ aggregates
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
invoicesQueryRequest :: HashMap FieldName Aggregate -> QueryRequest
invoicesQueryRequest aggregates =
let query = Data.emptyQuery & qAggregates ?~ aggregates
in QueryRequest _tdInvoicesTableName mempty query Nothing
in TableQueryRequest _tdInvoicesTableName mempty query Nothing
albumsQueryRequest :: QueryRequest
albumsQueryRequest =
QueryRequest _tdAlbumsTableName mempty Data.emptyQuery Nothing
TableQueryRequest _tdAlbumsTableName mempty Data.emptyQuery Nothing
aggregate :: (NonEmpty a -> Value) -> [a] -> Value
aggregate aggFn values =

View File

@ -74,10 +74,10 @@ spec TestData {..} = describe "Basic Queries" $ do
artistsQueryRequest =
let fields = Data.mkFieldsMap [("ArtistId", _tdColumnField _tdArtistsTableName "ArtistId"), ("Name", _tdColumnField _tdArtistsTableName "Name")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
albumsQueryRequest :: QueryRequest
albumsQueryRequest =
let fields = Data.mkFieldsMap [("AlbumId", _tdColumnField _tdAlbumsTableName "AlbumId"), ("ArtistId", _tdColumnField _tdAlbumsTableName "ArtistId"), ("Title", _tdColumnField _tdAlbumsTableName "Title")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdAlbumsTableName mempty query Nothing
in TableQueryRequest _tdAlbumsTableName mempty query Nothing

View File

@ -36,7 +36,7 @@ spec TestData {..} (ScalarTypesCapabilities scalarTypesCapabilities) = describe
let queryRequest =
let fields = Data.mkFieldsMap [(unColumnName columnName, _tdColumnField tableName (unColumnName columnName))]
query' = Data.emptyQuery & qFields ?~ fields
in QueryRequest tableName mempty query' Nothing
in TableQueryRequest tableName mempty query' Nothing
where' =
ApplyBinaryComparisonOperator
(CustomBinaryComparisonOperator (unName operatorName))

View File

@ -10,6 +10,7 @@ import Data.List (sortOn)
import Data.Maybe (isJust, mapMaybe)
import Data.Set qualified as Set
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Test.AgentAPI (queryGuarded)
import Test.Data (TestData (..))
import Test.Data qualified as Data
@ -228,7 +229,7 @@ spec TestData {..} comparisonCapabilities = describe "Filtering in Queries" $ do
ApplyBinaryComparisonOperator Equal (_tdCurrentComparisonColumn "Name" artistNameScalarType) (Data.scalarValueComparison (String "AC/DC") artistNameScalarType)
let query =
albumsQueryRequest
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrQuery . qWhere ?~ where'
receivedAlbums <- Data.sortResponseRowsBy "AlbumId" <$> queryGuarded query
@ -251,11 +252,11 @@ spec TestData {..} comparisonCapabilities = describe "Filtering in Queries" $ do
ApplyBinaryComparisonOperator Equal (_tdCurrentComparisonColumn "Name" genreNameScalarType) (Data.scalarValueComparison (String "Metal") genreNameScalarType)
let query =
artistsQueryRequest
& qrTableRelationships
& qrRelationships
.~ Set.fromList
[ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships,
Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships,
Data.onlyKeepRelationships [_tdGenreRelationshipName] _tdTracksTableRelationships
[ API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdGenreRelationshipName] _tdTracksTableRelationships
]
& qrQuery . qWhere ?~ where'
receivedArtists <- Data.sortResponseRowsBy "ArtistId" <$> queryGuarded query
@ -292,7 +293,7 @@ spec TestData {..} comparisonCapabilities = describe "Filtering in Queries" $ do
]
let query =
artistsQueryRequest
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
& qrQuery . qWhere ?~ where'
receivedArtists <- Data.sortResponseRowsBy "ArtistId" <$> queryGuarded query
@ -312,13 +313,13 @@ spec TestData {..} comparisonCapabilities = describe "Filtering in Queries" $ do
artistsQueryRequest =
let fields = Data.mkFieldsMap [("ArtistId", _tdColumnField _tdArtistsTableName "ArtistId"), ("Name", _tdColumnField _tdArtistsTableName "Name")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
albumsQueryRequest :: QueryRequest
albumsQueryRequest =
let fields = Data.mkFieldsMap [("AlbumId", _tdColumnField _tdAlbumsTableName "AlbumId"), ("ArtistId", _tdColumnField _tdAlbumsTableName "ArtistId"), ("Title", _tdColumnField _tdAlbumsTableName "Title")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdAlbumsTableName mempty query Nothing
in TableQueryRequest _tdAlbumsTableName mempty query Nothing
albumIdScalarType = _tdFindColumnScalarType _tdAlbumsTableName "AlbumId"
albumTitleScalarType = _tdFindColumnScalarType _tdAlbumsTableName "Title"

View File

@ -14,6 +14,7 @@ import Data.Ord (Down (..))
import Data.Set qualified as Set
import Data.Text (Text)
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Test.AgentAPI (queryGuarded)
import Test.Data (TestData (..))
import Test.Data qualified as Data
@ -179,7 +180,7 @@ spec TestData {..} Capabilities {..} = describe "Foreach Queries" $ do
& qrForeach ?~ foreachIds
-- Add the Artist object relationship field
& qrQuery %~ (qFields . _Just . Data.fieldAt "Artist" ?~ RelField (RelationshipField _tdArtistRelationshipName artistsQuery))
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
receivedForeachResponse <- queryGuarded query
let joinInArtist (album :: HashMap FieldName FieldValue) =
@ -212,7 +213,7 @@ spec TestData {..} Capabilities {..} = describe "Foreach Queries" $ do
& qrForeach ?~ foreachIds
-- Add the Tracks array relationship field
& qrQuery %~ (qFields . _Just . Data.fieldAt "Tracks" ?~ RelField (RelationshipField _tdTracksRelationshipName tracksQuery))
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships]
receivedForeachResponse <- queryGuarded query
let joinInTracks (album :: HashMap FieldName FieldValue) =
@ -250,13 +251,13 @@ spec TestData {..} Capabilities {..} = describe "Foreach Queries" $ do
albumsQueryRequest =
let fields = Data.mkFieldsMap [("AlbumId", _tdColumnField _tdAlbumsTableName "AlbumId"), ("ArtistId", _tdColumnField _tdAlbumsTableName "ArtistId"), ("Title", _tdColumnField _tdAlbumsTableName "Title")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdAlbumsTableName mempty query Nothing
in TableQueryRequest _tdAlbumsTableName mempty query Nothing
playlistTracksQueryRequest :: QueryRequest
playlistTracksQueryRequest =
let fields = Data.mkFieldsMap [("PlaylistId", _tdColumnField _tdPlaylistTracksTableName "PlaylistId"), ("TrackId", _tdColumnField _tdPlaylistTracksTableName "TrackId")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdPlaylistTracksTableName mempty query Nothing
in TableQueryRequest _tdPlaylistTracksTableName mempty query Nothing
mkForeachIds :: TableName -> [(Text, J.Value)] -> HashMap ColumnName ScalarValue
mkForeachIds tableName =

View File

@ -1,3 +1,4 @@
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE QuasiQuotes #-}
module Test.Specs.QuerySpec.OrderBySpec (spec) where
@ -15,6 +16,7 @@ import Data.Maybe (fromMaybe, isJust)
import Data.Ord (Down (..))
import Data.Set qualified as Set
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Language.GraphQL.Draft.Syntax.QQ qualified as G
import Test.AgentAPI (queryGuarded)
import Test.Data (TestData (..))
@ -103,7 +105,7 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let query =
albumsQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
receivedAlbums <- queryGuarded query
let getRelatedArtist (album :: HashMap FieldName FieldValue) =
@ -130,7 +132,7 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let query =
albumsQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]
receivedAlbums <- queryGuarded query
let getRelatedArtist (album :: HashMap FieldName FieldValue) = do
@ -174,10 +176,10 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let query =
tracksQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrTableRelationships
& qrRelationships
.~ Set.fromList
[ Data.onlyKeepRelationships [_tdAlbumRelationshipName] _tdTracksTableRelationships,
Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships
[ API.RTable $ Data.onlyKeepRelationships [_tdAlbumRelationshipName] _tdTracksTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships
]
receivedTracks <- queryGuarded query
@ -189,7 +191,7 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let expectedTracks =
_tdTracksRows
& fmap (\track -> (Data.filterColumnsByQueryFields (_qrQuery tracksQueryRequest) track, getRelatedArtist track, track))
& fmap (\track -> (Data.filterColumnsByQueryFields (tracksQueryRequest ^. qrQuery) track, getRelatedArtist track, track))
& sortOn (\row -> (Down (row ^? _2 . _Just . Data.field "Name"), row ^? _3 . Data.field "Name", row ^? _3 . Data.field "TrackId"))
& fmap (^. _1)
@ -225,10 +227,10 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
>>> qOffset ?~ artistsOffset
>>> qLimit ?~ artistsLimit
)
& qrTableRelationships
& qrRelationships
.~ Set.fromList
[ Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships,
Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships
[ API.RTable $ Data.onlyKeepRelationships [_tdTracksRelationshipName] _tdAlbumsTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships
]
receivedArtists <- queryGuarded query
@ -273,7 +275,7 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let query =
artistsQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
receivedArtists <- queryGuarded query
let getAlbumsCount (artist :: HashMap FieldName FieldValue) = do
@ -302,7 +304,7 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let query =
artistsQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrTableRelationships .~ Set.fromList [Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
& qrRelationships .~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
receivedArtists <- queryGuarded query
let getAlbumsCount (artist :: HashMap FieldName FieldValue) = do
@ -344,10 +346,10 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
let query =
albumsQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrTableRelationships
& qrRelationships
.~ Set.fromList
[ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships,
Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships
[ API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships,
API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships
]
receivedAlbums <- queryGuarded query
@ -388,8 +390,8 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
artistsQueryRequest
& qrQuery . qOrderBy ?~ orderBy
& qrQuery . qWhere ?~ whereExp
& qrTableRelationships
.~ Set.fromList [Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
& qrRelationships
.~ Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]
receivedArtists <- queryGuarded query
let findRelatedAlbums (artist :: HashMap FieldName FieldValue) = fromMaybe [] do
@ -414,13 +416,13 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
albumsQueryRequest :: QueryRequest
albumsQueryRequest =
QueryRequest _tdAlbumsTableName mempty albumsQuery Nothing
TableQueryRequest _tdAlbumsTableName mempty albumsQuery Nothing
artistsQueryRequest :: QueryRequest
artistsQueryRequest =
let fields = Data.mkFieldsMap [("ArtistId", _tdColumnField _tdArtistsTableName "ArtistId"), ("Name", _tdColumnField _tdArtistsTableName "Name")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdArtistsTableName mempty query Nothing
in TableQueryRequest _tdArtistsTableName mempty query Nothing
tracksQuery :: Query
tracksQuery =
@ -429,13 +431,13 @@ spec TestData {..} Capabilities {..} = describe "Order By in Queries" $ do
tracksQueryRequest :: QueryRequest
tracksQueryRequest =
QueryRequest _tdTracksTableName mempty tracksQuery Nothing
TableQueryRequest _tdTracksTableName mempty tracksQuery Nothing
invoicesQueryRequest :: QueryRequest
invoicesQueryRequest =
let fields = Data.mkFieldsMap [("InvoiceId", _tdColumnField _tdInvoicesTableName "InvoiceId"), ("BillingState", _tdColumnField _tdInvoicesTableName "BillingState")]
query = Data.emptyQuery & qFields ?~ fields
in QueryRequest _tdInvoicesTableName mempty query Nothing
in TableQueryRequest _tdInvoicesTableName mempty query Nothing
orderBySingleColumnAggregateMax :: ColumnName -> ScalarType -> OrderByTarget
orderBySingleColumnAggregateMax columnName resultType = OrderBySingleColumnAggregate $ SingleColumnAggregate (SingleColumnAggregateFunction [G.name|max|]) columnName resultType

View File

@ -11,6 +11,7 @@ import Data.List.NonEmpty qualified as NonEmpty
import Data.Maybe (fromMaybe, maybeToList)
import Data.Set qualified as Set
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API.V0.Relationships as API
import Test.AgentAPI (queryGuarded)
import Test.Data (TestData (..))
import Test.Data qualified as Data
@ -246,7 +247,7 @@ spec TestData {..} subqueryComparisonCapabilities = describe "Relationship Queri
("Artist", RelField $ RelationshipField _tdArtistRelationshipName artistsSubquery)
]
query = albumsQuery & qFields ?~ fields
in QueryRequest _tdAlbumsTableName (Set.fromList [Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]) query Nothing
in TableQueryRequest _tdAlbumsTableName (Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdArtistRelationshipName] _tdAlbumsTableRelationships]) query Nothing
artistsWithAlbumsQuery :: (Query -> Query) -> QueryRequest
artistsWithAlbumsQuery modifySubquery =
@ -260,7 +261,7 @@ spec TestData {..} subqueryComparisonCapabilities = describe "Relationship Queri
("Albums", RelField $ RelationshipField _tdAlbumsRelationshipName albumsSubquery)
]
query = artistsQuery & qFields ?~ fields
in QueryRequest _tdArtistsTableName (Set.fromList [Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]) query Nothing
in TableQueryRequest _tdArtistsTableName (Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdAlbumsRelationshipName] _tdArtistsTableRelationships]) query Nothing
employeesWithCustomersQuery :: (Query -> Query) -> QueryRequest
employeesWithCustomersQuery modifySubquery =
@ -272,7 +273,7 @@ spec TestData {..} subqueryComparisonCapabilities = describe "Relationship Queri
[ ("SupportRepForCustomers", RelField $ RelationshipField _tdSupportRepForCustomersRelationshipName customersSubquery)
]
query = employeesQuery & qFields ?~ fields
in QueryRequest _tdEmployeesTableName (Set.fromList [Data.onlyKeepRelationships [_tdSupportRepForCustomersRelationshipName] _tdEmployeesTableRelationships]) query Nothing
in TableQueryRequest _tdEmployeesTableName (Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdSupportRepForCustomersRelationshipName] _tdEmployeesTableRelationships]) query Nothing
customersWithSupportRepQuery :: (Query -> Query) -> QueryRequest
customersWithSupportRepQuery modifySubquery =
@ -283,7 +284,7 @@ spec TestData {..} subqueryComparisonCapabilities = describe "Relationship Queri
[ ("SupportRep", RelField $ RelationshipField _tdSupportRepRelationshipName supportRepSubquery)
]
query = customersQuery & qFields ?~ fields
in QueryRequest _tdCustomersTableName (Set.fromList [Data.onlyKeepRelationships [_tdSupportRepRelationshipName] _tdCustomersTableRelationships]) query Nothing
in TableQueryRequest _tdCustomersTableName (Set.fromList [API.RTable $ Data.onlyKeepRelationships [_tdSupportRepRelationshipName] _tdCustomersTableRelationships]) query Nothing
artistsQuery :: Query
artistsQuery =

View File

@ -0,0 +1,84 @@
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Module containing tests for user-defined functions, a.k.a. "UDFs".
module Test.Specs.UDFSpec (spec) where
--------------------------------------------------------------------------------
import Command (TestConfig)
import Control.Lens ((?~))
import Control.Lens.Lens ((&))
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (MonadReader)
import Data.Aeson (Value (..))
import Data.List (sort)
import Data.Text qualified as Text
import GHC.Stack (HasCallStack)
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API qualified as API
import Test.AgentAPI (getSchemaGuarded, queryGuarded)
import Test.AgentClient (HasAgentClient, runAgentClientT)
import Test.AgentDatasets (HasDatasetContext)
import Test.AgentTestContext (HasAgentTestContext)
import Test.Data (FunctionsTestData (..), mkFunctionsTestData)
import Test.Data qualified as Data
import Test.Expectations (jsonShouldBe, rowsShouldBe)
import Test.Sandwich (HasLabel, Label (..), LabelValue, SpecFree, describe, getContext, introduce, (:>))
import Test.Sandwich.Misc (HasBaseContext)
import Test.TestHelpers (AgentDatasetTestSpec, it)
import Prelude
--------------------------------------------------------------------------------
fibonacciRows :: Int
fibonacciRows = 5 -- TODO: Make this a Gen
spec :: TestConfig -> API.Capabilities -> AgentDatasetTestSpec
spec testConfig API.Capabilities {} = describe "supports functions" $ preloadAgentSchema $ do
-- TODO: Check that the expected test tables are present too. Will require test-data changes.
it "returns functions from the Functions dataset" do
preloadedSchema <- getPreloadedAgentSchema
let FunctionsTestData {..} = mkFunctionsTestData preloadedSchema testConfig
extractFunctionNames = sort . fmap API._fiName
expectedFunctionNames = [_ftdFibonacciFunctionName, _ftdSearchArticlesFunctionName]
functionNames <- (extractFunctionNames . API._srFunctions) <$> getPreloadedAgentSchema
functionNames `jsonShouldBe` expectedFunctionNames
it "can query for a list of Fibonacci numbers using the fibonacci function" $ do
preloadedSchema <- getPreloadedAgentSchema
let testData@FunctionsTestData {..} = mkFunctionsTestData preloadedSchema testConfig
query = fibonacciRequest testData
results <- queryGuarded query
Data.responseRows results `rowsShouldBe` _ftdFibonacciRows fibonacciRows
_qrAggregates results `jsonShouldBe` Nothing
where
fibonacciRequest :: FunctionsTestData -> QueryRequest
fibonacciRequest FunctionsTestData {..} =
let fields = Data.mkFieldsMap [("Value", _ftdFunctionField _ftdFibonacciFunctionName "Value")]
query = Data.emptyQuery & qFields ?~ fields
k = "take" :: Text.Text
v = API.ScalarValue (Number (fromIntegral fibonacciRows)) (API.ScalarType "number")
args = [NamedArgument k (API.ScalarArgumentValue v)]
in QRFunction $ FunctionRequest _ftdFibonacciFunctionName args mempty query
type AgentSchemaLabel = "agent-schema"
preloadAgentSchema :: forall context m. (MonadIO m, MonadThrow m, HasAgentClient context, HasBaseContext context, HasAgentTestContext context, HasDatasetContext context) => SpecFree (LabelValue AgentSchemaLabel API.SchemaResponse :> context) m () -> SpecFree context m ()
preloadAgentSchema = introduce "Preload agent schema" agentSchemaLabel getAgentSchema (const $ pure ())
where
getAgentSchema = runAgentClientT Nothing $ getSchemaGuarded
agentSchemaLabel :: Label AgentSchemaLabel API.SchemaResponse
agentSchemaLabel = Label
type HasPreloadedAgentSchema context = HasLabel context "agent-schema" API.SchemaResponse
getPreloadedAgentSchema :: (HasCallStack, HasPreloadedAgentSchema context, MonadReader context m) => m API.SchemaResponse
getPreloadedAgentSchema = getContext agentSchemaLabel

View File

@ -41,6 +41,7 @@ backendTypeConfig =
comparisons:
subquery:
supports_relations: true
user_defined_functions: {}
scalar_types:
DateTime:
comparison_operators:

View File

@ -80,6 +80,7 @@ capabilities =
API._cExplain = Just API.ExplainCapabilities {},
API._cRaw = Just API.RawCapabilities {},
API._cDatasets = Just API.DatasetCapabilities {},
API._cUserDefinedFunctions = Just API.UserDefinedFunctionCapabilities {},
API._cLicensing = Nothing
},
_crConfigSchemaResponse =
@ -145,7 +146,8 @@ capabilities =
schema :: API.SchemaResponse
schema =
API.SchemaResponse
{ API._srTables =
{ API._srFunctions = [],
API._srTables =
[ API.TableInfo
{ API._tiName = mkTableName "Artist",
API._tiType = API.Table,

View File

@ -45,7 +45,7 @@ buildFunctionInfo ::
SourceName ->
FunctionName 'BigQuery ->
SystemDefined ->
FunctionConfig ->
FunctionConfig 'BigQuery ->
FunctionPermissionsMap ->
RawFunctionInfo 'BigQuery ->
Maybe Text ->

View File

@ -29,5 +29,6 @@ instance BackendMetadata 'BigQuery where
throw400 UnexpectedPayload "Computed fields are not supported in boolean expressions"
supportsBeingRemoteRelationshipTarget _ = True
listAllTables = BigQuery.listAllTables
listAllTrackables _ = throw400 UnexpectedPayload "listAllTrackables not supported by BigQuery!"
getTableInfo _ _ = throw400 UnexpectedPayload "get_table_info not yet supported in BigQuery!"
validateNativeQuery _ _ _ _ = pure ()

View File

@ -6,7 +6,7 @@ module Hasura.Backends.DataConnector.Adapter.API () where
import Hasura.Prelude
import Hasura.RQL.Types.BackendType (BackendType (DataConnector))
import Hasura.Server.API.Backend (BackendAPI (..), relationshipCommands, remoteRelationshipCommands, sourceCommands, tableCommands, tablePermissionsCommands)
import Hasura.Server.API.Backend (BackendAPI (..), relationshipCommands, remoteRelationshipCommands, sourceCommands, tableCommands, tablePermissionsCommands, trackableCommands)
--------------------------------------------------------------------------------
@ -16,6 +16,7 @@ instance BackendAPI 'DataConnector where
[ sourceCommands @'DataConnector,
tableCommands @'DataConnector,
tablePermissionsCommands @'DataConnector,
trackableCommands @'DataConnector,
relationshipCommands @'DataConnector,
remoteRelationshipCommands @'DataConnector
]

View File

@ -46,8 +46,9 @@ instance Backend 'DataConnector where
type TableName 'DataConnector = DC.TableName
type FunctionName 'DataConnector = DC.FunctionName
type RawFunctionInfo 'DataConnector = XDisable
type FunctionArgument 'DataConnector = XDisable
type FunctionReturnType 'DataConnector = DC.FunctionReturnType
type RawFunctionInfo 'DataConnector = API.FunctionInfo
type FunctionArgument 'DataConnector = API.FunctionArg
type ConstraintName 'DataConnector = DC.ConstraintName
type BasicOrderType 'DataConnector = DC.OrderDirection
type NullsOrderType 'DataConnector = Unimplemented
@ -64,7 +65,7 @@ instance Backend 'DataConnector where
type BooleanOperators 'DataConnector = CustomBooleanOperator
type ExtraTableMetadata 'DataConnector = DC.ExtraTableMetadata
type ComputedFieldDefinition 'DataConnector = Unimplemented
type FunctionArgumentExp 'DataConnector = Const Unimplemented
type FunctionArgumentExp 'DataConnector = DC.ArgumentExp
type ComputedFieldImplicitArguments 'DataConnector = Unimplemented
type ComputedFieldReturn 'DataConnector = Unimplemented
@ -111,9 +112,7 @@ instance Backend 'DataConnector where
scalarValueToJSON :: ScalarValue 'DataConnector -> J.Value
scalarValueToJSON = id
functionToTable :: FunctionName 'DataConnector -> TableName 'DataConnector
functionToTable = error "functionToTable: not implemented for the Data Connector backend."
-- TODO: Fill in this definition for computed fields
computedFieldFunction :: ComputedFieldDefinition 'DataConnector -> FunctionName 'DataConnector
computedFieldFunction = error "computedFieldFunction: not implemented for the Data Connector backend"
@ -127,6 +126,9 @@ instance Backend 'DataConnector where
tableToFunction :: TableName 'DataConnector -> FunctionName 'DataConnector
tableToFunction = coerce
functionToTable :: FunctionName 'DataConnector -> TableName 'DataConnector
functionToTable = coerce
tableGraphQLName :: TableName 'DataConnector -> Either QErr G.Name
tableGraphQLName name = do
let snakedName = snakeCaseTableName @'DataConnector name
@ -134,7 +136,10 @@ instance Backend 'DataConnector where
`onNothing` throw400 ValidationFailed ("TableName " <> snakedName <> " is not a valid GraphQL identifier")
functionGraphQLName :: FunctionName 'DataConnector -> Either QErr G.Name
functionGraphQLName = error "functionGraphQLName: not implemented for the Data Connector backend."
functionGraphQLName name = do
let snakedName = snakeCaseTableName @'DataConnector (coerce name)
G.mkName snakedName
`onNothing` throw400 ValidationFailed ("FunctionName " <> snakedName <> " is not a valid GraphQL name")
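-- For example, 'functionGraphQLName' maps FunctionName ("search" :| ["articles"]) to the
-- GraphQL name search_articles: the name parts are joined with underscores and then
-- validated as a GraphQL identifier.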
snakeCaseTableName :: TableName 'DataConnector -> Text
snakeCaseTableName = Text.intercalate "_" . NonEmpty.toList . DC.unTableName

View File

@ -11,15 +11,19 @@ import Data.Aeson.KeyMap qualified as KM
import Data.Bifunctor (bimap)
import Data.Environment (Environment)
import Data.Has (Has (getter))
import Data.HashMap.Strict qualified as HashMap
import Data.HashMap.Strict.Extended qualified as HashMap
import Data.HashMap.Strict.NonEmpty qualified as NEHashMap
import Data.HashSet qualified as HashSet
import Data.List.NonEmpty qualified as NEList
import Data.Map.Strict qualified as Map
import Data.Semigroup.Foldable (Foldable1 (..))
import Data.Sequence qualified as Seq
import Data.Sequence.NonEmpty qualified as NESeq
import Data.Text.Extended (toTxt, (<<>), (<>>))
import Hasura.Backends.DataConnector.API (capabilitiesCase, errorResponseSummary, schemaCase)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.API.V0 (FunctionInfo (_fiDescription, _fiName))
import Hasura.Backends.DataConnector.API.V0.ErrorResponse (_crDetails)
import Hasura.Backends.DataConnector.API.V0.Table qualified as DC (TableType (..))
import Hasura.Backends.DataConnector.Adapter.Backend (columnTypeToScalarType)
@ -28,27 +32,49 @@ import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
import Hasura.Backends.DataConnector.Agent.Client (AgentClientContext (..), runAgentClientT)
import Hasura.Backends.Postgres.SQL.Types (PGDescription (..))
import Hasura.Base.Error (Code (..), QErr (..), decodeValue, throw400, throw400WithDetail, withPathK)
import Hasura.Function.Cache
( FunctionConfig (..),
FunctionExposedAs (FEAMutation, FEAQuery),
FunctionInfo (..),
FunctionOverloads (FunctionOverloads),
FunctionPermissionsMap,
FunctionVolatility (FTSTABLE, FTVOLATILE),
InputArgument (..),
TrackableFunctionInfo (..),
TrackableInfo (..),
TrackableTableInfo (..),
getFuncArgNameTxt,
)
import Hasura.Function.Common
( getFunctionAggregateGQLName,
getFunctionArgsGQLName,
getFunctionGQLName,
)
import Hasura.Incremental qualified as Inc
import Hasura.Incremental.Select qualified as Inc
import Hasura.Logging (Hasura, Logger)
import Hasura.Prelude
import Hasura.RQL.DDL.Relationship (defaultBuildArrayRelationshipInfo, defaultBuildObjectRelationshipInfo)
import Hasura.RQL.IR.BoolExp (OpExpG (..), PartialSQLExp (..), RootOrCurrent (..), RootOrCurrentColumn (..))
import Hasura.RQL.Types.Backend (FunctionReturnType (..), functionGraphQLName)
import Hasura.RQL.Types.BackendType (BackendSourceKind (..), BackendType (..))
import Hasura.RQL.Types.Column qualified as RQL.T.C
import Hasura.RQL.Types.Common (OID (..), SourceName)
import Hasura.RQL.Types.Common (JsonAggSelect (JASMultipleRows, JASSingleObject), OID (..), SourceName, SystemDefined)
import Hasura.RQL.Types.CustomTypes (GraphQLType (..))
import Hasura.RQL.Types.EventTrigger (RecreateEventTriggers (RETDoNothing))
import Hasura.RQL.Types.Metadata (SourceMetadata (..))
import Hasura.RQL.Types.Metadata.Backend (BackendMetadata (..))
import Hasura.RQL.Types.Metadata.Object
import Hasura.RQL.Types.Relationships.Local (ArrRelDef, ObjRelDef, RelInfo)
import Hasura.RQL.Types.NamingCase (NamingCase)
import Hasura.RQL.Types.Relationships.Local (ArrRelDef, ObjRelDef, RelInfo ())
import Hasura.RQL.Types.SchemaCache (CacheRM, askSourceConfig, askSourceInfo)
import Hasura.RQL.Types.SchemaCache.Build
import Hasura.RQL.Types.SchemaCacheTypes (SchemaDependency)
import Hasura.RQL.Types.SchemaCacheTypes (DependencyReason (DRTable), SchemaDependency (SchemaDependency), SchemaObjId (SOSourceObj), SourceObjId (SOITable))
import Hasura.RQL.Types.Source (DBObjectsIntrospection (..), SourceInfo (..))
import Hasura.RQL.Types.Source.Column (ColumnValueGenerationStrategy (..), SourceColumnInfo (..))
import Hasura.RQL.Types.Source.Table (SourceConstraint (..), SourceForeignKeys (..), SourceTableInfo (..), SourceTableType (..))
import Hasura.RQL.Types.SourceCustomization (applyFieldNameCaseCust)
import Hasura.SQL.AnyBackend (mkAnyBackend)
import Hasura.SQL.Types (CollectableType (..))
import Hasura.Server.Migrate.Version (SourceCatalogMigrationState (..))
import Hasura.Server.Utils qualified as HSU
@ -73,24 +99,119 @@ instance BackendMetadata 'DataConnector where
parseBoolExpOperations = parseBoolExpOperations'
parseCollectableType = parseCollectableType'
buildComputedFieldInfo = error "buildComputedFieldInfo: not implemented for the Data Connector backend."
buildArrayRelationshipInfo = buildArrayRelationshipInfo'
buildObjectRelationshipInfo = buildObjectRelationshipInfo'
-- If/when we implement enums for Data Connector backend, we will also need to fix columnTypeToScalarType function
-- in Hasura.Backends.DataConnector.Adapter.Backend. See note there for more information.
fetchAndValidateEnumValues = error "fetchAndValidateEnumValues: not implemented for the Data Connector backend."
buildArrayRelationshipInfo = buildArrayRelationshipInfo'
buildObjectRelationshipInfo = buildObjectRelationshipInfo'
buildFunctionInfo = error "buildFunctionInfo: not implemented for the Data Connector backend."
buildFunctionInfo = buildFunctionInfo'
updateColumnInEventTrigger = error "updateColumnInEventTrigger: not implemented for the Data Connector backend."
postDropSourceHook _sourceConfig _tableTriggerMap = pure ()
buildComputedFieldBooleanExp _ _ _ _ _ _ =
error "buildComputedFieldBooleanExp: not implemented for the Data Connector backend."
columnInfoToFieldInfo = columnInfoToFieldInfo'
listAllTables = listAllTables'
listAllTrackables = listAllTrackables'
getTableInfo = getTableInfo'
supportsBeingRemoteRelationshipTarget = supportsBeingRemoteRelationshipTarget'
arityJsonAggSelect :: API.FunctionArity -> JsonAggSelect
arityJsonAggSelect = \case
API.FunctionArityOne -> JASSingleObject
API.FunctionArityMany -> JASMultipleRows
functionReturnTypeFromAPI ::
MonadError QErr m =>
DC.FunctionName ->
(Maybe (FunctionReturnType 'DataConnector), API.FunctionReturnType) ->
m DC.TableName
functionReturnTypeFromAPI funcGivenName = \case
(Just (DC.FunctionReturnsTable t), _) -> pure t
(_, API.FunctionReturnsTable t) -> pure (Witch.into t)
_ ->
throw400 NotSupported $
"Function "
<> toTxt funcGivenName
<> " is missing a return type - This should be explicit in metadata, or inferred from agent"
buildFunctionInfo' ::
MonadError QErr m =>
SourceName ->
DC.FunctionName ->
SystemDefined ->
FunctionConfig 'DataConnector ->
FunctionPermissionsMap ->
API.FunctionInfo ->
Maybe Text ->
NamingCase ->
m
( Hasura.Function.Cache.FunctionInfo 'DataConnector,
SchemaDependency
)
buildFunctionInfo'
sourceName
funcName
sysDefined
funcConfig@FunctionConfig {..}
permissionMap
(API.FunctionInfo infoName infoType returnType infoSet infoArgs infoDesc)
funcComment
namingCase =
do
funcGivenName <- functionGraphQLName @'DataConnector funcName `onLeft` throwError
let (volitility, exposeAs) = case infoType of
API.FRead -> (FTSTABLE, FEAQuery)
API.FWrite -> (FTVOLATILE, FEAMutation)
setNamingCase = applyFieldNameCaseCust namingCase
objid <-
case (_fcResponse, returnType) of
(Just (DC.FunctionReturnsTable t), _) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector t
(_, API.FunctionReturnsTable t) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector (Witch.into t)
_ ->
throw400 NotSupported $
"Function "
<> tshow funcName
<> " is missing a return type - This should be explicit in metadata, or inferred from agent"
inputArguments <- do
let argNames = map API._faInputArgName infoArgs
invalidArgs = filter (isNothing . GQL.mkName) argNames
unless (null invalidArgs) $ throw400 NotSupported $ "Invalid argument names: " <> tshow invalidArgs
-- Modified version of makeInputArguments from PG:
case _fcSessionArgument of
Nothing -> pure $ Seq.fromList $ map IAUserProvided infoArgs
Just sessionArgName -> do
unless (any (\arg -> getFuncArgNameTxt sessionArgName == API._faInputArgName arg) infoArgs) $
throw400 NotSupported $
"Session argument not mappable: " <> tshow sessionArgName
pure $
Seq.fromList $
flip map infoArgs $ \arg ->
if getFuncArgNameTxt sessionArgName == API._faInputArgName arg
then IASessionVariables sessionArgName
else IAUserProvided arg
functionReturnType <- functionReturnTypeFromAPI funcName (_fcResponse, returnType)
let funcInfo =
FunctionInfo
{ _fiSQLName = Witch.into infoName, -- Converts to DC.FunctionName
_fiGQLName = getFunctionGQLName funcGivenName funcConfig setNamingCase,
_fiGQLArgsName = getFunctionArgsGQLName funcGivenName funcConfig setNamingCase,
_fiGQLAggregateName = getFunctionAggregateGQLName funcGivenName funcConfig setNamingCase,
_fiSystemDefined = sysDefined,
_fiVolatility = volitility,
_fiExposedAs = exposeAs,
_fiInputArgs = inputArguments,
_fiReturnType = functionReturnType,
_fiDescription = infoDesc,
_fiPermissions = permissionMap,
_fiJsonAggSelect = arityJsonAggSelect infoSet,
_fiComment = funcComment
}
pure $ (funcInfo, SchemaDependency objid DRTable)
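-- A small sketch of the session-argument handling above (illustrative only; the argument
-- names are placeholders): with _fcSessionArgument = Just "_session" and agent-reported
-- arguments ["limit", "_session"], the computed inputArguments is roughly
--
--   Seq.fromList [IAUserProvided <the "limit" arg>, IASessionVariables "_session"]
--
-- and with no session argument configured, every argument stays IAUserProvided.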
resolveBackendInfo' ::
forall arr m.
( ArrowChoice arr,
@ -228,11 +349,17 @@ resolveDatabaseMetadata' logger SourceMetadata {_smName} sourceConfig@DC.SourceC
},
_ptmiCustomObjectTypes = Just customObjectTypes
}
pure (coerce _tiName, meta)
pure (Witch.into _tiName, meta)
functions =
let sorted = sortOn _fiName _srFunctions
grouped = NEList.groupBy ((==) `on` _fiName) sorted
in HashMap.fromList do
infos@(API.FunctionInfo {..} NEList.:| _) <- grouped
pure (Witch.into _fiName, FunctionOverloads infos)
in pure $
DBObjectsIntrospection
{ _rsTables = tables,
_rsFunctions = mempty,
_rsFunctions = functions,
_rsScalars = mempty
}
@ -500,6 +627,23 @@ listAllTables' sourceName = do
schemaResponse <- requestDatabaseSchema logger sourceName sourceConfig
pure $ fmap (Witch.from . API._tiName) $ API._srTables schemaResponse
listAllTrackables' :: (CacheRM m, Has (Logger Hasura) r, MonadIO m, MonadBaseControl IO m, MonadReader r m, MonadError QErr m, MetadataM m) => SourceName -> m (TrackableInfo 'DataConnector)
listAllTrackables' sourceName = do
(logger :: Logger Hasura) <- asks getter
sourceConfig <- askSourceConfig @'DataConnector sourceName
schemaResponse <- requestDatabaseSchema logger sourceName sourceConfig
let functions = fmap (\fi -> TrackableFunctionInfo (Witch.into (API._fiName fi)) (getVolatility (API._fiFunctionType fi))) $ API._srFunctions schemaResponse
let tables = fmap (TrackableTableInfo . Witch.into . API._tiName) $ API._srTables schemaResponse
pure
TrackableInfo
{ trackableTables = tables,
trackableFunctions = functions
}
getVolatility :: API.FunctionType -> FunctionVolatility
getVolatility API.FRead = FTSTABLE
getVolatility API.FWrite = FTVOLATILE
getTableInfo' :: (CacheRM m, MetadataM m, MonadError QErr m) => SourceName -> DC.TableName -> m (Maybe (SourceTableInfo 'DataConnector))
getTableInfo' sourceName tableName = do
SourceInfo {_siDbObjectsIntrospection} <- askSourceInfo @'DataConnector sourceName

View File

@ -1,4 +1,5 @@
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.DataConnector.Adapter.Schema () where
@ -11,13 +12,16 @@ import Data.Has
import Data.HashMap.Strict.Extended qualified as HashMap
import Data.List.NonEmpty qualified as NE
import Data.Scientific (fromFloatDigits)
import Data.Sequence qualified as Seq
import Data.Text.Casing (GQLNameIdentifier, fromAutogeneratedName, fromCustomName)
import Data.Text.Extended (toTxt, (<<>))
import Data.Text.Extended (toTxt, (<<>), (<>>))
import Data.Traversable (mapAccumL)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Backend (CustomBooleanOperator (..), columnTypeToScalarType)
import Hasura.Backends.DataConnector.Adapter.Types qualified as DC
import Hasura.Backends.DataConnector.Adapter.Types.Mutations qualified as DC
import Hasura.Base.Error
import Hasura.Function.Cache qualified as RQL
import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Schema.Backend (BackendSchema (..), BackendTableSelectSchema (..), BackendUpdateOperatorsSchema (..), ComparisonExp, MonadBuildSchema)
import Hasura.GraphQL.Schema.BoolExp qualified as GS.BE
@ -25,6 +29,8 @@ import Hasura.GraphQL.Schema.Build qualified as GS.B
import Hasura.GraphQL.Schema.Common qualified as GS.C
import Hasura.GraphQL.Schema.Parser qualified as P
import Hasura.GraphQL.Schema.Select qualified as GS.S
import Hasura.GraphQL.Schema.Table qualified as GS.T
import Hasura.GraphQL.Schema.Typename qualified as GS.N
import Hasura.GraphQL.Schema.Update qualified as GS.U
import Hasura.GraphQL.Schema.Update.Batch qualified as GS.U.B
import Hasura.Name qualified as Name
@ -39,6 +45,8 @@ import Hasura.RQL.IR.Value qualified as IR
import Hasura.RQL.Types.Backend qualified as RQL
import Hasura.RQL.Types.BackendType (BackendType (..))
import Hasura.RQL.Types.Column qualified as RQL
import Hasura.RQL.Types.Common qualified as RQL
import Hasura.RQL.Types.ComputedField as RQL
import Hasura.RQL.Types.NamingCase
import Hasura.RQL.Types.Schema.Options qualified as Options
import Hasura.RQL.Types.Source qualified as RQL
@ -54,7 +62,7 @@ instance BackendSchema 'DataConnector where
buildTableRelayQueryFields = experimentalBuildTableRelayQueryFields
buildFunctionQueryFields _ _ _ _ = pure []
buildFunctionQueryFields = buildFunctionQueryFields'
buildFunctionRelayQueryFields _ _ _ _ _ = pure []
buildFunctionMutationFields _ _ _ _ = pure []
buildTableInsertMutationFields = buildTableInsertMutationFields'
@ -99,6 +107,208 @@ instance BackendUpdateOperatorsSchema 'DataConnector where
--------------------------------------------------------------------------------
buildFunctionQueryFields' ::
forall r m n.
( MonadError QErr m,
P.MonadMemoize m,
MonadParse n,
Has (RQL.SourceInfo 'DataConnector) r,
Has GS.C.SchemaContext r,
Has Options.SchemaOptions r
) =>
RQL.MkRootFieldName ->
DC.FunctionName ->
RQL.FunctionInfo 'DataConnector ->
DC.TableName ->
GS.C.SchemaT
r
m
[ P.FieldParser
n
( IR.QueryDB
'DataConnector
(IR.RemoteRelationshipField IR.UnpreparedValue)
(IR.UnpreparedValue 'DataConnector)
)
]
buildFunctionQueryFields' mkRootFieldName functionName functionInfo tableName = do
let -- Implementation modified from buildFunctionQueryFieldsPG
funcDesc =
Just . GQL.Description $
flip fromMaybe (RQL._fiComment functionInfo <|> RQL._fiDescription functionInfo) $
"execute function " <> functionName <<> " which returns " <>> tableName
queryResultType =
case RQL._fiJsonAggSelect functionInfo of
RQL.JASMultipleRows -> IR.QDBMultipleRows
RQL.JASSingleObject -> IR.QDBSingleRow
catMaybes
<$> sequenceA
[ GS.C.optionalFieldParser queryResultType $ selectFunction mkRootFieldName functionInfo funcDesc
-- TODO: Aggregations are not currently supported.
-- See: GS.C.optionalFieldParser (QDBAggregation) $ selectFunctionAggregate mkRootFieldName functionInfo funcAggDesc
]
-- | User-defined function (AKA custom function) -- Modified from PG variant.
selectFunction ::
forall r m n.
( MonadBuildSchema 'DataConnector r m n
) =>
RQL.MkRootFieldName ->
-- | SQL function info
RQL.FunctionInfo 'DataConnector -> -- TODO: The function return type should have already been resolved by this point - Into TableName
-- | field description, if any
Maybe GQL.Description ->
GS.C.SchemaT r m (Maybe (P.FieldParser n (GS.C.SelectExp 'DataConnector)))
selectFunction mkRootFieldName fi@RQL.FunctionInfo {..} description = runMaybeT do
sourceInfo :: RQL.SourceInfo 'DataConnector <- asks getter
roleName <- GS.C.retrieve GS.C.scRole
let customization = RQL._siCustomization sourceInfo
tCase = RQL._rscNamingConvention customization
tableInfo <- lift $ GS.C.askTableInfo _fiReturnType
selectPermissions <- hoistMaybe $ GS.T.tableSelectPermissions roleName tableInfo
selectionSetParser <- MaybeT $ returnFunctionParser tableInfo
lift do
stringifyNumbers <- GS.C.retrieve Options.soStringifyNumbers
tableArgsParser <- tableArguments tableInfo
functionArgsParser <- customFunctionArgs fi _fiGQLName _fiGQLArgsName
let argsParser = liftA2 (,) functionArgsParser tableArgsParser
functionFieldName = RQL.runMkRootFieldName mkRootFieldName _fiGQLName
pure $
P.subselection functionFieldName description argsParser selectionSetParser
<&> \((funcArgs, tableArgs''), fields) ->
IR.AnnSelectG
{ IR._asnFields = fields,
IR._asnFrom = IR.FromFunction _fiSQLName funcArgs Nothing,
IR._asnPerm = GS.S.tablePermissionsInfo selectPermissions,
IR._asnArgs = tableArgs'',
IR._asnStrfyNum = stringifyNumbers,
IR._asnNamingConvention = Just tCase
}
where
returnFunctionParser =
case _fiJsonAggSelect of
RQL.JASSingleObject -> tableSelectionSet
RQL.JASMultipleRows -> GS.S.tableSelectionList
-- Modified version of the PG Reference: customSQLFunctionArgs.
-- | The custom SQL functions' input "args" field parser
-- > function_name(args: function_args)
customFunctionArgs ::
MonadBuildSchema 'DataConnector r m n =>
RQL.FunctionInfo 'DataConnector ->
GQL.Name ->
GQL.Name ->
GS.C.SchemaT r m (P.InputFieldsParser n (RQL.FunctionArgsExp 'DataConnector (IR.UnpreparedValue 'DataConnector)))
customFunctionArgs RQL.FunctionInfo {..} functionName functionArgsName =
functionArgs'
( FTACustomFunction $
RQL.CustomFunctionNames
{ cfnFunctionName = functionName,
cfnArgsName = functionArgsName
}
)
_fiInputArgs
-- NOTE: Modified version of server/src-lib/Hasura/Backends/Postgres/Schema/Select.hs ~ functionArgs
functionArgs' ::
forall r m n.
MonadBuildSchema 'DataConnector r m n =>
FunctionTrackedAs 'DataConnector ->
Seq.Seq (RQL.FunctionInputArgument 'DataConnector) ->
GS.C.SchemaT r m (P.InputFieldsParser n (RQL.FunctionArgsExp 'DataConnector (IR.UnpreparedValue 'DataConnector)))
functionArgs' functionTrackedAs (toList -> inputArgs) = do
sourceInfo :: RQL.SourceInfo 'DataConnector <- asks getter
let customization = RQL._siCustomization sourceInfo
tCase = RQL._rscNamingConvention customization
mkTypename = GS.N.runMkTypename $ RQL._rscTypeNames customization
(names, session, optional, mandatory) = mconcat $ snd $ mapAccumL splitArguments 1 inputArgs
defaultArguments = RQL.FunctionArgsExp (snd <$> session) HashMap.empty
if
| length session > 1 ->
throw500 "there shouldn't be more than one session argument"
| null optional && null mandatory ->
pure $ pure defaultArguments
| otherwise -> do
argumentParsers <- sequenceA $ optional <> mandatory
objectName <-
mkTypename . RQL.applyTypeNameCaseIdentifier tCase
<$> case functionTrackedAs of
FTAComputedField computedFieldName _sourceName tableName -> do
tableInfo <- GS.C.askTableInfo tableName
computedFieldGQLName <- GS.C.textToName $ computedFieldNameToText computedFieldName
tableGQLName <- GS.T.getTableIdentifierName @'DataConnector tableInfo
pure $ RQL.mkFunctionArgsTypeName computedFieldGQLName tableGQLName
FTACustomFunction (CustomFunctionNames {cfnArgsName}) ->
pure $ fromCustomName cfnArgsName
let fieldName = Name._args
fieldDesc =
case functionTrackedAs of
FTAComputedField computedFieldName _sourceName tableName ->
GQL.Description $
"input parameters for computed field "
<> computedFieldName <<> " defined on table " <>> tableName
FTACustomFunction (CustomFunctionNames {cfnFunctionName}) ->
GQL.Description $ "input parameters for function " <>> cfnFunctionName
objectParser =
P.object objectName Nothing (sequenceA argumentParsers) `P.bind` \arguments -> do
let foundArguments = HashMap.fromList $ catMaybes arguments <> session
argsWithNames = zip names inputArgs
-- All args have names in DC for now
named <- HashMap.fromList . catMaybes <$> traverse (namedArgument foundArguments) argsWithNames
pure $ RQL.FunctionArgsExp [] named
pure $ P.field fieldName (Just fieldDesc) objectParser
where
sessionPlaceholder :: DC.ArgumentExp (IR.UnpreparedValue b)
sessionPlaceholder = DC.AEInput IR.UVSession
splitArguments ::
Int ->
RQL.FunctionInputArgument 'DataConnector ->
( Int,
( [Text], -- graphql names, in order
[(Text, DC.ArgumentExp (IR.UnpreparedValue 'DataConnector))], -- session argument
[GS.C.SchemaT r m (P.InputFieldsParser n (Maybe (Text, DC.ArgumentExp (IR.UnpreparedValue 'DataConnector))))], -- optional argument
[GS.C.SchemaT r m (P.InputFieldsParser n (Maybe (Text, DC.ArgumentExp (IR.UnpreparedValue 'DataConnector))))] -- mandatory argument
)
)
splitArguments positionalIndex (RQL.IASessionVariables name) =
let argName = RQL.getFuncArgNameTxt name
in (positionalIndex, ([argName], [(argName, sessionPlaceholder)], [], []))
splitArguments positionalIndex (RQL.IAUserProvided arg@(API.FunctionArg faName _faType _faOptional)) =
let (argName, newIndex) = (faName, positionalIndex) -- Names are currently always present
in -- NOTE: Positional defaults are not implemented here, but named arguments should support this.
-- See: `if Postgres.unHasDefault $ Postgres.faHasDefault arg`
(newIndex, ([argName], [], [], [parseArgument arg argName]))
parseArgument :: RQL.FunctionArgument 'DataConnector -> Text -> GS.C.SchemaT r m (P.InputFieldsParser n (Maybe (Text, DC.ArgumentExp (IR.UnpreparedValue 'DataConnector))))
parseArgument (API.FunctionArg faName faType _faOptional) name = do
typedParser <- columnParser (RQL.ColumnScalar $ convertScalarType faType) (GQL.Nullability True)
fieldName <- GS.C.textToName name
let argParser = P.fieldOptional fieldName Nothing typedParser
pure $ argParser `GS.C.mapField` ((faName,) . DC.AEInput . IR.mkParameter)
namedArgument ::
HashMap Text (DC.ArgumentExp (IR.UnpreparedValue 'DataConnector)) ->
(Text, RQL.FunctionInputArgument 'DataConnector) ->
n (Maybe (Text, DC.ArgumentExp (IR.UnpreparedValue 'DataConnector)))
namedArgument dictionary (name, inputArgument) = case inputArgument of
RQL.IASessionVariables _ -> pure $ Just (name, sessionPlaceholder)
RQL.IAUserProvided (API.FunctionArg _faName _faType faOptional) -> case HashMap.lookup name dictionary of
Just parsedValue -> pure $ Just (name, parsedValue) -- Names are currently always present
Nothing ->
if faOptional
then pure Nothing
else P.parseErrorWith P.NotSupported "Non-default arguments cannot be omitted"
convertScalarType :: API.ScalarType -> RQL.ScalarType 'DataConnector
convertScalarType t = DC.ScalarType (API.getScalarType t) Nothing -- TODO: GQL Type Name
buildTableInsertMutationFields' ::
(MonadBuildSchema 'DataConnector r m n) =>
RQL.MkRootFieldName ->

View File

@ -20,11 +20,13 @@ module Hasura.Backends.DataConnector.Adapter.Types
ConstraintName (..),
ColumnName (..),
FunctionName (..),
FunctionReturnType (..),
CountAggregate (..),
Literal (..),
OrderDirection (..),
API.GraphQLType (..),
ScalarType (..),
ArgumentExp (..),
mkScalarType,
fromGQLType,
ExtraTableMetadata (..),
@ -44,6 +46,7 @@ import Data.Aeson.Types (parseEither, toJSONKeyText)
import Data.Environment (Environment)
import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NonEmpty
import Data.OpenApi (ToSchema)
import Data.Text qualified as Text
import Data.Text.Extended (ToTxt (..))
import Hasura.Backends.DataConnector.API qualified as API
@ -178,6 +181,40 @@ instance J.ToJSON SourceConfig where
--------------------------------------------------------------------------------
-- | This represents what information can be known about the return type of a user-defined function.
-- For now, either the return type will be the name of a table that exists in the schema,
-- or "Unknown" - implying that this information can be derived from another source,
-- or if there is no other source, then it is an error.
-- In future, this type may be extended with additional constructors including scalar and row types
-- from the Logical Models feature.
--
-- Note: This is very similar to ComputedFieldReturnType defined above.
-- The two types may be unified in future.
data FunctionReturnType
= FunctionReturnsTable TableName
| FunctionReturnsUnknown
deriving (Show, Eq, NFData, Hashable, Generic)
deriving (ToSchema, ToJSON, FromJSON) via AC.Autodocodec FunctionReturnType
instance AC.HasCodec FunctionReturnType where
codec =
AC.named "FunctionReturnType" $
AC.object "FunctionReturnType" $
AC.discriminatedUnionCodec "type" enc dec
where
typeField = pure ()
tableField = AC.requiredField' "table"
enc = \case
FunctionReturnsTable rt -> ("table", AC.mapToEncoder rt tableField)
FunctionReturnsUnknown -> ("inferred", AC.mapToEncoder () typeField) -- We hook into the type field because it's madatory
dec =
HashMap.fromList
[ ("table", ("TableFunctionResponse", AC.mapToDecoder FunctionReturnsTable tableField)),
("inferred", ("InferredFunctionResponse", AC.mapToDecoder (const FunctionReturnsUnknown) typeField))
]
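-- A sketch of the JSON this codec is intended to round-trip (illustrative only; the table
-- name is a placeholder):
--
--   {"type": "table", "table": ["some_table"]}  <->  FunctionReturnsTable <that table name>
--   {"type": "inferred"}                        <->  FunctionReturnsUnknown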
------------
data DataConnectorOptions = DataConnectorOptions
{ _dcoUri :: BaseUrl,
_dcoDisplayName :: Maybe Text
@ -292,6 +329,12 @@ newtype FunctionName = FunctionName {unFunctionName :: NonEmpty Text}
deriving stock (Data, Eq, Generic, Ord, Show)
deriving newtype (FromJSON, Hashable, NFData, ToJSON)
instance Witch.From FunctionName API.FunctionName
instance Witch.From API.FunctionName FunctionName
instance Witch.From (NonEmpty Text) FunctionName
instance HasCodec FunctionName where
codec = AC.dimapCodec FunctionName unFunctionName codec
@ -304,6 +347,21 @@ instance ToTxt FunctionName where
instance ToErrorValue FunctionName where
toErrorValue = ErrorValue.squote . toTxt
-- Modified from Hasura.Backends.Postgres.Types.Function
-- Initially just handles literal input arguments.
data ArgumentExp a
= -- | Table row accessor
-- AETableRow
-- | -- | Hardcoded reference to @hdb_catalog.hdb_action_log.response_payload@
-- AEActionResponsePayload
-- | -- | JSON/JSONB hasura session variable object
-- AESession a
-- |
AEInput a
deriving stock (Eq, Show, Functor, Foldable, Traversable, Generic)
instance (Hashable a) => Hashable (ArgumentExp a)
--------------------------------------------------------------------------------
data CountAggregate

View File

@ -1,3 +1,6 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DerivingStrategies #-}
-- | This module contains Data Connector request/response planning code and utility
-- functions and types that are common across the different categories of requests
-- (ie queries, mutations, etc). It contains code and concepts that are independent
@ -9,6 +12,7 @@
module Hasura.Backends.DataConnector.Plan.Common
( Plan (..),
TableRelationships (..),
TableRelationshipsKey (..),
FieldPrefix,
noPrefix,
prefixWith,
@ -65,10 +69,18 @@ data Plan request response = Plan
--------------------------------------------------------------------------------
-- | Key datatype for TableRelationships to avoid having an Either directly as the key,
-- and to make it easier to extend the types of relationships in the future.
data TableRelationshipsKey
= FunctionNameKey API.FunctionName
| TableNameKey API.TableName
deriving stock (Eq, Show, Generic)
deriving anyclass (Hashable)
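-- For example (illustrative only): a relationship recorded while translating a UDF query is
-- keyed by a FunctionNameKey and later serialised as an API.RFunction entry, while one
-- recorded for a plain table query is keyed by a TableNameKey and becomes an API.RTable entry.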
-- | A monoidal data structure used to record Table Relationships encountered during request
-- translation. Used with 'recordTableRelationship'.
newtype TableRelationships = TableRelationships
{unTableRelationships :: HashMap API.TableName (HashMap API.RelationshipName API.Relationship)}
{unTableRelationships :: HashMap TableRelationshipsKey (HashMap API.RelationshipName API.Relationship)}
deriving stock (Eq, Show)
instance Semigroup TableRelationships where
@ -84,12 +96,12 @@ recordTableRelationship ::
Monoid writerOutput,
MonadError QErr m
) =>
API.TableName ->
TableRelationshipsKey ->
API.RelationshipName ->
API.Relationship ->
CPS.WriterT writerOutput m ()
recordTableRelationship sourceTableName relationshipName relationship =
let newRelationship = TableRelationships $ HashMap.singleton sourceTableName (HashMap.singleton relationshipName relationship)
recordTableRelationship sourceName relationshipName relationship =
let newRelationship = TableRelationships $ HashMap.singleton sourceName (HashMap.singleton relationshipName relationship)
in CPS.tell $ modifier (const newRelationship) mempty
recordTableRelationshipFromRelInfo ::
@ -97,7 +109,7 @@ recordTableRelationshipFromRelInfo ::
Monoid writerOutput,
MonadError QErr m
) =>
API.TableName ->
TableRelationshipsKey ->
RelInfo 'DataConnector ->
CPS.WriterT writerOutput m (API.RelationshipName, API.Relationship)
recordTableRelationshipFromRelInfo sourceTableName RelInfo {..} = do
@ -214,11 +226,11 @@ translateBoolExpToExpression ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m (Maybe API.Expression)
translateBoolExpToExpression sessionVariables sourceTableName boolExp = do
removeAlwaysTrueExpression <$> translateBoolExp sessionVariables sourceTableName boolExp
translateBoolExpToExpression sessionVariables sourceName boolExp = do
removeAlwaysTrueExpression <$> translateBoolExp sessionVariables sourceName boolExp
translateBoolExp ::
( Has TableRelationships writerOutput,
@ -226,25 +238,25 @@ translateBoolExp ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m API.Expression
translateBoolExp sessionVariables sourceTableName = \case
translateBoolExp sessionVariables sourceName = \case
BoolAnd xs ->
mkIfZeroOrMany API.And . mapMaybe removeAlwaysTrueExpression <$> traverse (translateBoolExp' sourceTableName) xs
mkIfZeroOrMany API.And . mapMaybe removeAlwaysTrueExpression <$> traverse (translateBoolExp' sourceName) xs
BoolOr xs ->
mkIfZeroOrMany API.Or . mapMaybe removeAlwaysFalseExpression <$> traverse (translateBoolExp' sourceTableName) xs
mkIfZeroOrMany API.Or . mapMaybe removeAlwaysFalseExpression <$> traverse (translateBoolExp' sourceName) xs
BoolNot x ->
API.Not <$> (translateBoolExp' sourceTableName) x
API.Not <$> (translateBoolExp' sourceName) x
BoolField (AVColumn c xs) ->
lift $ mkIfZeroOrMany API.And <$> traverse (translateOp sessionVariables (Witch.from $ ciColumn c) (Witch.from . columnTypeToScalarType $ ciType c)) xs
BoolField (AVRelationship relationshipInfo (RelationshipFilters {rfTargetTablePermissions, rfFilter})) -> do
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceName relationshipInfo
-- TODO: How does this function keep track of the root table?
API.Exists (API.RelatedTable relationshipName) <$> translateBoolExp' _rTargetTable (BoolAnd [rfTargetTablePermissions, rfFilter])
API.Exists (API.RelatedTable relationshipName) <$> translateBoolExp' (TableNameKey _rTargetTable) (BoolAnd [rfTargetTablePermissions, rfFilter])
BoolExists GExists {..} ->
let tableName = Witch.from _geTable
in API.Exists (API.UnrelatedTable tableName) <$> translateBoolExp' tableName _geWhere
in API.Exists (API.UnrelatedTable tableName) <$> translateBoolExp' (TableNameKey tableName) _geWhere
where
translateBoolExp' = translateBoolExp sessionVariables

View File

@ -92,21 +92,22 @@ translateMutationDB ::
translateMutationDB sessionVariables = \case
MDBInsert insert -> do
(insertOperation, (tableRelationships, tableInsertSchemas)) <- CPS.runWriterT $ translateInsert sessionVariables insert
let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> HashMap.toList (unTableRelationships tableRelationships)
let apiTableInsertSchema =
unTableInsertSchemas tableInsertSchemas
& HashMap.toList
& fmap (\(tableName, TableInsertSchema {..}) -> API.TableInsertSchema tableName _tisPrimaryKey _tisFields)
& Set.fromList
let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships)))
pure $
API.MutationRequest
{ _mrTableRelationships = apiTableRelationships,
_mrInsertSchema = apiTableInsertSchema,
_mrInsertSchema = Set.fromList apiTableInsertSchema,
_mrOperations = [API.InsertOperation insertOperation]
}
MDBUpdate update -> do
(updateOperations, tableRelationships) <- CPS.runWriterT $ translateUpdate sessionVariables update
let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> HashMap.toList (unTableRelationships tableRelationships)
let apiTableRelationships =
Set.fromList $
uncurry API.TableRelationships <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships)))
pure $
API.MutationRequest
{ _mrTableRelationships = apiTableRelationships,
@ -115,7 +116,9 @@ translateMutationDB sessionVariables = \case
}
MDBDelete delete -> do
(deleteOperation, tableRelationships) <- CPS.runWriterT $ translateDelete sessionVariables delete
let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> HashMap.toList (unTableRelationships tableRelationships)
let apiTableRelationships =
Set.fromList $
uncurry API.TableRelationships <$> rights (map eitherKey (HashMap.toList (unTableRelationships tableRelationships)))
pure $
API.MutationRequest
{ _mrTableRelationships = apiTableRelationships,
@ -125,6 +128,10 @@ translateMutationDB sessionVariables = \case
MDBFunction _returnsSet _select ->
throw400 NotSupported "translateMutationDB: function mutations not implemented for the Data Connector backend."
eitherKey :: (TableRelationshipsKey, c) -> Either (API.FunctionName, c) (API.TableName, c)
eitherKey (FunctionNameKey f, x) = Left (f, x)
eitherKey (TableNameKey t, x) = Right (t, x)
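-- Note (illustrative only): keeping just the 'rights' above means any function-keyed
-- relationships are dropped when building mutation requests, which currently operate on
-- tables only.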
translateInsert ::
MonadError QErr m =>
SessionVariables ->
@ -133,7 +140,7 @@ translateInsert ::
translateInsert sessionVariables AnnotatedInsert {_aiData = AnnotatedInsertData {..}, ..} = do
captureTableInsertSchema tableName _aiTableColumns _aiPrimaryKey _aiExtraTableMetadata
rows <- lift $ traverse (translateInsertRow sessionVariables tableName _aiTableColumns _aiPresetValues) _aiInsertObject
postInsertCheck <- translateBoolExpToExpression sessionVariables tableName insertCheckCondition
postInsertCheck <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) insertCheckCondition
returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _aiOutput
pure $
API.InsertMutationOperation
@ -231,8 +238,8 @@ translateUpdateBatch ::
CPS.WriterT TableRelationships m API.UpdateMutationOperation
translateUpdateBatch sessionVariables AnnotatedUpdateG {..} UpdateBatch {..} = do
updates <- lift $ translateUpdateOperations sessionVariables _ubOperations
whereExp <- translateBoolExpToExpression sessionVariables tableName (BoolAnd [_auUpdatePermissions, _ubWhere])
postUpdateCheck <- translateBoolExpToExpression sessionVariables tableName _auCheck
whereExp <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) (BoolAnd [_auUpdatePermissions, _ubWhere])
postUpdateCheck <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) _auCheck
returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _auOutput
pure $
@ -244,7 +251,7 @@ translateUpdateBatch sessionVariables AnnotatedUpdateG {..} UpdateBatch {..} = d
API._umoReturningFields = HashMap.mapKeys (API.FieldName . getFieldNameTxt) returningFields
}
where
tableName = Witch.from _auTable
tableName :: API.TableName = Witch.from _auTable
translateUpdateOperations ::
forall m.
@ -275,7 +282,7 @@ translateDelete ::
AnnDelG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m API.DeleteMutationOperation
translateDelete sessionVariables AnnDel {..} = do
whereExp <- translateBoolExpToExpression sessionVariables tableName (BoolAnd [permissionFilter, whereClause])
whereExp <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) (BoolAnd [permissionFilter, whereClause])
returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _adOutput
pure $
API.DeleteMutationOperation
@ -284,7 +291,7 @@ translateDelete sessionVariables AnnDel {..} = do
API._dmoReturningFields = HashMap.mapKeys (API.FieldName . getFieldNameTxt) returningFields
}
where
tableName = Witch.from _adTable
tableName :: API.TableName = Witch.from _adTable
(permissionFilter, whereClause) = _adWhere
translateMutationOutputToReturningFields ::
@ -298,7 +305,7 @@ translateMutationOutputToReturningFields ::
CPS.WriterT writerOutput m (HashMap FieldName API.Field)
translateMutationOutputToReturningFields sessionVariables tableName = \case
MOutSinglerowObject annFields ->
translateAnnFields sessionVariables noPrefix tableName annFields
translateAnnFields sessionVariables noPrefix (TableNameKey tableName) annFields
MOutMultirowFields mutFields ->
HashMap.unions <$> traverse (uncurry $ translateMutField sessionVariables tableName) mutFields
@ -323,7 +330,7 @@ translateMutField sessionVariables tableName fieldName = \case
-- to us
pure mempty
MRet annFields ->
translateAnnFields sessionVariables (prefixWith fieldName) tableName annFields
translateAnnFields sessionVariables (prefixWith fieldName) (TableNameKey tableName) annFields
--------------------------------------------------------------------------------

View File

@ -22,11 +22,13 @@ import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NE
import Data.Semigroup (Min (..))
import Data.Set qualified as Set
import Data.Text.Extended (toTxt)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Backend
import Hasura.Backends.DataConnector.Adapter.Types
import Hasura.Backends.DataConnector.Plan.Common
import Hasura.Base.Error
import Hasura.Function.Cache qualified as Function
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.OrderBy
@ -101,29 +103,63 @@ translateAnnSelectToQueryRequest ::
forall m fieldType.
MonadError QErr m =>
SessionVariables ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
(TableRelationshipsKey -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
m API.QueryRequest
translateAnnSelectToQueryRequest sessionVariables translateFieldsAndAggregates selectG = do
tableName <- extractTableName selectG
(query, (TableRelationships tableRelationships)) <- CPS.runWriterT (translateAnnSelect sessionVariables translateFieldsAndAggregates tableName selectG)
let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> HashMap.toList tableRelationships
pure $
API.QueryRequest
{ _qrTable = tableName,
_qrTableRelationships = apiTableRelationships,
_qrQuery = query,
_qrForeach = Nothing
}
extractTableName :: MonadError QErr m => AnnSelectG 'DataConnector fieldsType valueType -> m API.TableName
extractTableName selectG =
case _asnFrom selectG of
FromTable tn -> pure $ Witch.from tn
FromIdentifier _ -> throw400 NotSupported "AnnSelectG: FromIdentifier not supported"
FromFunction {} -> throw400 NotSupported "AnnSelectG: FromFunction not supported"
FromNativeQuery {} -> throw400 NotSupported "AnnSelectG: FromNativeQuery not supported"
FromStoredProcedure {} -> throw400 NotSupported "AnnSelectG: FromStoredProcedure not supported"
FromTable tableName -> do
(query, TableRelationships tableRelationships) <-
CPS.runWriterT (translateAnnSelect sessionVariables translateFieldsAndAggregates (TableNameKey (Witch.into tableName)) selectG)
let relationships = mkRelationships <$> HashMap.toList tableRelationships
pure $
API.QRTable
API.TableRequest
{ _trTable = Witch.into tableName,
_trRelationships = Set.fromList relationships,
_trQuery = query,
_trForeach = Nothing
}
FromFunction fn@(FunctionName functionName) argsExp _dListM -> do
args <- mkArgs sessionVariables argsExp fn
(query, TableRelationships tableRelationships) <-
CPS.runWriterT (translateAnnSelect sessionVariables translateFieldsAndAggregates (FunctionNameKey (Witch.into functionName)) selectG)
let relationships = mkRelationships <$> HashMap.toList tableRelationships
pure $
API.QRFunction
API.FunctionRequest
{ _frFunction = Witch.into functionName,
_frRelationships = Set.fromList relationships,
_frQuery = query,
_frFunctionArguments = args
}
mkRelationships :: (TableRelationshipsKey, (HashMap API.RelationshipName API.Relationship)) -> API.Relationships
mkRelationships (FunctionNameKey functionName, relationships) = API.RFunction (API.FunctionRelationships functionName relationships)
mkRelationships (TableNameKey tableName, relationships) = API.RTable (API.TableRelationships tableName relationships)
mkArgs ::
( MonadError QErr m
) =>
SessionVariables ->
Function.FunctionArgsExpG (ArgumentExp (UnpreparedValue 'DataConnector)) ->
FunctionName ->
m [API.FunctionArgument]
mkArgs sessionVariables (Function.FunctionArgsExp ps ns) functionName = do
unless (null ps) $ throw400 NotSupported $ "Positional arguments not supported in function " <> toTxt functionName
getNamed
where
getNamed = mapM mkArg (HashMap.toList ns)
mkArg (n, input) = (API.NamedArgument n . API.ScalarArgumentValue) <$> getValue input
getValue (AEInput x) = case x of
UVLiteral _ -> throw400 NotSupported "Literal not supported in Data Connector function args."
UVSessionVar _ _ -> throw400 NotSupported "SessionVar not supported in Data Connector function args."
UVParameter _ (ColumnValue t v) -> pure (API.ScalarValue v (coerce (toTxt t)))
UVSession -> pure (API.ScalarValue (J.toJSON sessionVariables) (API.ScalarType "json"))
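-- A rough sketch of the result (illustrative only; "limit" and its value are hypothetical
-- user inputs, the session argument name comes from the function's metadata configuration,
-- and the scalar type tag is taken from the argument's column type):
--
--   [ API.NamedArgument "limit"
--       (API.ScalarArgumentValue (API.ScalarValue (J.Number 5) (API.ScalarType "number")))
--   , API.NamedArgument "__hasura_session"
--       (API.ScalarArgumentValue (API.ScalarValue (J.toJSON sessionVariables) (API.ScalarType "json")))
--   ]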
translateAnnSelect ::
( Has TableRelationships writerOutput,
@ -131,18 +167,18 @@ translateAnnSelect ::
MonadError QErr m
) =>
SessionVariables ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT writerOutput m FieldsAndAggregates) ->
API.TableName ->
(TableRelationshipsKey -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT writerOutput m FieldsAndAggregates) ->
TableRelationshipsKey ->
AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m API.Query
translateAnnSelect sessionVariables translateFieldsAndAggregates tableName selectG = do
FieldsAndAggregates {..} <- translateFieldsAndAggregates tableName (_asnFields selectG)
translateAnnSelect sessionVariables translateFieldsAndAggregates entityName selectG = do
FieldsAndAggregates {..} <- translateFieldsAndAggregates entityName (_asnFields selectG)
let whereClauseWithPermissions =
case _saWhere (_asnArgs selectG) of
Just expr -> BoolAnd [expr, _tpFilter (_asnPerm selectG)]
Nothing -> _tpFilter (_asnPerm selectG)
whereClause <- translateBoolExpToExpression sessionVariables tableName whereClauseWithPermissions
orderBy <- traverse (translateOrderBy sessionVariables tableName) (_saOrderBy $ _asnArgs selectG)
whereClause <- translateBoolExpToExpression sessionVariables entityName whereClauseWithPermissions
orderBy <- traverse (translateOrderBy sessionVariables entityName) (_saOrderBy $ _asnArgs selectG)
pure
API.Query
{ _qFields = mapFieldNameHashMap <$> _faaFields,
@ -166,13 +202,13 @@ translateOrderBy ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
NE.NonEmpty (AnnotatedOrderByItemG 'DataConnector (UnpreparedValue 'DataConnector)) ->
CPS.WriterT writerOutput m API.OrderBy
translateOrderBy sessionVariables sourceTableName orderByItems = do
translateOrderBy sessionVariables sourceName orderByItems = do
orderByElementsAndRelations <- for orderByItems \OrderByItemG {..} -> do
let orderDirection = maybe API.Ascending Witch.from obiType
translateOrderByElement sessionVariables sourceTableName orderDirection [] obiColumn
translateOrderByElement sessionVariables sourceName orderDirection [] obiColumn
relations <- lift . mergeOrderByRelations $ snd <$> orderByElementsAndRelations
pure
API.OrderBy
@ -186,12 +222,12 @@ translateOrderByElement ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
API.OrderDirection ->
[API.RelationshipName] ->
AnnotatedOrderByElement 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m (API.OrderByElement, HashMap API.RelationshipName API.OrderByRelation)
translateOrderByElement sessionVariables sourceTableName orderDirection targetReversePath = \case
translateOrderByElement sessionVariables sourceName orderDirection targetReversePath = \case
AOCColumn (ColumnInfo {..}) ->
pure
( API.OrderByElement
@ -202,15 +238,15 @@ translateOrderByElement sessionVariables sourceTableName orderDirection targetRe
mempty
)
AOCObjectRelation relationshipInfo filterExp orderByElement -> do
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(translatedOrderByElement, subOrderByRelations) <- translateOrderByElement sessionVariables _rTargetTable orderDirection (relationshipName : targetReversePath) orderByElement
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceName relationshipInfo
(translatedOrderByElement, subOrderByRelations) <- translateOrderByElement sessionVariables (TableNameKey _rTargetTable) orderDirection (relationshipName : targetReversePath) orderByElement
targetTableWhereExp <- translateBoolExpToExpression sessionVariables _rTargetTable filterExp
targetTableWhereExp <- translateBoolExpToExpression sessionVariables (TableNameKey _rTargetTable) filterExp
let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp subOrderByRelations)]
pure (translatedOrderByElement, orderByRelations)
AOCArrayAggregation relationshipInfo filterExp aggregateOrderByElement -> do
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceName relationshipInfo
orderByTarget <- case aggregateOrderByElement of
AAOCount ->
pure API.OrderByStarCountAggregate
@ -226,7 +262,7 @@ translateOrderByElement sessionVariables sourceTableName orderDirection targetRe
_obeOrderDirection = orderDirection
}
targetTableWhereExp <- translateBoolExpToExpression sessionVariables _rTargetTable filterExp
targetTableWhereExp <- translateBoolExpToExpression sessionVariables (TableNameKey _rTargetTable) filterExp
let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp mempty)]
pure (translatedOrderByElement, orderByRelations)
@ -256,11 +292,11 @@ translateAnnFieldsWithNoAggregates ::
) =>
SessionVariables ->
FieldPrefix ->
API.TableName ->
TableRelationshipsKey ->
AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m FieldsAndAggregates
translateAnnFieldsWithNoAggregates sessionVariables fieldNamePrefix sourceTableName fields =
(\fields' -> FieldsAndAggregates (Just fields') Nothing) <$> translateAnnFields sessionVariables fieldNamePrefix sourceTableName fields
translateAnnFieldsWithNoAggregates sessionVariables fieldNamePrefix sourceName fields =
(\fields' -> FieldsAndAggregates (Just fields') Nothing) <$> translateAnnFields sessionVariables fieldNamePrefix sourceName fields
translateAnnFields ::
( Has TableRelationships writerOutput,
@ -269,11 +305,11 @@ translateAnnFields ::
) =>
SessionVariables ->
FieldPrefix ->
API.TableName ->
TableRelationshipsKey ->
AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m (HashMap FieldName API.Field)
translateAnnFields sessionVariables fieldNamePrefix sourceTableName fields = do
translatedFields <- traverse (traverse (translateAnnField sessionVariables sourceTableName)) fields
translateAnnFields sessionVariables fieldNamePrefix sourceName fields = do
translatedFields <- traverse (traverse (translateAnnField sessionVariables sourceName)) fields
pure $ HashMap.fromList (mapMaybe (\(fieldName, field) -> (applyPrefix fieldNamePrefix fieldName,) <$> field) translatedFields)
translateAnnField ::
@ -282,21 +318,23 @@ translateAnnField ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
AnnFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m (Maybe API.Field)
translateAnnField sessionVariables sourceTableName = \case
AFNestedObject nestedObj ->
Just . API.NestedObjField (Witch.from $ _anosColumn nestedObj)
<$> translateNestedObjectSelect sessionVariables sourceTableName nestedObj
AFColumn colField ->
-- TODO: make sure certain fields in colField are not in use, since we don't
-- support them
-- TODO: make sure certain fields in colField are not in use, since we don't support them
pure . Just $ API.ColumnField (Witch.from $ _acfColumn colField) (Witch.from . columnTypeToScalarType $ _acfType colField)
AFObjectRelation objRel ->
case _aosTarget (_aarAnnSelect objRel) of
FromTable tableName -> do
let targetTable = Witch.from tableName
let relationshipName = mkRelationshipName $ _aarRelationshipName objRel
fields <- translateAnnFields sessionVariables noPrefix targetTable (_aosFields (_aarAnnSelect objRel))
whereClause <- translateBoolExpToExpression sessionVariables targetTable (_aosTargetFilter (_aarAnnSelect objRel))
fields <- translateAnnFields sessionVariables noPrefix (TableNameKey targetTable) (_aosFields (_aarAnnSelect objRel))
whereClause <- translateBoolExpToExpression sessionVariables (TableNameKey targetTable) (_aosTargetFilter (_aarAnnSelect objRel))
recordTableRelationship
sourceTableName
@ -330,9 +368,6 @@ translateAnnField sessionVariables sourceTableName = \case
-- and add them back to the response JSON when we reshape what the agent returns
-- to us
pure Nothing
AFNestedObject nestedObj ->
Just . API.NestedObjField (Witch.from $ _anosColumn nestedObj)
<$> translateNestedObjectSelect sessionVariables sourceTableName nestedObj
translateArrayRelationSelect ::
( Has TableRelationships writerOutput,
@ -340,28 +375,33 @@ translateArrayRelationSelect ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT writerOutput m FieldsAndAggregates) ->
TableRelationshipsKey ->
(TableRelationshipsKey -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT writerOutput m FieldsAndAggregates) ->
AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector)) ->
CPS.WriterT writerOutput m API.Field
translateArrayRelationSelect sessionVariables sourceTableName translateFieldsAndAggregates arrRel = do
targetTable <- lift $ extractTableName (_aarAnnSelect arrRel)
query <- translateAnnSelect sessionVariables translateFieldsAndAggregates targetTable (_aarAnnSelect arrRel)
let relationshipName = mkRelationshipName $ _aarRelationshipName arrRel
translateArrayRelationSelect sessionVariables sourceName translateFieldsAndAggregates arrRel = do
case _asnFrom (_aarAnnSelect arrRel) of
FromIdentifier _ -> lift $ throw400 NotSupported "AnnSelectG: FromIdentifier not supported"
FromNativeQuery {} -> lift $ throw400 NotSupported "AnnSelectG: FromNativeQuery not supported"
FromStoredProcedure {} -> lift $ throw400 NotSupported "AnnSelectG: FromStoredProcedure not supported"
FromFunction {} -> lift $ throw400 NotSupported "translateArrayRelationSelect: FromFunction not currently supported"
FromTable targetTable -> do
query <- translateAnnSelect sessionVariables translateFieldsAndAggregates (TableNameKey (Witch.into targetTable)) (_aarAnnSelect arrRel)
let relationshipName = mkRelationshipName $ _aarRelationshipName arrRel
recordTableRelationship
sourceTableName
relationshipName
API.Relationship
{ _rTargetTable = targetTable,
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping arrRel)
}
recordTableRelationship
sourceName
relationshipName
API.Relationship
{ _rTargetTable = Witch.into targetTable,
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping arrRel)
}
pure . API.RelField $
API.RelationshipField
relationshipName
query
pure . API.RelField $
API.RelationshipField
relationshipName
query
translateTableAggregateFields ::
( Has TableRelationships writerOutput,
@ -369,11 +409,11 @@ translateTableAggregateFields ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
TableAggregateFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m FieldsAndAggregates
translateTableAggregateFields sessionVariables sourceTableName fields = do
mconcat <$> traverse (uncurry (translateTableAggregateField sessionVariables sourceTableName)) fields
translateTableAggregateFields sessionVariables sourceName fields = do
mconcat <$> traverse (uncurry (translateTableAggregateField sessionVariables sourceName)) fields
translateTableAggregateField ::
( Has TableRelationships writerOutput,
@ -381,11 +421,11 @@ translateTableAggregateField ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
FieldName ->
TableAggregateFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m FieldsAndAggregates
translateTableAggregateField sessionVariables sourceTableName fieldName = \case
translateTableAggregateField sessionVariables sourceName fieldName = \case
TAFAgg aggregateFields -> do
let fieldNamePrefix = prefixWith fieldName
translatedAggregateFields <- lift $ mconcat <$> traverse (uncurry (translateAggregateField fieldNamePrefix)) aggregateFields
@ -394,7 +434,7 @@ translateTableAggregateField sessionVariables sourceTableName fieldName = \case
Nothing
(Just translatedAggregateFields)
TAFNodes _ fields ->
translateAnnFieldsWithNoAggregates sessionVariables (prefixWith fieldName) sourceTableName fields
translateAnnFieldsWithNoAggregates sessionVariables (prefixWith fieldName) sourceName fields
TAFExp _txt ->
-- We ignore literal text fields (we don't send them to the data connector agent)
-- and add them back to the response JSON when we reshape what the agent returns
@ -446,11 +486,11 @@ translateNestedObjectSelect ::
MonadError QErr m
) =>
SessionVariables ->
API.TableName ->
TableRelationshipsKey ->
AnnNestedObjectSelectG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT writerOutput m API.Query
translateNestedObjectSelect sessionVariables tableName selectG = do
FieldsAndAggregates {..} <- translateAnnFieldsWithNoAggregates sessionVariables noPrefix tableName $ _anosFields selectG
translateNestedObjectSelect sessionVariables relationshipKey selectG = do
FieldsAndAggregates {..} <- translateAnnFieldsWithNoAggregates sessionVariables noPrefix relationshipKey $ _anosFields selectG
pure
API.Query
{ _qFields = mapFieldNameHashMap <$> _faaFields,
@ -572,6 +612,12 @@ reshapeField ::
m J.Encoding
reshapeField field responseFieldValue =
case field of
AFNestedObject nestedObj -> do
nestedObjectFieldValue <- API.deserializeAsNestedObjFieldValue <$> responseFieldValue
case nestedObjectFieldValue of
Left err -> throw500 $ "Expected object in field returned by Data Connector agent: " <> err -- TODO(dmoverton): Add pathing information for error clarity
Right nestedResponse ->
reshapeAnnFields noPrefix (_anosFields nestedObj) nestedResponse
AFColumn _columnField -> do
columnFieldValue <- API.deserializeAsColumnFieldValue <$> responseFieldValue
pure $ J.toEncoding columnFieldValue
@ -587,12 +633,6 @@ reshapeField field responseFieldValue =
AFArrayRelation (ASAggregate aggregateArrayRelationField) ->
reshapeAnnRelationSelect reshapeTableAggregateFields aggregateArrayRelationField =<< responseFieldValue
AFExpression txt -> pure $ JE.text txt
AFNestedObject nestedObj -> do
nestedObjectFieldValue <- API.deserializeAsNestedObjFieldValue <$> responseFieldValue
case nestedObjectFieldValue of
Left err -> throw500 $ "Expected object in field returned by Data Connector agent: " <> err -- TODO(dmoverton): Add pathing information for error clarity
Right nestedResponse ->
reshapeAnnFields noPrefix (_anosFields nestedObj) nestedResponse
reshapeAnnRelationSelect ::
MonadError QErr m =>

View File

@ -80,26 +80,34 @@ mkRemoteRelationshipPlan sessionVariables _sourceConfig joinIds joinIdsSchema ar
FromTable table -> Witch.from table
other -> error $ "translateAnnObjectSelectToQueryRequest: " <> show other
((fields, whereClause), (TableRelationships tableRelationships)) <- CPS.runWriterT $ do
fields <- QueryPlan.translateAnnFields sessionVariables noPrefix tableName _aosFields
whereClause <- translateBoolExpToExpression sessionVariables tableName _aosTargetFilter
fields <- QueryPlan.translateAnnFields sessionVariables noPrefix (TableNameKey tableName) _aosFields
whereClause <- translateBoolExpToExpression sessionVariables (TableNameKey tableName) _aosTargetFilter
pure (fields, whereClause)
let apiTableRelationships = Set.fromList $ uncurry API.TableRelationships <$> HashMap.toList tableRelationships
let apiTableRelationships = Set.fromList $ tableRelationshipsToList tableRelationships
pure $
API.QueryRequest
{ _qrTable = tableName,
_qrTableRelationships = apiTableRelationships,
_qrQuery =
API.Query
{ _qFields = Just $ mapFieldNameHashMap fields,
_qAggregates = Nothing,
_qAggregatesLimit = Nothing,
_qLimit = Nothing,
_qOffset = Nothing,
_qWhere = whereClause,
_qOrderBy = Nothing
},
_qrForeach = Just foreachRowFilter
}
API.QRTable $
API.TableRequest
{ _trTable = tableName,
_trRelationships = apiTableRelationships,
_trQuery =
API.Query
{ _qFields = Just $ mapFieldNameHashMap fields,
_qAggregates = Nothing,
_qAggregatesLimit = Nothing,
_qLimit = Nothing,
_qOffset = Nothing,
_qWhere = whereClause,
_qOrderBy = Nothing
},
_trForeach = Just foreachRowFilter
}
tableRelationshipsToList :: HashMap TableRelationshipsKey (HashMap API.RelationshipName API.Relationship) -> [API.Relationships]
tableRelationshipsToList m = map (either (API.RFunction . uncurry API.FunctionRelationships) (API.RTable . uncurry API.TableRelationships) . tableRelationshipsKeyToEither) (HashMap.toList m)
tableRelationshipsKeyToEither :: (TableRelationshipsKey, c) -> Either (API.FunctionName, c) (API.TableName, c)
tableRelationshipsKeyToEither (FunctionNameKey f, x) = Left (f, x)
tableRelationshipsKeyToEither (TableNameKey t, x) = Right (t, x)
translateForeachRowFilter :: MonadError QErr m => FieldName -> HashMap FieldName (ColumnName, ScalarType) -> J.Object -> m (HashMap API.ColumnName API.ScalarValue)
translateForeachRowFilter argumentIdFieldName joinIdsSchema joinIds =

View File

@ -69,7 +69,7 @@ buildFunctionInfo ::
SourceName ->
FunctionName 'MSSQL ->
SystemDefined ->
FunctionConfig ->
FunctionConfig 'MSSQL ->
FunctionPermissionsMap ->
RawFunctionInfo 'MSSQL ->
Maybe Text ->

View File

@ -31,6 +31,8 @@ instance BackendMetadata 'MSSQL where
throw500 "Computed fields are not yet defined for MSSQL backends"
supportsBeingRemoteRelationshipTarget _ = True
listAllTables = MSSQL.listAllTables
listAllTrackables _ =
throw500 "Computed fields are not yet defined for MSSQL backends"
getTableInfo _ _ = throw400 UnexpectedPayload "get_table_info not yet supported in MSSQL!"
validateNativeQuery _ _ _ _ = pure () -- for now, all queries are valid
validateStoredProcedure _ _ _ _ = pure () -- for now, all stored procedures are valid

View File

@ -66,7 +66,7 @@ buildFunctionInfo ::
SourceName ->
QualifiedFunction ->
SystemDefined ->
FunctionConfig ->
FunctionConfig ('Postgres pgKind) ->
FunctionPermissionsMap ->
RawFunctionInfo ('Postgres pgKind) ->
Maybe Text ->

View File

@ -285,3 +285,5 @@ instance
`onLeftM` \err -> throwError (prefixQErr "failed to fetch source tables: " err)
pure [QualifiedObject {..} | (qSchema, qName) <- results]
listAllTrackables _ = throw500 "listAllTrackables not supported by Postgres"

View File

@ -110,18 +110,19 @@ selectFunctionAggregate mkRootFieldName fi@FunctionInfo {..} description = runMa
let customization = _siCustomization sourceInfo
tCase = _rscNamingConvention customization
mkTypename = runMkTypename $ _rscTypeNames customization
targetTableInfo <- askTableInfo _fiReturnType
selectPermissions <- hoistMaybe $ tableSelectPermissions roleName targetTableInfo
guard $ spiAllowAgg selectPermissions
xNodesAgg <- hoistMaybe $ nodesAggExtension @('Postgres pgKind)
tableInfo <- askTableInfo _fiReturnType
nodesParser <- MaybeT $ tableSelectionList tableInfo
nodesParser <- MaybeT $ tableSelectionList targetTableInfo
lift do
stringifyNumbers <- retrieve Options.soStringifyNumbers
tableGQLName <- getTableIdentifierName tableInfo
tableArgsParser <- tableArguments tableInfo
tableGQLName <- getTableIdentifierName targetTableInfo
tableArgsParser <- tableArguments targetTableInfo
functionArgsParser <- customSQLFunctionArgs fi _fiGQLAggregateName _fiGQLArgsName
aggregateParser <- tableAggregationFields tableInfo
aggregateParser <- tableAggregationFields targetTableInfo
let aggregateFieldName = runMkRootFieldName mkRootFieldName _fiGQLAggregateName
argsParser = liftA2 (,) functionArgsParser tableArgsParser
selectionName = mkTypename (applyTypeNameCaseIdentifier tCase $ mkTableAggregateTypeName tableGQLName)
@ -165,15 +166,15 @@ selectFunctionConnection mkRootFieldName fi@FunctionInfo {..} description pkeyCo
roleName <- retrieve scRole
let customization = _siCustomization sourceInfo
tCase = _rscNamingConvention customization
returnTableInfo <- lift $ askTableInfo _fiReturnType
selectPermissions <- hoistMaybe $ tableSelectPermissions roleName returnTableInfo
xRelayInfo <- hoistMaybe $ relayExtension @('Postgres pgKind)
tableInfo <- lift $ askTableInfo _fiReturnType
selectionSetParser <- MaybeT $ tableConnectionSelectionSet tableInfo
selectionSetParser <- MaybeT $ tableConnectionSelectionSet returnTableInfo
lift do
let fieldName = runMkRootFieldName mkRootFieldName $ _fiGQLName <> Name.__connection
stringifyNumbers <- retrieve Options.soStringifyNumbers
tableConnectionArgsParser <- tableConnectionArgs pkeyColumns tableInfo
tableConnectionArgsParser <- tableConnectionArgs pkeyColumns returnTableInfo
functionArgsParser <- customSQLFunctionArgs fi _fiGQLName _fiGQLArgsName
let argsParser = liftA2 (,) functionArgsParser tableConnectionArgsParser
pure $

View File

@ -77,7 +77,7 @@ trackFunctionP2 ::
(MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) =>
SourceName ->
FunctionName b ->
FunctionConfig ->
FunctionConfig b ->
Maybe Text ->
m EncJSON
trackFunctionP2 sourceName qf config comment = do
@ -114,7 +114,7 @@ runTrackFunc (TrackFunction qf) = do
data TrackFunctionV2 (b :: BackendType) = TrackFunctionV2
{ _tfv2Source :: SourceName,
_tfv2Function :: FunctionName b,
_tfv2Configuration :: FunctionConfig,
_tfv2Configuration :: FunctionConfig b,
_tfv2Comment :: Maybe Text
}
@ -239,13 +239,15 @@ runCreateFunctionPermission (FunctionPermissionArgument functionName source role
"permission of role "
<> role <<> " already exists for function "
<> functionName <<> " in source: " <>> source
functionTableInfo <-
unsafeTableInfo @b source (_fiReturnType functionInfo) sourceCache
`onNothing` throw400 NotExists ("function's return table " <> _fiReturnType functionInfo <<> " not found in the cache")
(functionTableName, functionTableInfo) <- do
let tn = _fiReturnType functionInfo
case unsafeTableInfo @b source tn sourceCache of
Nothing -> throw400 NotExists ("function's return table " <> tn <<> " not found in the cache")
Just info -> pure (tn, info)
unless (role `HashMap.member` _tiRolePermInfoMap functionTableInfo) $
throw400 NotSupported $
"function permission can only be added when the function's return table "
<> _fiReturnType functionInfo <<> " has select permission configured for role: " <>> role
<> functionTableName <<> " has select permission configured for role: " <>> role
buildSchemaCacheFor
( MOSourceObjId source $
AB.mkAnyBackend (SMOFunctionPermission @b functionName role)
@ -306,7 +308,7 @@ runDropFunctionPermission (FunctionPermissionArgument functionName source role)
data SetFunctionCustomization b = SetFunctionCustomization
{ _sfcSource :: SourceName,
_sfcFunction :: FunctionName b,
_sfcConfiguration :: FunctionConfig
_sfcConfiguration :: FunctionConfig b
}
deriving instance Backend b => Show (SetFunctionCustomization b)

View File

@ -19,6 +19,9 @@ module Hasura.Function.Cache
InputArgument (..),
FunctionArgsExpG (..),
FunctionArgsExp,
TrackableFunctionInfo (..),
TrackableTableInfo (..),
TrackableInfo (..),
emptyFunctionConfig,
emptyFunctionCustomRootFields,
funcTypToTxt,
@ -26,16 +29,7 @@ module Hasura.Function.Cache
)
where
import Autodocodec
( HasCodec (codec),
bimapCodec,
dimapCodec,
optionalField',
optionalFieldWith',
optionalFieldWithDefault',
requiredField',
stringConstCodec,
)
import Autodocodec (HasCodec (codec))
import Autodocodec qualified as AC
import Autodocodec.Extended (graphQLFieldNameCodec)
import Control.Lens
@ -82,8 +76,8 @@ instance Show FunctionVolatility where
newtype FunctionArgName = FunctionArgName {getFuncArgNameTxt :: Text}
deriving (Show, Eq, Ord, NFData, ToJSON, ToJSONKey, FromJSON, FromJSONKey, ToTxt, IsString, Generic, Hashable, Lift, Data)
instance HasCodec FunctionArgName where
codec = dimapCodec FunctionArgName getFuncArgNameTxt codec
instance AC.HasCodec FunctionArgName where
codec = AC.dimapCodec FunctionArgName getFuncArgNameTxt codec
data InputArgument a
= IAUserProvided a
@ -104,7 +98,7 @@ data FunctionExposedAs = FEAQuery | FEAMutation
instance NFData FunctionExposedAs
instance HasCodec FunctionExposedAs where
codec = stringConstCodec [(FEAQuery, "query"), (FEAMutation, "mutation")]
codec = AC.stringConstCodec [(FEAQuery, "query"), (FEAMutation, "mutation")]
instance FromJSON FunctionExposedAs where
parseJSON = genericParseJSON defaultOptions {sumEncoding = UntaggedValue, constructorTagModifier = map toLower . drop 3}
@ -121,7 +115,7 @@ newtype FunctionPermissionInfo = FunctionPermissionInfo
instance HasCodec FunctionPermissionInfo where
codec =
AC.object "FunctionPermissionInfo" $
FunctionPermissionInfo <$> requiredField' "role" AC..= _fpmRole
FunctionPermissionInfo <$> AC.requiredField' "role" AC..= _fpmRole
instance FromJSON FunctionPermissionInfo where
parseJSON = genericParseJSON hasuraJSON
@ -146,11 +140,11 @@ instance NFData FunctionCustomRootFields
instance HasCodec FunctionCustomRootFields where
codec =
bimapCodec checkForDup id $
AC.bimapCodec checkForDup id $
AC.object "FunctionCustomRootFields" $
FunctionCustomRootFields
<$> optionalFieldWith' "function" graphQLFieldNameCodec AC..= _fcrfFunction
<*> optionalFieldWith' "function_aggregate" graphQLFieldNameCodec AC..= _fcrfFunctionAggregate
<$> AC.optionalFieldWith' "function" graphQLFieldNameCodec AC..= _fcrfFunction
<*> AC.optionalFieldWith' "function_aggregate" graphQLFieldNameCodec AC..= _fcrfFunctionAggregate
where
checkForDup (FunctionCustomRootFields (Just f) (Just fa))
| f == fa =
@ -204,7 +198,8 @@ data FunctionInfo (b :: BackendType) = FunctionInfo
-- | NOTE: when a table is created, a new composite type of the same name is
-- automatically created; so strictly speaking this field means "the function
-- returns the composite type corresponding to this table".
_fiReturnType :: TableName b,
_fiReturnType :: TableName b, -- NOTE: We will extend this in the future, but for now it always resolves to a (TableName b)
-- | this field represents the description of the function as present on the database
_fiDescription :: Maybe Text,
-- | Roles to which the function is accessible
@ -223,11 +218,56 @@ instance (Backend b) => ToJSON (FunctionInfo b) where
type FunctionCache b = HashMap (FunctionName b) (FunctionInfo b) -- info of all functions
data TrackableFunctionInfo b = TrackableFunctionInfo
{ tfiFunctionName :: FunctionName b,
tfiFunctionVolitility :: FunctionVolatility
}
deriving (Generic)
deriving instance Backend b => Show (TrackableFunctionInfo b)
deriving instance Backend b => Eq (TrackableFunctionInfo b)
instance (Backend b) => ToJSON (TrackableFunctionInfo b) where
toJSON (TrackableFunctionInfo name volitility) =
object
[ "name" Data.Aeson..= name,
"volitility" Data.Aeson..= volitility
]
newtype TrackableTableInfo b = TrackableTableInfo
{tfTableiName :: TableName b}
deriving (Generic)
deriving instance Backend b => Show (TrackableTableInfo b)
deriving instance Backend b => Eq (TrackableTableInfo b)
instance (Backend b) => ToJSON (TrackableTableInfo b) where
toJSON (TrackableTableInfo ti) = object ["name" Data.Aeson..= ti]
data TrackableInfo b = TrackableInfo
{ trackableFunctions :: [TrackableFunctionInfo b],
trackableTables :: [TrackableTableInfo b]
}
deriving (Generic)
deriving instance Backend b => Show (TrackableInfo b)
deriving instance Backend b => Eq (TrackableInfo b)
instance (Backend b) => ToJSON (TrackableInfo b) where
toJSON (TrackableInfo functions tables) =
object
[ "tables" Data.Aeson..= tables,
"functions" Data.Aeson..= functions
]
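For reference, a self-contained sketch of the JSON shape this serialisation produces. The types here (`Trackables`, `TrackableFunction`, `TrackableTable`) are hypothetical simplifications of the instances above, with plain `String` stand-ins for the backend-specific `FunctionName b` / `TableName b` and for the volatility enum:

```haskell
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (ToJSON (..), Value, object, (.=))

-- Simplified stand-ins for TrackableFunctionInfo / TrackableTableInfo / TrackableInfo.
data TrackableFunction = TrackableFunction
  { functionName :: String,
    functionVolatility :: String -- e.g. "STABLE"; the real field is a FunctionVolatility
  }

newtype TrackableTable = TrackableTable {trackableTableName :: String}

data Trackables = Trackables
  { tFunctions :: [TrackableFunction],
    tTables :: [TrackableTable]
  }

instance ToJSON TrackableFunction where
  toJSON (TrackableFunction name volatility) =
    object ["name" .= name, "volitility" .= volatility] -- key spelling mirrors the instance above

instance ToJSON TrackableTable where
  toJSON (TrackableTable name) = object ["name" .= name]

instance ToJSON Trackables where
  toJSON (Trackables functions tables) =
    object ["tables" .= tables, "functions" .= functions]

-- Example value and its encoding:
--   {"tables":[{"name":"Artist"}],"functions":[{"name":"fibonacci","volitility":"STABLE"}]}
example :: Value
example =
  toJSON
    Trackables
      { tFunctions = [TrackableFunction "fibonacci" "STABLE"],
        tTables = [TrackableTable "Artist"]
      }
```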
-- Metadata requests related types
-- | Tracked function configuration, and payload of the 'pg_track_function' and
-- 'pg_set_function_customization' API calls.
data FunctionConfig = FunctionConfig
data FunctionConfig b = FunctionConfig
{ _fcSessionArgument :: Maybe FunctionArgName,
-- | In which top-level field should we expose this function?
--
@ -236,36 +276,43 @@ data FunctionConfig = FunctionConfig
-- docs for details of validation, etc.
_fcExposedAs :: Maybe FunctionExposedAs,
_fcCustomRootFields :: FunctionCustomRootFields,
_fcCustomName :: Maybe G.Name
_fcCustomName :: Maybe G.Name,
_fcResponse :: Maybe (FunctionReturnType b)
}
deriving (Show, Eq, Generic)
deriving (Generic)
instance NFData FunctionConfig
deriving stock instance Backend b => Show (FunctionConfig b)
instance HasCodec FunctionConfig where
deriving stock instance Backend b => Eq (FunctionConfig b)
instance Backend b => NFData (FunctionConfig b)
instance Backend b => HasCodec (FunctionConfig b) where
codec =
AC.object "FunctionConfig" $
FunctionConfig
<$> optionalField' "session_argument" AC..= _fcSessionArgument
<*> optionalField' "exposed_as" AC..= _fcExposedAs
<*> optionalFieldWithDefault' "custom_root_fields" emptyFunctionCustomRootFields AC..= _fcCustomRootFields
<*> optionalFieldWith' "custom_name" graphQLFieldNameCodec AC..= _fcCustomName
<$> AC.optionalField' "session_argument" AC..= _fcSessionArgument
<*> AC.optionalField' "exposed_as" AC..= _fcExposedAs
<*> AC.optionalFieldWithDefault' "custom_root_fields" emptyFunctionCustomRootFields AC..= _fcCustomRootFields
<*> AC.optionalFieldWith' "custom_name" graphQLFieldNameCodec AC..= _fcCustomName
<*> AC.optionalFieldWith' "response" codec AC..= _fcResponse
instance FromJSON FunctionConfig where
instance Backend b => FromJSON (FunctionConfig b) where
parseJSON = withObject "FunctionConfig" $ \obj ->
FunctionConfig
<$> obj .:? "session_argument"
<*> obj .:? "exposed_as"
<*> obj .:? "custom_root_fields" .!= emptyFunctionCustomRootFields
<*> obj .:? "custom_name"
<*> obj .:? "response"
instance ToJSON FunctionConfig where
instance Backend b => ToJSON (FunctionConfig b) where
toJSON = genericToJSON hasuraJSON {omitNothingFields = True}
toEncoding = genericToEncoding hasuraJSON {omitNothingFields = True}
-- | The default function config; v1 of the API implies this.
emptyFunctionConfig :: FunctionConfig
emptyFunctionConfig = FunctionConfig Nothing Nothing emptyFunctionCustomRootFields Nothing
emptyFunctionConfig :: FunctionConfig b
emptyFunctionConfig = FunctionConfig Nothing Nothing emptyFunctionCustomRootFields Nothing Nothing
type DBFunctionsMetadata b = HashMap (FunctionName b) (FunctionOverloads b)
@ -273,6 +320,8 @@ newtype FunctionOverloads b = FunctionOverloads {getFunctionOverloads :: NonEmpt
deriving newtype instance Backend b => Eq (FunctionOverloads b)
deriving newtype instance Backend b => Show (FunctionOverloads b)
deriving newtype instance FromJSON (RawFunctionInfo b) => FromJSON (FunctionOverloads b)
data FunctionArgsExpG a = FunctionArgsExp

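Since the whole `FunctionConfig` record is now indexed by the backend, the new optional `response` field gets the same optional-field treatment as the existing ones: absent on input it parses to `Nothing`, and `Nothing` fields are dropped on output (`omitNothingFields`). A self-contained sketch of that round-trip behaviour, with a hypothetical `SimpleFunctionConfig` standing in for `FunctionConfig b` and a raw JSON `Value` standing in for the backend-specific `FunctionReturnType b`:

```haskell
{-# LANGUAGE DeriveGeneric     #-}
{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson
import GHC.Generics (Generic)

-- Hypothetical, simplified stand-in for FunctionConfig b.
data SimpleFunctionConfig = SimpleFunctionConfig
  { session_argument :: Maybe String,
    exposed_as :: Maybe String,
    custom_name :: Maybe String,
    response :: Maybe Value -- stand-in for Maybe (FunctionReturnType b)
  }
  deriving (Show, Generic)

-- Absent optional fields parse to Nothing, mirroring the `.:?` parsers above.
instance FromJSON SimpleFunctionConfig where
  parseJSON = withObject "SimpleFunctionConfig" $ \o ->
    SimpleFunctionConfig
      <$> o .:? "session_argument"
      <*> o .:? "exposed_as"
      <*> o .:? "custom_name"
      <*> o .:? "response"

-- Nothing fields are omitted from the output, mirroring omitNothingFields above.
instance ToJSON SimpleFunctionConfig where
  toJSON = genericToJSON defaultOptions {omitNothingFields = True}

-- ghci> decode "{\"exposed_as\":\"query\"}" :: Maybe SimpleFunctionConfig
-- Just (SimpleFunctionConfig {session_argument = Nothing, exposed_as = Just "query", custom_name = Nothing, response = Nothing})
-- ghci> encode (SimpleFunctionConfig Nothing (Just "query") Nothing Nothing)
-- "{\"exposed_as\":\"query\"}"
```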
Some files were not shown because too many files have changed in this diff.