Add table/function filtering and detail level to schema introspection for Data Connectors to improve performance

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/10074
GitOrigin-RevId: 12de7387746f456cbefd9f76e446f56e9e643dc9
This commit is contained in:
Daniel Chambers 2023-08-18 14:27:49 +10:00 committed by hasura-bot
parent d141303da3
commit c726b528c4
32 changed files with 501 additions and 94 deletions

View File

@ -415,6 +415,53 @@ Columns can have their value generated by the database, for example auto-increme
If the agent declares a lack of mutability support in its capabilities, it should not declare tables/columns as mutable in its schema here.
#### Schema Request
The `/schema` endpoint may be sent a request body with optional filtering details designed to reduce the amount of schema data returned. Here's an example request body:
```json
{
"filters": {
"only_tables": [
["Artist"],
["Album"]
],
"only_functions": [
["SearchAlbums"]
]
},
"detail_level": "basic_info"
}
```
The `filters` property may contain an object with the following properties:
* `only_tables`: A list of table names. If specified, the schema response must contain only the tables named in this list. An empty list means no tables are returned.
* `only_functions`: A list of function names. If specified, the schema response must contain only the functions named in this list. An empty list means no functions are returned.
The `detail_level` property controls what data needs to be returned about functions and tables. There are two values: `everything` and `basic_info` (the default, if omitted, is `everything`). `everything` requires the agent to return all properties described above. `basic_info` requires the agent to only return a reduced set of properties about tables and functions; specifically only table names and types, and function names and types should be returned. All other properties should be omitted.
Here's an example response to the above request:
```json
{
"tables": [
{
"name": ["Artist"],
"type": "table"
},
{
"name": ["Album"],
"type": "table"
}
],
"functions": [
{
"name": ["SearchAlbums"],
"type": "read"
}
]
}
```
#### Type definitions
The `SchemaResponse` TypeScript type from [the reference implementation](./reference/src/types/index.ts) describes the valid response body for the `GET /schema` endpoint.

View File

@ -1,6 +1,6 @@
{
"name": "@hasura/dc-api-types",
"version": "0.40.0",
"version": "0.41.0",
"description": "Hasura GraphQL Engine Data Connector Agent API types",
"author": "Hasura (https://github.com/hasura/graphql-engine)",
"license": "Apache-2.0",

View File

@ -63,6 +63,15 @@
}
}
],
"requestBody": {
"content": {
"application/json;charset=utf-8": {
"schema": {
"$ref": "#/components/schemas/SchemaRequest"
}
}
}
},
"responses": {
"200": {
"content": {
@ -82,7 +91,7 @@
}
}
},
"description": " or `X-Hasura-DataConnector-Config` or `X-Hasura-DataConnector-SourceName`"
"description": " or `body` or `X-Hasura-DataConnector-Config` or `X-Hasura-DataConnector-SourceName`"
},
"500": {
"content": {
@ -1286,6 +1295,7 @@
"TableInfo": {
"properties": {
"columns": {
"default": [],
"description": "The columns of the table",
"items": {
"$ref": "#/components/schemas/ColumnInfo"
@ -1336,8 +1346,7 @@
}
},
"required": [
"name",
"columns"
"name"
],
"type": "object"
},
@ -1428,6 +1437,7 @@
"FunctionInfo": {
"properties": {
"args": {
"default": [],
"description": "argument info - name/types",
"items": {
"$ref": "#/components/schemas/FunctionInformationArgument"
@ -1448,6 +1458,7 @@
"one",
"many"
],
"nullable": true,
"type": "string"
},
"returns": {
@ -1459,10 +1470,7 @@
},
"required": [
"name",
"type",
"returns",
"response_cardinality",
"args"
"type"
],
"type": "object"
},
@ -1490,6 +1498,44 @@
],
"type": "object"
},
"SchemaRequest": {
"properties": {
"detail_level": {
"$ref": "#/components/schemas/DetailLevel"
},
"filters": {
"$ref": "#/components/schemas/SchemaFilters"
}
},
"type": "object"
},
"SchemaFilters": {
"properties": {
"only_functions": {
"description": "Only get the schemas for these functions",
"items": {
"$ref": "#/components/schemas/FunctionName"
},
"type": "array"
},
"only_tables": {
"description": "Only get the schemas for these tables",
"items": {
"$ref": "#/components/schemas/TableName"
},
"type": "array"
}
},
"type": "object"
},
"DetailLevel": {
"description": "How much information to return about the schema. Values:\n- 'everything': All information about the schema.\n- 'basic_info': For tables, only the table name and table type, for functions, only the function name and function type.\n",
"enum": [
"everything",
"basic_info"
],
"type": "string"
},
"QueryResponse": {
"properties": {
"aggregates": {

View File

@ -48,6 +48,7 @@ export type { DatasetTemplateName } from './models/DatasetTemplateName';
export type { DefaultValueGenerationStrategy } from './models/DefaultValueGenerationStrategy';
export type { DeleteCapabilities } from './models/DeleteCapabilities';
export type { DeleteMutationOperation } from './models/DeleteMutationOperation';
export type { DetailLevel } from './models/DetailLevel';
export type { ErrorResponse } from './models/ErrorResponse';
export type { ErrorResponseType } from './models/ErrorResponseType';
export type { ExistsExpression } from './models/ExistsExpression';
@ -132,6 +133,8 @@ export type { ScalarTypeCapabilities } from './models/ScalarTypeCapabilities';
export type { ScalarTypesCapabilities } from './models/ScalarTypesCapabilities';
export type { ScalarValue } from './models/ScalarValue';
export type { ScalarValueComparison } from './models/ScalarValueComparison';
export type { SchemaFilters } from './models/SchemaFilters';
export type { SchemaRequest } from './models/SchemaRequest';
export type { SchemaResponse } from './models/SchemaResponse';
export type { SetColumnRowUpdate } from './models/SetColumnRowUpdate';
export type { SingleColumnAggregate } from './models/SingleColumnAggregate';

View File

@ -0,0 +1,11 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
/**
* How much information to return about the schema. Values:
* - 'everything': All information about the schema.
* - 'basic_info': For tables, only the table name and table type, for functions, only the function name and function type.
*
*/
export type DetailLevel = 'everything' | 'basic_info';

View File

@ -11,7 +11,7 @@ export type FunctionInfo = {
/**
* argument info - name/types
*/
args: Array<FunctionInformationArgument>;
args?: Array<FunctionInformationArgument>;
/**
* Description of the table
*/
@ -20,8 +20,8 @@ export type FunctionInfo = {
/**
* object response if false, rows if true
*/
response_cardinality: 'one' | 'many';
returns: FunctionReturnType;
response_cardinality?: 'one' | 'many' | null;
returns?: FunctionReturnType;
type: FunctionType;
};

View File

@ -0,0 +1,18 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { FunctionName } from './FunctionName';
import type { TableName } from './TableName';
export type SchemaFilters = {
/**
* Only get the schemas for these functions
*/
only_functions?: Array<FunctionName>;
/**
* Only get the schemas for these tables
*/
only_tables?: Array<TableName>;
};

View File

@ -0,0 +1,12 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { DetailLevel } from './DetailLevel';
import type { SchemaFilters } from './SchemaFilters';
export type SchemaRequest = {
detail_level?: DetailLevel;
filters?: SchemaFilters;
};

View File

@ -11,7 +11,7 @@ export type TableInfo = {
/**
* The columns of the table
*/
columns: Array<ColumnInfo>;
columns?: Array<ColumnInfo>;
/**
* Whether or not existing rows can be deleted in the table
*/

View File

@ -24,7 +24,7 @@
},
"dc-api-types": {
"name": "@hasura/dc-api-types",
"version": "0.40.0",
"version": "0.41.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",
@ -2227,7 +2227,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"fastify": "^4.13.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -2547,7 +2547,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"fastify": "^4.13.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
@ -2868,7 +2868,7 @@
"version": "file:reference",
"requires": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/xml2js": "^0.4.11",
@ -3080,7 +3080,7 @@
"version": "file:sqlite",
"requires": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/sqlite3": "^3.1.8",

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"fastify": "^4.13.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -52,7 +52,7 @@
"integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
},
"node_modules/@hasura/dc-api-types": {
"version": "0.40.0",
"version": "0.41.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"fastify": "^4.13.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",

View File

@ -1,4 +1,4 @@
import { FunctionInfo, SchemaResponse, TableName } from "@hasura/dc-api-types"
import { SchemaRequest, SchemaResponse, TableInfo, TableName } from "@hasura/dc-api-types"
import { Casing, Config } from "../config";
import xml2js from "xml2js"
import fs from "fs"
@ -26,7 +26,7 @@ const streamToBuffer = async (stream: stream.Readable): Promise<Buffer> => {
// Only parse numeric columns as numbers, otherwise you get "number-like" columns like BillingPostCode
// getting partially parsed as a number or a string depending on the individual postcode
const parseNumbersInNumericColumns = (schema: SchemaResponse) => {
const numericColumns = new Set(schema.tables.flatMap(table => table.columns.filter(c => c.type === "number").map(c => c.name)));
const numericColumns = new Set(schema.tables.flatMap(table => (table.columns ?? []).filter(c => c.type === "number").map(c => c.name)));
return (value: string, name: string): any => {
return numericColumns.has(name)
@ -139,7 +139,7 @@ const applyCasing = (casing: Casing) => (str: string): string => {
}
}
export const getSchema = (store: Record<string, StaticData>, config: Config): SchemaResponse => {
export const getSchema = (store: Record<string, StaticData>, config: Config, request: SchemaRequest = {}): SchemaResponse => {
const applyTableNameCasing = applyCasing(config.table_name_casing);
const applyColumnNameCasing = applyCasing(config.column_name_casing);
@ -155,11 +155,19 @@ export const getSchema = (store: Record<string, StaticData>, config: Config): Sc
throw new Error(`Couldn't find db store for ${dbName}`);
}
const filterForOnlyTheseTables = request.filters?.only_tables
// If we're using a schema, only use those table names that belong to that schema
?.filter(n => config.schema ? n.length === 2 && n[0] === config.schema : true)
// But the schema is fake, so just keep the actual table name
?.map(n => n[n.length - 1])
const filteredTables = schema.tables.filter(table =>
config.tables === null ? true : config.tables.map(n => [n]).find(nameEquals(table.name)) !== undefined
config.tables || filterForOnlyTheseTables
? (config.tables ?? []).concat(filterForOnlyTheseTables ?? []).map(n => [n]).find(nameEquals(table.name)) !== undefined
: true
);
const prefixedTables = filteredTables.map(table => ({
const prefixedTables: TableInfo[] = filteredTables.map(table => ({
...table,
name: prefixSchemaToTableName(table.name.map(applyTableNameCasing)),
primary_key: table.primary_key?.map(applyColumnNameCasing),
@ -170,17 +178,49 @@ export const getSchema = (store: Record<string, StaticData>, config: Config): Sc
column_mapping: mapObject(constraint.column_mapping, ([outer, inner]) => [applyColumnNameCasing(outer), applyColumnNameCasing(inner)])
}))
: table.foreign_keys,
columns: table.columns.map(column => ({
columns: table.columns?.map(column => ({
...column,
name: applyColumnNameCasing(column.name),
}))
}));
const prefixedFunctions = (schema.functions ?? []); // TODO: Put some real prefixes here
const filterForOnlyTheseFunctions = request.filters?.only_functions
// If we're using a schema, only use those function names that belong to that schema
?.filter(n => config.schema ? n.length === 2 && n[0] === config.schema : true)
// But the schema is fake, so just keep the actual function name
?.map(n => n[n.length - 1])
const filteredFunctions = (schema.functions ?? []).filter(func =>
filterForOnlyTheseFunctions
? filterForOnlyTheseFunctions.map(n => [n]).find(nameEquals(func.name)) !== undefined
: true
)
const prefixedFunctions = filteredFunctions; // TODO: Put some real prefixes here
const detailLevel = request?.detail_level ?? "everything";
switch (detailLevel) {
case "everything":
return {
tables: prefixedTables,
functions: prefixedFunctions,
};
case "basic_info":
return {
tables: prefixedTables.map(table => ({
name: table.name,
type: table.type
})),
functions: prefixedFunctions.map(func => ({
name: func.name,
type: func.type,
})),
};
default:
return unreachable(detailLevel);
}
return {
...schema,
tables: prefixedTables,
functions: prefixedFunctions,
};
};

View File

@ -4,7 +4,7 @@ import { filterAvailableTables, getSchema, getTable, loadStaticData, StaticData
import { queryData } from './query';
import { getConfig } from './config';
import { capabilitiesResponse } from './capabilities';
import { CapabilitiesResponse, SchemaResponse, QueryRequest, QueryResponse, DatasetGetTemplateResponse, DatasetCreateCloneRequest, DatasetCreateCloneResponse, DatasetDeleteCloneResponse } from '@hasura/dc-api-types';
import { CapabilitiesResponse, SchemaResponse, QueryRequest, QueryResponse, DatasetGetTemplateResponse, DatasetCreateCloneRequest, DatasetCreateCloneResponse, DatasetDeleteCloneResponse, SchemaRequest } from '@hasura/dc-api-types';
import { cloneDataset, defaultDbStoreName, deleteDataset, getDataset, getDbStoreName } from './datasets';
const port = Number(process.env.PORT) || 8100;
@ -20,15 +20,23 @@ server.register(FastifyCors, {
allowedHeaders: ["X-Hasura-DataConnector-Config", "X-Hasura-DataConnector-SourceName"]
});
// This is a hack to get Fastify to parse bodies on /schema GET requests
// We basically trick its code into thinking the request is actually a POST
// request so it doesn't skip parsing request bodies.
server.addHook("onRequest", async(request, reply) => {
if (request.routerPath === "/schema")
request.raw.method = "POST"
})
server.get<{ Reply: CapabilitiesResponse }>("/capabilities", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "capabilities.request");
return capabilitiesResponse;
});
server.get<{ Reply: SchemaResponse }>("/schema", async (request, _response) => {
server.get<{ Body: SchemaRequest | undefined, Reply: SchemaResponse }>("/schema", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "schema.request");
const config = getConfig(request);
return getSchema(staticData, config);
return getSchema(staticData, config, request.body);
});
server.post<{ Body: QueryRequest, Reply: QueryResponse }>("/query", async (request, _response) => {

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"fastify": "^4.13.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
@ -57,7 +57,7 @@
"integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
},
"node_modules/@hasura/dc-api-types": {
"version": "0.40.0",
"version": "0.41.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.40.0",
"@hasura/dc-api-types": "0.41.0",
"fastify-metrics": "^9.2.1",
"fastify": "^4.13.0",
"nanoid": "^3.3.4",

View File

@ -4,7 +4,7 @@ import { getSchema } from './schema';
import { explain, queryData } from './query';
import { getConfig, tryGetConfig } from './config';
import { capabilitiesResponse } from './capabilities';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse, RawRequest, RawResponse, ErrorResponse, MutationRequest, MutationResponse, DatasetTemplateName, DatasetGetTemplateResponse, DatasetCreateCloneRequest, DatasetCreateCloneResponse, DatasetDeleteCloneResponse } from '@hasura/dc-api-types';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse, RawRequest, RawResponse, ErrorResponse, MutationRequest, MutationResponse, DatasetTemplateName, DatasetGetTemplateResponse, DatasetCreateCloneRequest, DatasetCreateCloneResponse, DatasetDeleteCloneResponse, SchemaRequest } from '@hasura/dc-api-types';
import { defaultMode, withConnection } from './db';
import metrics from 'fastify-metrics';
import prometheus from 'prom-client';
@ -97,6 +97,14 @@ if(PERMISSIVE_CORS) {
})
})();
// This is a hack to get Fastify to parse bodies on /schema GET requests
// We basically trick its code into thinking the request is actually a POST
// request so it doesn't skip parsing request bodies.
server.addHook("onRequest", async(request, reply) => {
if (request.routerPath === "/schema")
request.raw.method = "POST"
})
// Serves as an example of a custom histogram
// Not especially useful at present as this mirrors
// http_request_duration_seconds_bucket but is less general
@ -118,10 +126,10 @@ server.get<{ Reply: CapabilitiesResponse }>("/capabilities", async (request, _re
return capabilitiesResponse;
});
server.get<{ Reply: SchemaResponse }>("/schema", async (request, _response) => {
server.get<{ Body: SchemaRequest | undefined, Reply: SchemaResponse }>("/schema", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "schema.request");
const config = getConfig(request);
return getSchema(config, sqlLogger);
return getSchema(config, sqlLogger, request.body);
});
/**

View File

@ -1,8 +1,9 @@
import { SchemaResponse, ColumnInfo, TableInfo, Constraint, ColumnValueGenerationStrategy } from "@hasura/dc-api-types"
import { SchemaResponse, ColumnInfo, TableInfo, Constraint, ColumnValueGenerationStrategy, SchemaRequest, DetailLevel, TableName } from "@hasura/dc-api-types"
import { ScalarTypeKey } from "./capabilities";
import { Config } from "./config";
import { defaultMode, SqlLogger, withConnection } from './db';
import { MUTATIONS } from "./environment";
import { unreachable } from "./util";
var sqliteParser = require('sqlite-parser');
@ -62,18 +63,33 @@ function nullableCast(ds: any[]): boolean {
return true;
}
const formatTableInfo = (config: Config) => (info: TableInfoInternal): TableInfo => {
// Selects the table formatter matching the requested schema detail level:
// the full formatter for "everything", or the name/type-only formatter for
// "basic_info". The trailing unreachable() gives exhaustiveness checking.
const formatTableInfo = (config: Config, detailLevel: DetailLevel): ((info: TableInfoInternal) => TableInfo) => {
  if (detailLevel === "everything") {
    return formatEverythingTableInfo(config);
  }
  if (detailLevel === "basic_info") {
    return formatBasicTableInfo(config);
  }
  return unreachable(detailLevel);
}
// Builds the minimal TableInfo (name and type only), prefixing the "main"
// schema segment to the table name when explicit_main_schema is configured.
const formatBasicTableInfo = (config: Config) => (info: TableInfoInternal): TableInfo => {
  const name = config.explicit_main_schema
    ? ["main", info.name]
    : [info.name];
  return { name, type: "table" };
}
const formatEverythingTableInfo = (config: Config) => (info: TableInfoInternal): TableInfo => {
const basicTableInfo = formatBasicTableInfo(config)(info);
const ast = sqliteParser(info.sql);
const columnsDdl = getColumnsDdl(ast);
const primaryKeys = getPrimaryKeyNames(ast);
const foreignKeys = ddlFKs(config, tableName, ast);
const foreignKeys = ddlFKs(config, basicTableInfo.name, ast);
const primaryKey = primaryKeys.length > 0 ? { primary_key: primaryKeys } : {};
const foreignKey = foreignKeys.length > 0 ? { foreign_keys: Object.fromEntries(foreignKeys) } : {};
return {
name: tableName,
type: "table",
...basicTableInfo,
...primaryKey,
...foreignKey,
description: info.sql,
@ -92,14 +108,21 @@ function isMeta(table : TableInfoInternal) {
return table.type != 'table' || table.name === 'sqlite_sequence';
}
function includeTable(config: Config, table: TableInfoInternal): boolean {
if(config.tables === null) {
if(isMeta(table) && ! config.meta) {
return false;
}
return true;
// Decides whether a sqlite table should appear in the schema response.
//
// - Metadata tables (per isMeta) are excluded unless config.meta is set.
// - If either the agent config or the request specifies table filters, a
//   table is kept only when its name appears in the union of both filters.
// - With no filters at all, every (non-meta) table is included.
//
// NOTE: this span contained stray diff residue (a duplicate unguarded
// `return config.tables.indexOf(...)` from the pre-change implementation);
// this is the reconstructed post-change implementation.
const includeTable = (config: Config, only_tables?: TableName[]) => (table: TableInfoInternal): boolean => {
  if (isMeta(table) && !config.meta) {
    return false;
  }
  const filterForOnlyTheseTables = only_tables
    // If we're using an explicit main schema, only use those table names that belong to that schema
    ?.filter(n => config.explicit_main_schema ? n.length === 2 && n[0] === "main" : true)
    // Just keep the actual table name
    ?.map(n => n[n.length - 1])
  if (config.tables || only_tables) {
    return (config.tables ?? []).concat(filterForOnlyTheseTables ?? []).indexOf(table.name) >= 0;
  } else {
    return true;
  }
}
@ -227,12 +250,14 @@ function getPrimaryKeyNames(ddl: any): string[] {
})
}
export async function getSchema(config: Config, sqlLogger: SqlLogger): Promise<SchemaResponse> {
export async function getSchema(config: Config, sqlLogger: SqlLogger, schemaRequest: SchemaRequest = {}): Promise<SchemaResponse> {
return await withConnection(config, defaultMode, sqlLogger, async db => {
const detailLevel = schemaRequest.detail_level ?? "everything";
const results = await db.query("SELECT * from sqlite_schema");
const resultsT: TableInfoInternal[] = results as TableInfoInternal[];
const filtered: TableInfoInternal[] = resultsT.filter(table => includeTable(config,table));
const result: TableInfo[] = filtered.map(formatTableInfo(config));
const filtered: TableInfoInternal[] = resultsT.filter(includeTable(config, schemaRequest?.filters?.only_tables));
const result: TableInfo[] = filtered.map(formatTableInfo(config, detailLevel));
return {
tables: result

View File

@ -164,6 +164,7 @@ test-suite tests-dc-api
Test.Specs.CapabilitiesSpec
Test.Specs.ErrorSpec
Test.Specs.ExplainSpec
Test.Specs.FunctionsSpec
Test.Specs.HealthSpec
Test.Specs.MetricsSpec
Test.Specs.MutationSpec
@ -180,5 +181,4 @@ test-suite tests-dc-api
Test.Specs.QuerySpec.RedactionSpec
Test.Specs.QuerySpec.RelationshipsSpec
Test.Specs.SchemaSpec
Test.Specs.UDFSpec
Test.TestHelpers

View File

@ -84,6 +84,7 @@ type SchemaApi config =
"schema"
:> SourceNameHeader Required
:> ConfigHeader config Required
:> ReqBody '[JSON] V0.SchemaRequest
:> UVerb 'GET '[JSON] SchemaResponses
-- | This function defines a central place to ensure that all cases are covered for query and error responses.

View File

@ -194,8 +194,8 @@ data FunctionInfo = FunctionInfo
{ -- NOTE: Some fields from PG are omitted here due to initial implementation, or non-generality.
_fiName :: FunctionName,
_fiFunctionType :: FunctionType,
_fiReturns :: FunctionReturnType, -- Functions must currently return tables as per PG.
_fiResponseCardinality :: FunctionArity,
_fiReturns :: Maybe FunctionReturnType, -- Functions must currently return tables as per PG.
_fiResponseCardinality :: Maybe FunctionArity,
_fiInputArgs :: [FunctionArg], -- Args info is listed grouped unlike PG.
_fiDescription :: Maybe Text
}
@ -209,9 +209,9 @@ instance HasCodec FunctionInfo where
FunctionInfo
<$> requiredField "name" "The name of the table" .= _fiName
<*> requiredField "type" "read/write classification of the function" .= _fiFunctionType
<*> requiredField "returns" "table listed in schema that matches the return type of the function - to relax later" .= _fiReturns
<*> requiredField "response_cardinality" "object response if false, rows if true" .= _fiResponseCardinality
<*> requiredField "args" "argument info - name/types" .= _fiInputArgs
<*> optionalFieldOrNull "returns" "table listed in schema that matches the return type of the function - to relax later" .= _fiReturns
<*> optionalFieldOrNull "response_cardinality" "object response if false, rows if true" .= _fiResponseCardinality
<*> optionalFieldWithOmittedDefault "args" [] "argument info - name/types" .= _fiInputArgs
<*> optionalFieldOrNull "description" "Description of the table" .= _fiDescription
--------------------------------------------------------------------------------

View File

@ -1,7 +1,11 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedLists #-}
module Hasura.Backends.DataConnector.API.V0.Schema
( SchemaResponse (..),
( SchemaRequest (..),
SchemaFilters (..),
DetailLevel (..),
SchemaResponse (..),
ObjectTypeDefinition (..),
)
where
@ -24,6 +28,60 @@ import Prelude
--------------------------------------------------------------------------------
-- Schema Response
-- | Body of a @/schema@ request: optional filters restricting which tables
-- and functions are returned, plus how much detail to include about them.
data SchemaRequest = SchemaRequest
  { _srFilters :: SchemaFilters,
    _srDetailLevel :: DetailLevel
  }
  deriving stock (Eq, Show, Generic)
  deriving anyclass (NFData, Hashable)
  deriving (FromJSON, ToJSON, ToSchema) via Autodocodec SchemaRequest
-- | JSON codec for 'SchemaRequest'. Both fields are optional in the
-- serialized form: @filters@ defaults to no filtering ('mempty') and
-- @detail_level@ defaults to 'Everything'.
--
-- Fix: the previous @detail_level@ doc text ("Only return names for schema
-- items") described only the 'BasicInfo' case — and inaccurately, since
-- basic info also includes types — rather than describing the field itself.
instance HasCodec SchemaRequest where
  codec =
    object "SchemaRequest" $
      SchemaRequest
        <$> optionalFieldWithOmittedDefault "filters" mempty "Optional schema filtering settings" .= _srFilters
        <*> optionalFieldWithOmittedDefault "detail_level" Everything "How much information to return about the schema" .= _srDetailLevel
-- | Optional filters for a 'SchemaRequest'. For each field, 'Nothing' means
-- "no filter" (return everything of that kind), while @'Just' []@ means
-- return no items of that kind.
data SchemaFilters = SchemaFilters
  { _sfOnlyTables :: Maybe [API.V0.TableName],
    _sfOnlyFunctions :: Maybe [API.V0.FunctionName]
  }
  deriving stock (Eq, Show, Generic)
  deriving anyclass (NFData, Hashable)
  deriving (FromJSON, ToJSON, ToSchema) via Autodocodec SchemaFilters
-- | Filters combine pointwise. Via 'Maybe'\'s 'Semigroup', a 'Nothing'
-- ("no filter") side yields the other side unchanged, and two 'Just' lists
-- are concatenated (the union of the two allow-lists).
instance Semigroup SchemaFilters where
  a <> b =
    SchemaFilters
      { _sfOnlyTables = _sfOnlyTables a <> _sfOnlyTables b,
        _sfOnlyFunctions = _sfOnlyFunctions a <> _sfOnlyFunctions b
      }
-- | 'mempty' is the unfiltered request: no table or function restrictions.
instance Monoid SchemaFilters where
  mempty = SchemaFilters Nothing Nothing
-- | JSON codec for 'SchemaFilters'; both properties are optional in the
-- serialized object, matching the 'Nothing' = "no filter" semantics.
instance HasCodec SchemaFilters where
  codec =
    object "SchemaFilters" $
      SchemaFilters
        <$> optionalField "only_tables" "Only get the schemas for these tables" .= _sfOnlyTables
        <*> optionalField "only_functions" "Only get the schemas for these functions" .= _sfOnlyFunctions
-- | How much information the agent should return about each schema item:
-- full detail, or just basic info (names and types).
data DetailLevel = Everything | BasicInfo
  deriving stock (Eq, Show, Generic)
  deriving anyclass (NFData, Hashable)
  deriving (FromJSON, ToJSON, ToSchema) via Autodocodec DetailLevel
-- | Serialized as the strings @"everything"@ \/ @"basic_info"@.
instance HasCodec DetailLevel where
  codec =
    named "DetailLevel" $
      stringConstCodec [(Everything, "everything"), (BasicInfo, "basic_info")]
        <??> [ "How much information to return about the schema. Values:",
               "- 'everything': All information about the schema.",
               "- 'basic_info': For tables, only the table name and table type, for functions, only the function name and function type."
             ]
-- | The Schema Response provides the schemas for tracked tables and
-- 'Capabilities' supported by the service.
data SchemaResponse = SchemaResponse

View File

@ -90,7 +90,7 @@ instance HasCodec TableInfo where
TableInfo
<$> requiredField "name" "The name of the table" .= _tiName
<*> optionalFieldWithDefault "type" Table "The type of table" .= _tiType
<*> requiredField "columns" "The columns of the table" .= _tiColumns
<*> optionalFieldWithDefault "columns" [] "The columns of the table" .= _tiColumns
<*> dimapMaybeNonEmpty (optionalFieldWithOmittedDefault "primary_key" [] "The primary key of the table") .= _tiPrimaryKey
<*> optionalFieldWithOmittedDefault "foreign_keys" (ForeignKeys mempty) "Foreign key constraints" .= _tiForeignKeys
<*> optionalFieldOrNull "description" "Description of the table" .= _tiDescription

View File

@ -24,12 +24,12 @@ import Test.Sandwich.Options qualified as Sandwich
import Test.Specs.CapabilitiesSpec qualified
import Test.Specs.ErrorSpec qualified
import Test.Specs.ExplainSpec qualified
import Test.Specs.FunctionsSpec qualified
import Test.Specs.HealthSpec qualified
import Test.Specs.MetricsSpec qualified
import Test.Specs.MutationSpec qualified
import Test.Specs.QuerySpec qualified
import Test.Specs.SchemaSpec qualified
import Test.Specs.UDFSpec qualified
import Test.TestHelpers (AgentTestSpec)
import Prelude
@ -51,7 +51,7 @@ tests testData testConfig edgeCasesTestData capabilitiesResponse@API.Capabilitie
for_ (API._cMutations _crCapabilities) \_ -> Test.Specs.MutationSpec.spec testData edgeCasesTestData _crCapabilities
for_ (API._cUserDefinedFunctions _crCapabilities) \_ -> do
usesDataset functionsTemplate do
Test.Specs.UDFSpec.spec testConfig _crCapabilities
Test.Specs.FunctionsSpec.spec testConfig _crCapabilities
getCloneSchema :: Maybe API.Config -> API.DatasetTemplateName -> AgentIOClient -> IO API.SchemaResponse
getCloneSchema mergeConfig datasetTemplate (AgentIOClient agentClient) =
@ -59,13 +59,13 @@ getCloneSchema mergeConfig datasetTemplate (AgentIOClient agentClient) =
(createClone agentClient datasetTemplate)
(deleteClone agentClient)
( \DatasetCloneInfo {..} ->
(agentClient // API._schema) testSourceName (mergeAgentConfig _dciAgentConfig mergeConfig) >>= guardSchemaResponse
(agentClient // API._schema) testSourceName (mergeAgentConfig _dciAgentConfig mergeConfig) (API.SchemaRequest mempty API.Everything) >>= guardSchemaResponse
)
getChinookSchema :: API.Capabilities -> AgentConfig -> AgentIOClient -> IO API.SchemaResponse
getChinookSchema API.Capabilities {..} agentConfig agentIOClient@(AgentIOClient agentClient) = do
case agentConfig of
ManualConfig config -> (agentClient // API._schema) testSourceName config >>= guardSchemaResponse
ManualConfig config -> (agentClient // API._schema) testSourceName config (API.SchemaRequest mempty API.Everything) >>= guardSchemaResponse
DatasetConfig mergeConfig ->
if isJust _cDatasets
then getCloneSchema mergeConfig chinookTemplate agentIOClient

View File

@ -15,6 +15,7 @@ module Test.AgentAPI
getHealth,
getSourceHealth,
getSchemaGuarded,
getSchemaGuarded',
guardSchemaResponse,
queryGuarded,
queryExpectError,
@ -70,7 +71,12 @@ getSourceHealth = do
getSchemaGuarded :: (HasBaseContext context, HasAgentTestContext context, HasDatasetContext context, MonadReader context m, MonadThrow m, MonadIO m) => AgentClientT m API.SchemaResponse
getSchemaGuarded = do
(sourceName, config) <- getSourceNameAndConfig
guardSchemaResponse =<< (client // API._schema) sourceName config
guardSchemaResponse =<< (client // API._schema) sourceName config (API.SchemaRequest mempty API.Everything)
getSchemaGuarded' :: (HasBaseContext context, HasAgentTestContext context, HasDatasetContext context, MonadReader context m, MonadThrow m, MonadIO m) => API.SchemaRequest -> AgentClientT m API.SchemaResponse
getSchemaGuarded' schemaRequest = do
(sourceName, config) <- getSourceNameAndConfig
guardSchemaResponse =<< (client // API._schema) sourceName config schemaRequest
guardSchemaResponse :: (MonadThrow m) => Union API.SchemaResponses -> m API.SchemaResponse
guardSchemaResponse = API.schemaCase defaultAction successAction errorAction

View File

@ -658,7 +658,7 @@ functionField :: API.SchemaResponse -> TestConfig -> API.TableName -> API.Functi
functionField schemaResponse@API.SchemaResponse {..} testConfig defaultTableName functionName columnName =
columnField schemaResponse testConfig tableName columnName
where
tableName = fromMaybe defaultTableName (functionReturnType ^? API._FunctionReturnsTable)
tableName = fromMaybe defaultTableName (functionReturnType ^? _Just . API._FunctionReturnsTable)
functionReturnType = maybe (error $ "Can't find the function " <> show functionName <> " in " <> show (API._fiName <$> _srFunctions)) API._fiReturns functionInfo
functionInfo = find (\API.FunctionInfo {..} -> _fiName == functionName) _srFunctions

View File

@ -1,26 +1,26 @@
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Module containing tests for user-defined-functions aka. "UDFs".
module Test.Specs.UDFSpec (spec) where
-- | Module containing tests for user-defined-functions
module Test.Specs.FunctionsSpec (spec) where
--------------------------------------------------------------------------------
import Command (TestConfig)
import Control.Lens ((?~))
import Control.Lens ((<&>), (?~))
import Control.Lens.Lens ((&))
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (MonadReader)
import Data.Aeson (Value (..))
import Data.HashMap.Strict qualified as HashMap
import Data.List (sort)
import Data.List (sort, sortOn)
import Data.Set qualified as Set
import Data.Text qualified as Text
import GHC.Stack (HasCallStack)
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API qualified as API
import Test.AgentAPI (getSchemaGuarded, queryGuarded)
import Test.AgentAPI (getSchemaGuarded, getSchemaGuarded', queryGuarded)
import Test.AgentClient (HasAgentClient, runAgentClientT)
import Test.AgentDatasets (HasDatasetContext)
import Test.AgentTestContext (HasAgentTestContext)
@ -50,6 +50,36 @@ spec testConfig API.Capabilities {} = describe "supports functions" $ preloadAge
functionNames <- (extractFunctionNames . API._srFunctions) <$> getPreloadedAgentSchema
functionNames `jsonShouldBe` expectedFunctionNames
it "returns the specified functions from the Functions dataset when filtered" do
preloadedSchema <- getPreloadedAgentSchema
let FunctionsTestData {..} = mkFunctionsTestData preloadedSchema testConfig
extractFunctionNames = sort . fmap API._fiName
desiredFunctions = [_ftdFibonacciFunctionName]
filters = mempty {API._sfOnlyFunctions = Just desiredFunctions}
functionNames <- extractFunctionNames . API._srFunctions <$> getSchemaGuarded' (API.SchemaRequest filters API.BasicInfo)
functionNames `jsonShouldBe` desiredFunctions
-- An explicit empty only_functions filter must yield an empty function list.
-- (Fixed the ungrammatical test description: "returns the no functions".)
it "returns no functions when filtered with an empty list" do
  let filters = mempty {API._sfOnlyFunctions = Just []}
  functionInfos <- API._srFunctions <$> getSchemaGuarded' (API.SchemaRequest filters API.BasicInfo)
  functionInfos `jsonShouldBe` []
it "returns only Function names and types when using basic_info detail level" $ do
preloadedSchema <- getPreloadedAgentSchema
let FunctionsTestData {..} = mkFunctionsTestData preloadedSchema testConfig
expectedFunctionNames = [_ftdFibonacciFunctionName, _ftdSearchArticlesFunctionName]
functionInfos <- sortOn API._fiName . API._srFunctions <$> getSchemaGuarded' (API.SchemaRequest mempty API.BasicInfo)
let expectedFunctionInfos =
expectedFunctionNames
<&> (\functionName -> API.FunctionInfo functionName API.FRead Nothing Nothing [] Nothing)
functionInfos `jsonShouldBe` expectedFunctionInfos
it "can query for a list Fibonacci numbers using the fibonacci function" $ do
preloadedSchema <- getPreloadedAgentSchema

View File

@ -4,7 +4,7 @@ module Test.Specs.SchemaSpec (spec) where
--------------------------------------------------------------------------------
import Control.Lens ((%~), (.~), (?~))
import Control.Lens ((%~), (.~), (<&>), (?~))
import Control.Lens.At (at)
import Control.Lens.Lens ((&))
import Control.Monad (forM_)
@ -18,11 +18,11 @@ import Data.Foldable (find)
import Data.HashMap.Strict qualified as HashMap
import Data.List (sort, sortOn)
import Data.List.NonEmpty qualified as NonEmpty
import Data.Maybe (isJust, isNothing)
import Data.Maybe (isJust, isNothing, mapMaybe)
import Data.Text qualified as Text
import GHC.Stack (HasCallStack)
import Hasura.Backends.DataConnector.API qualified as API
import Test.AgentAPI (getSchemaGuarded)
import Test.AgentAPI (getSchemaGuarded, getSchemaGuarded')
import Test.AgentClient (HasAgentClient, runAgentClientT)
import Test.AgentDatasets (HasDatasetContext)
import Test.AgentTestContext (HasAgentTestContext)
@ -49,6 +49,46 @@ spec TestData {..} API.Capabilities {..} = describe "schema API" $ preloadAgentS
let expectedTableNames = extractTableNames _tdSchemaTables
tableNames `jsonShouldBe` expectedTableNames
it "returns the specified Chinook tables when filtered" $ do
let desiredTables = [_tdCustomersTableName, _tdInvoicesTableName, _tdInvoiceLinesTableName, _tdTracksTableName]
let filters = mempty {API._sfOnlyTables = Just desiredTables}
tableNames <- sort . fmap API._tiName . API._srTables <$> getSchemaGuarded' (API.SchemaRequest filters API.Everything)
tableNames `jsonShouldBe` desiredTables
it "returns no tables when filtered with an empty list" $ do
let filters = mempty {API._sfOnlyTables = Just []}
tableInfos <- API._srTables <$> getSchemaGuarded' (API.SchemaRequest filters API.Everything)
tableInfos `jsonShouldBe` []
it "returns only Chinook table names and types when using basic_info detail level" $ do
tableInfos <- sortOn API._tiName . API._srTables <$> getSchemaGuarded' (API.SchemaRequest mempty API.BasicInfo)
let expectedTableInfos =
_tdSchemaTables
<&> (\API.TableInfo {..} -> API.TableInfo _tiName _tiType [] Nothing (API.ForeignKeys mempty) Nothing False False False)
& sortOn API._tiName
tableInfos `jsonShouldBe` expectedTableInfos
it "can filter tables while using basic_info detail level" $ do
let desiredTables = [_tdAlbumsTableName, _tdArtistsTableName]
let filters = mempty {API._sfOnlyTables = Just desiredTables}
tableInfos <- sortOn API._tiName . API._srTables <$> getSchemaGuarded' (API.SchemaRequest filters API.BasicInfo)
let expectedTableInfos =
_tdSchemaTables
& mapMaybe
( \API.TableInfo {..} ->
if _tiName `elem` desiredTables
then Just $ API.TableInfo _tiName _tiType [] Nothing (API.ForeignKeys mempty) Nothing False False False
else Nothing
)
& sortOn API._tiName
tableInfos `jsonShouldBe` expectedTableInfos
testPerTable "returns the correct columns in the Chinook tables" $ \expectedTable actualTable -> do
-- We remove some properties here so that we don't compare them since they vary between agent implementations
let extractJsonForComparison table =

View File

@ -866,8 +866,8 @@ mockCapabilitiesHandler mcfg = liftIO $ do
cfg <- I.readIORef mcfg
pure $ inject $ SOP.I $ _capabilitiesResponse cfg
mockSchemaHandler :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> Handler (Union API.SchemaResponses)
mockSchemaHandler mcfg mRecordedRequest mRecordedRequestConfig _sourceName requestConfig = liftIO $ do
mockSchemaHandler :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> API.SchemaRequest -> Handler (Union API.SchemaResponses)
mockSchemaHandler mcfg mRecordedRequest mRecordedRequestConfig _sourceName requestConfig _schemaRequest = liftIO $ do
cfg <- I.readIORef mcfg
I.writeIORef mRecordedRequest (Just Schema)
I.writeIORef mRecordedRequestConfig (Just requestConfig)

View File

@ -134,11 +134,11 @@ arityJsonAggSelect = \case
functionReturnTypeFromAPI ::
(MonadError QErr m) =>
DC.FunctionName ->
(Maybe (FunctionReturnType 'DataConnector), API.FunctionReturnType) ->
(Maybe (FunctionReturnType 'DataConnector), Maybe API.FunctionReturnType) ->
m DC.TableName
functionReturnTypeFromAPI funcGivenName = \case
(Just (DC.FunctionReturnsTable t), _) -> pure t
(_, API.FunctionReturnsTable t) -> pure (Witch.into t)
(_, Just (API.FunctionReturnsTable t)) -> pure (Witch.into t)
_ ->
throw400 NotSupported
$ "Function "
@ -177,7 +177,7 @@ buildFunctionInfo'
objid <-
case (_fcResponse, returnType) of
(Just (DC.FunctionReturnsTable t), _) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector t
(_, API.FunctionReturnsTable t) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector (Witch.into t)
(_, Just (API.FunctionReturnsTable t)) -> pure $ SOSourceObj sourceName $ mkAnyBackend $ SOITable @'DataConnector (Witch.into t)
_ ->
throw400 NotSupported
$ "Function "
@ -205,6 +205,10 @@ buildFunctionInfo'
else IAUserProvided arg
functionReturnType <- functionReturnTypeFromAPI funcName (_fcResponse, returnType)
jsonAggSelect <-
arityJsonAggSelect
<$> infoSet
`onNothing` throw400 NotSupported ("Function " <> tshow funcName <> " is missing a response cardinality")
let funcInfo =
FunctionInfo
@ -219,7 +223,7 @@ buildFunctionInfo'
_fiReturnType = functionReturnType,
_fiDescription = infoDesc,
_fiPermissions = permissionMap,
_fiJsonAggSelect = arityJsonAggSelect infoSet,
_fiJsonAggSelect = jsonAggSelect,
_fiComment = funcComment
}
pure $ (funcInfo, SchemaDependency objid DRTable)
@ -336,8 +340,9 @@ resolveDatabaseMetadata' ::
SourceMetadata 'DataConnector ->
DC.SourceConfig ->
m (Either QErr (DBObjectsIntrospection 'DataConnector))
resolveDatabaseMetadata' logger SourceMetadata {_smName} sourceConfig = runExceptT do
API.SchemaResponse {..} <- requestDatabaseSchema logger _smName sourceConfig
resolveDatabaseMetadata' logger sourceMetadata@SourceMetadata {_smName} sourceConfig = runExceptT do
let schemaRequest = makeTrackedItemsOnlySchemaRequest sourceMetadata
API.SchemaResponse {..} <- requestDatabaseSchema logger _smName sourceConfig schemaRequest
let logicalModels =
maybe mempty (InsOrdHashMap.fromList . map toLogicalModelMetadata . toList) _srObjectTypes
tables = HashMap.fromList $ do
@ -390,17 +395,29 @@ resolveDatabaseMetadata' logger SourceMetadata {_smName} sourceConfig = runExcep
_rsLogicalModels = logicalModels
}
-- | Builds a 'API.SchemaRequest' that asks the agent only for the tables and
-- functions already tracked in the source metadata, at full ('API.Everything')
-- detail. This keeps schema introspection cheap for sources with many
-- untracked objects.
makeTrackedItemsOnlySchemaRequest :: SourceMetadata 'DataConnector -> API.SchemaRequest
makeTrackedItemsOnlySchemaRequest sourceMetadata =
  API.SchemaRequest trackedItemFilters API.Everything
  where
    trackedItemFilters =
      API.SchemaFilters
        { _sfOnlyTables = Just trackedTableNames,
          _sfOnlyFunctions = Just trackedFunctionNames
        }
    trackedTableNames = Witch.into <$> InsOrdHashMap.keys (_smTables sourceMetadata)
    trackedFunctionNames = Witch.into <$> InsOrdHashMap.keys (_smFunctions sourceMetadata)
-- | Sends the given 'API.SchemaRequest' to the agent for the given source and
-- returns the guarded schema response. The source config is transformed
-- (template resolution etc.) before building the agent client context.
--
-- Note: the stale pre-change function head and final call line (old
-- two-argument @Client.schema@ arity) that were left duplicated alongside the
-- updated lines have been removed.
requestDatabaseSchema ::
  (MonadIO m, MonadBaseControl IO m, MonadError QErr m) =>
  Logger Hasura ->
  SourceName ->
  DC.SourceConfig ->
  API.SchemaRequest ->
  m API.SchemaResponse
requestDatabaseSchema logger sourceName sourceConfig schemaRequest = do
  transformedSourceConfig <- transformSourceConfig sourceConfig Nothing
  ignoreTraceT
    . flip runAgentClientT (AgentClientContext logger (DC._scEndpoint transformedSourceConfig) (DC._scManager transformedSourceConfig) (DC._scTimeoutMicroseconds transformedSourceConfig) Nothing)
    $ Client.schema sourceName (DC._scConfig transformedSourceConfig) schemaRequest
getFieldType :: Bool -> API.ColumnType -> LogicalModelType 'DataConnector
getFieldType isNullable = \case
@ -635,14 +652,14 @@ listAllTables' :: (CacheRM m, Has (Logger Hasura) r, MonadIO m, MonadBaseControl
listAllTables' sourceName = do
  (logger :: Logger Hasura) <- asks getter
  sourceConfig <- askSourceConfig @'DataConnector sourceName
  -- basic_info detail suffices here: only table names are needed, so avoid
  -- asking the agent for full column/constraint detail.
  -- (Removed the stale pre-change call that was left duplicated above the
  -- updated three-argument 'requestDatabaseSchema' call.)
  schemaResponse <- requestDatabaseSchema logger sourceName sourceConfig (API.SchemaRequest mempty API.BasicInfo)
  pure $ fmap (Witch.from . API._tiName) $ API._srTables schemaResponse
listAllTrackables' :: (CacheRM m, Has (Logger Hasura) r, MonadIO m, MonadBaseControl IO m, MonadReader r m, MonadError QErr m, MetadataM m) => SourceName -> m (TrackableInfo 'DataConnector)
listAllTrackables' sourceName = do
(logger :: Logger Hasura) <- asks getter
sourceConfig <- askSourceConfig @'DataConnector sourceName
schemaResponse <- requestDatabaseSchema logger sourceName sourceConfig
schemaResponse <- requestDatabaseSchema logger sourceName sourceConfig (API.SchemaRequest mempty API.BasicInfo)
let functions = fmap (\fi -> TrackableFunctionInfo (Witch.into (API._fiName fi)) (getVolatility (API._fiFunctionType fi))) $ API._srFunctions schemaResponse
let tables = fmap (TrackableTableInfo . Witch.into . API._tiName) $ API._srTables schemaResponse
pure

View File

@ -105,9 +105,9 @@ capabilities = do
defaultAction = throw400 DataConnectorError "Unexpected data connector capabilities response - Unexpected Type"
capabilitiesGuard = API.capabilitiesCase defaultAction pure errorAction
-- | Calls the agent's @/schema@ endpoint with the given 'API.SchemaRequest'
-- and unwraps the response union via @schemaGuard@ (defined below), mapping
-- agent error responses to 'QErr's.
--
-- Note: the stale pre-change signature and body (old two-argument
-- @API._schema@ arity) that were left duplicated above the updated version
-- have been removed.
schema :: (MonadIO m, MonadTrace m, MonadError QErr m) => RQL.SourceName -> API.Config -> API.SchemaRequest -> AgentClientT m API.SchemaResponse
schema sourceName config schemaRequest = do
  schemaGuard =<< (genericClient // API._schema) (toTxt sourceName) config schemaRequest
  where
    errorAction e = throw400WithDetail (mapErrorType $ API._crType e) (API._crMessage e) (API._crDetails e)
    defaultAction = throw400 DataConnectorError "Unexpected data connector schema response - Unexpected Type"

View File

@ -4,8 +4,11 @@
module Hasura.Backends.DataConnector.API.V0.SchemaSpec (spec) where
import Data.Aeson.QQ.Simple (aesonQQ)
import Hasura.Backends.DataConnector.API.V0.Function
import Hasura.Backends.DataConnector.API.V0.FunctionSpec (genFunctionName)
import Hasura.Backends.DataConnector.API.V0.Schema
import Hasura.Backends.DataConnector.API.V0.TableSpec (genTableInfo)
import Hasura.Backends.DataConnector.API.V0.Table
import Hasura.Backends.DataConnector.API.V0.TableSpec (genTableInfo, genTableName)
import Hasura.Generator.Common (defaultRange)
import Hasura.Prelude
import Hedgehog
@ -15,10 +18,44 @@ import Test.Hspec
-- | Round-trip JSON serialization and OpenAPI-conformance tests for the
-- schema endpoint's request and response types.
spec :: Spec
spec = do
describe "SchemaRequest" $ do
-- An empty JSON object must decode to the defaults: no filters ('mempty')
-- and the 'Everything' detail level.
describe "Minimal" $ do
testToFromJSONToSchema
(SchemaRequest mempty Everything)
[aesonQQ| {} |]
-- A fully-populated request: table and function filters plus the
-- "basic_info" detail level must round-trip through the wire format.
describe "Full" $ do
testToFromJSONToSchema
(SchemaRequest (SchemaFilters (Just [TableName ["my_table"]]) (Just [FunctionName ["my_function"]])) BasicInfo)
[aesonQQ|
{ "filters": {
"only_tables": [["my_table"]],
"only_functions": [["my_function"]]
},
"detail_level": "basic_info"
}
|]
-- Property test: arbitrary requests serialize consistently with the
-- declared OpenAPI schema.
jsonOpenApiProperties genSchemaRequest
describe "SchemaResponse" $ do
testToFromJSONToSchema (SchemaResponse [] [] Nothing) [aesonQQ|{"tables": []}|]
jsonOpenApiProperties genSchemaResponse
-- | Hedgehog generator producing arbitrary 'SchemaRequest' values.
genSchemaRequest :: (MonadGen m) => m SchemaRequest
genSchemaRequest = do
  schemaFilters <- genSchemaFilters
  detailLevel <- genDetailLevel
  pure $ SchemaRequest schemaFilters detailLevel
-- | Hedgehog generator producing arbitrary 'SchemaFilters' values; each
-- filter list is independently present or absent.
genSchemaFilters :: (MonadGen m) => m SchemaFilters
genSchemaFilters = do
  onlyTables <- Gen.maybe (Gen.list defaultRange genTableName)
  onlyFunctions <- Gen.maybe (Gen.list defaultRange genFunctionName)
  pure $ SchemaFilters onlyTables onlyFunctions
-- | Hedgehog generator choosing uniformly between the two detail levels.
genDetailLevel :: (MonadGen m) => m DetailLevel
genDetailLevel =
  Gen.choice [pure Everything, pure BasicInfo]
genSchemaResponse :: Gen SchemaResponse
genSchemaResponse = do
tables <- Gen.list defaultRange genTableInfo