Adding custom scalar types to SQLite DC Agent - GDC-610

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/6783
Co-authored-by: David Overton <7734777+dmoverton@users.noreply.github.com>
Co-authored-by: Daniel Chambers <1214352+daniel-chambers@users.noreply.github.com>
GitOrigin-RevId: ee53c20b0090b6b3c88792ddc98b8287872fe0f3
This commit is contained in:
Lyndon Maydwell 2022-12-08 16:48:21 +10:00 committed by hasura-bot
parent c14fd3ba4c
commit e9dcbf6491
13 changed files with 302 additions and 68 deletions

View File

@ -1,6 +1,86 @@
import { configSchema } from "./config"
import { CapabilitiesResponse } from "@hasura/dc-api-types"
import { envToBool } from "./util"
import { METRICS, MUTATIONS } from "./environment"
import { CapabilitiesResponse, ScalarTypeCapabilities } from "@hasura/dc-api-types"
// NOTE: This should cover all possible schema types.
// This type should be a subtype of ScalarType.
// NOTE(review): these names must stay in sync with the keys of `scalar_types`
// below and with the strings the schema derivation produces — confirm against
// `determineScalarType` when adding a new entry.
export type ScalarTypeKey
= 'DateTime'
| 'string'
| 'number'
| 'decimal'
| 'bool'
;
// TODO: Should we prefix operators with _ like _eq, or just for symbolic operators?
// TODO: How to model ISNULL, NOTNULL, IN
// TODO: How to apply functions to column arguments? E.g. a.bits = b.bits & b.mask
// TODO: How to reuse operators such as = across different types without enumerating them all?
// TODO: Other operators. See: https://www.tutorialspoint.com/sqlite/sqlite_operators.htm
// Should we explicitly include the default included operators?
// NOTE: See `function bop_op` for most query processing of these operators
//
/**
 * Builds the set of comparison operators shared by every scalar type.
 * Each operator takes a right-hand argument of the same scalar type `t`.
 *
 * See: https://www.tutorialspoint.com/sqlite/sqlite_operators.htm
 * NOTE: `function bop_op` performs most of the query processing of these operators.
 * TODO: _in, _is_null
 * TODO: SQLite also documents !< and !> but they throw errors when used, so they
 *       are deliberately omitted here.
 */
function standardOperators(t: string): Record<string, string> {
  // _eq (=), _gt (>), _gte (>=), _lt (<), _lte (<=), _neq (!=/<>)
  const sharedOperatorNames = ['_eq', '_gt', '_gte', '_lt', '_lte', '_neq'] as const;
  return Object.fromEntries(sharedOperatorNames.map((op) => [op, t]));
}
// TODO: How can we ensure that we have covered all of the operator keys in the query module?
// Capability declaration: the comparison operators available for each scalar type.
// Each operator name maps to the scalar type of its right-hand argument.
// NOTE(review): the query module's `where_clause` also translates `_nor`, which is
// not advertised here — confirm whether `_nor` should be added to `bool`.
const scalar_types: Record<ScalarTypeKey, ScalarTypeCapabilities> = {
DateTime: {
comparison_operators: {
// NOTE(review): 'int' is not one of the declared ScalarTypeKey values ('number' is) —
// verify consumers of the capabilities response accept this argument type.
_in_year: 'int',
...standardOperators('DateTime')
}
},
string: {
comparison_operators: {
// See: https://www.sqlite.org/lang_expr.html #5
_like: 'string',
_glob: 'string',
// _regexp: 'string', // TODO: Detect if REGEXP is supported
...standardOperators('string')
}
},
// TODO: Why do we need a separate 'decimal' type?
decimal: {
comparison_operators: {
_modulus_is_zero: 'number',
...standardOperators('number')
}
},
number: {
comparison_operators: {
_modulus_is_zero: 'number',
...standardOperators('number')
}
},
bool: {
comparison_operators: {
// TODO: Should we include the standard boolean operators for column comparisons?
_and: 'bool',
_or: 'bool',
_nand: 'bool',
_xor: 'bool',
...standardOperators('bool')
}
}
};
export const capabilitiesResponse: CapabilitiesResponse = {
display_name: 'Hasura SQLite',
@ -12,9 +92,7 @@ export const capabilitiesResponse: CapabilitiesResponse = {
supports_foreign_keys: true,
column_nullability: "nullable_and_non_nullable",
},
scalar_types: {
DateTime: {}
},
scalar_types,
queries: {},
relationships: {},
comparisons: {
@ -23,7 +101,7 @@ export const capabilitiesResponse: CapabilitiesResponse = {
}
},
... (
envToBool('MUTATIONS')
MUTATIONS
? {
mutations: {
atomicity_support_level: "heterogeneous_operations",
@ -37,6 +115,6 @@ export const capabilitiesResponse: CapabilitiesResponse = {
),
explain: {},
raw: {},
... ( envToBool('METRICS') ? { metrics: {} } : {} )
... (METRICS ? { metrics: {} } : {})
},
}

View File

@ -1,14 +1,14 @@
import { Config } from "./config";
import { Sequelize } from 'sequelize';
import { env } from "process";
import { envToBool } from "./util";
import { DB_ALLOW_LIST, DB_CREATE, DB_PRIVATECACHE, DB_READONLY } from "./environment";
import SQLite from 'sqlite3';
export type SqlLogger = (sql: string) => void
export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
if(env.DB_ALLOW_LIST != null) {
if(!env.DB_ALLOW_LIST.split(',').includes(config.db)) {
if(DB_ALLOW_LIST != null) {
if(DB_ALLOW_LIST.includes(config.db)) {
throw new Error(`Database ${config.db} is not present in DB_ALLOW_LIST 😭`);
}
}
@ -23,9 +23,9 @@ export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
// * OPEN_SHAREDCACHE
// * OPEN_PRIVATECACHE
// The default value is OPEN_READWRITE | OPEN_CREATE | OPEN_FULLMUTEX.
const readMode = envToBool('DB_READONLY') ? SQLite.OPEN_READONLY : SQLite.OPEN_READWRITE;
const createMode = envToBool('DB_CREATE') ? SQLite.OPEN_CREATE : 0; // Flag style means 0=off
const cacheMode = envToBool('DB_PRIVATECACHE') ? SQLite.OPEN_PRIVATECACHE : SQLite.OPEN_SHAREDCACHE;
const readMode = DB_READONLY ? SQLite.OPEN_READONLY : SQLite.OPEN_READWRITE;
const createMode = DB_CREATE ? SQLite.OPEN_CREATE : 0; // Flag style means 0=off
const cacheMode = DB_PRIVATECACHE ? SQLite.OPEN_PRIVATECACHE : SQLite.OPEN_SHAREDCACHE;
const mode = readMode | createMode | cacheMode;
const db = new Sequelize({

View File

@ -0,0 +1,41 @@

/**
 * Interprets common truthy spellings ("1", "true", "t", "yes", "y" — any case,
 * surrounding whitespace ignored) as true; everything else, including
 * null/undefined, is false.
 *
 * Fix: the previous regex was unanchored, so any value merely *containing* a
 * truthy token (e.g. "tomorrow", "nyet") was treated as true. The pattern is
 * now anchored to the whole (trimmed) string.
 */
export function stringToBool(x: string | null | undefined): boolean {
  return /^(1|true|t|yes|y)$/i.test((x ?? '').trim());
}
/** Reads the named environment variable and interprets it as a boolean flag. */
export function envToBool(envVarName: string): boolean {
  const raw = process.env[envVarName];
  return stringToBool(raw);
}
/**
 * Reads the named environment variable, falling back to `defaultValue` when it
 * is unset. An empty-but-set variable is returned as-is (not defaulted).
 */
export function envToString(envVarName: string, defaultValue: string): string {
  // process.env values are string | undefined, so ?? matches the old === undefined check.
  return process.env[envVarName] ?? defaultValue;
}
/**
 * Reads the named environment variable as a number, falling back to
 * `defaultValue` when it is unset.
 * Note: a set-but-malformed value yields NaN (plain `Number` semantics).
 */
export function envToNum(envVarName: string, defaultValue: number): number {
  const raw = process.env[envVarName];
  if (raw === undefined) {
    return defaultValue;
  }
  return Number(raw);
}
/**
 * Reads the named environment variable as a comma-separated list of strings.
 * Returns `defaultValue` (null unless overridden) when the variable is unset.
 */
export function envToArrayOfString(envVarName: string, defaultValue: Array<string> | null = null): Array<string> | null {
  const raw = process.env[envVarName];
  if (raw == null) {
    return defaultValue;
  }
  return raw.split(',');
}
// Minimum log level for the server logger (e.g. "debug", "info", "warn").
export const LOG_LEVEL = envToString("LOG_LEVEL", "info");
// When truthy, routes logs through pino-pretty for human-readable output.
export const PRETTY_PRINT_LOGS = envToBool('PRETTY_PRINT_LOGS');
// When truthy, enables the Prometheus /metrics endpoint and request metrics.
export const METRICS = envToBool('METRICS');
// See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
export const PERMISSIVE_CORS = envToBool('PERMISSIVE_CORS');
// Optional comma-separated allow-list of databases the agent may open; null means no restriction.
export const DB_ALLOW_LIST = envToArrayOfString('DB_ALLOW_LIST');
// SQLite open-mode flags. The default value is OPEN_READWRITE | OPEN_CREATE | OPEN_FULLMUTEX.
export const DB_READONLY = envToBool('DB_READONLY');
export const DB_CREATE = envToBool('DB_CREATE');
export const DB_PRIVATECACHE = envToBool('DB_PRIVATECACHE');
// When truthy, generated SQL is wrapped in /*<tag>*/ comments identifying the producing function.
export const DEBUGGING_TAGS = envToBool('DEBUGGING_TAGS');
// Upper bound on generated SQL length; longer queries are rejected with an ErrorResponse.
export const QUERY_LENGTH_LIMIT = envToNum('QUERY_LENGTH_LIMIT', Infinity);
// When truthy, tables/columns are reported as insertable/updatable/deletable in the schema.
export const MUTATIONS = envToBool('MUTATIONS');

View File

@ -6,11 +6,11 @@ import { getConfig, tryGetConfig } from './config';
import { capabilitiesResponse } from './capabilities';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse, RawRequest, RawResponse, ErrorResponse } from '@hasura/dc-api-types';
import { connect } from './db';
import { envToBool, envToString } from './util';
import metrics from 'fastify-metrics';
import prometheus from 'prom-client';
import * as fs from 'fs'
import { runRawOperation } from './raw';
import { LOG_LEVEL, METRICS, PERMISSIVE_CORS, PRETTY_PRINT_LOGS } from './environment';
const port = Number(process.env.PORT) || 8100;
@ -21,9 +21,9 @@ const port = Number(process.env.PORT) || 8100;
const server = Fastify({
logger:
{
level: envToString("LOG_LEVEL", "info"),
level: LOG_LEVEL,
...(
(envToBool('PRETTY_PRINT_LOGS'))
PRETTY_PRINT_LOGS
? { transport: { target: 'pino-pretty' } }
: {}
)
@ -47,9 +47,7 @@ server.setErrorHandler(function (error, _request, reply) {
reply.status(500).send(errorResponse);
})
const METRICS_ENABLED = envToBool('METRICS');
if(METRICS_ENABLED) {
if(METRICS) {
// See: https://www.npmjs.com/package/fastify-metrics
server.register(metrics, {
endpoint: '/metrics',
@ -60,7 +58,7 @@ if(METRICS_ENABLED) {
});
}
if(envToBool('PERMISSIVE_CORS')) {
if(PERMISSIVE_CORS) {
// See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
server.register(FastifyCors, {
origin: true,
@ -72,7 +70,7 @@ if(envToBool('PERMISSIVE_CORS')) {
// Register request-hook metrics.
// This is done in a closure so that the metrics are scoped here.
(() => {
if(! METRICS_ENABLED) {
if(! METRICS) {
return;
}

View File

@ -1,6 +1,6 @@
import { Config } from "./config";
import { connect, SqlLogger } from "./db";
import { coerceUndefinedToNull, coerceUndefinedOrNullToEmptyRecord, envToBool, isEmptyObject, tableNameEquals, unreachable, envToNum, stringArrayEquals } from "./util";
import { coerceUndefinedToNull, coerceUndefinedOrNullToEmptyRecord, isEmptyObject, tableNameEquals, unreachable, stringArrayEquals } from "./util";
import {
Expression,
BinaryComparisonOperator,
@ -26,6 +26,7 @@ import {
OrderByTarget,
} from "@hasura/dc-api-types";
import { customAlphabet } from "nanoid";
import { DEBUGGING_TAGS, QUERY_LENGTH_LIMIT } from "./environment";
const SqlString = require('sqlstring-sqlite');
@ -130,7 +131,19 @@ function where_clause(relationships: TableRelationships[], expression: Expressio
const bopLhs = generateComparisonColumnFragment(expression.column, queryTableAlias, currentTableAlias);
const bop = bop_op(expression.operator);
const bopRhs = generateComparisonValueFragment(expression.value, queryTableAlias, currentTableAlias);
return `${bopLhs} ${bop} ${bopRhs}`;
if(expression.operator == '_in_year') {
return `cast(strftime('%Y', ${bopLhs}) as integer) = ${bopRhs}`;
} else if(expression.operator == '_modulus_is_zero') {
return `cast(${bopLhs} as integer) % ${bopRhs} = 0`;
} else if(expression.operator == '_nand') {
return `NOT (${bopLhs} AND ${bopRhs})`;
} else if(expression.operator == '_nor') {
return `NOT (${bopLhs} OR ${bopRhs})`;
} else if(expression.operator == '_xor') {
return `(${bopLhs} AND (NOT ${bopRhs})) OR ((NOT${bopRhs}) AND ${bopRhs})`;
} else {
return `${bopLhs} ${bop} ${bopRhs}`;
}
case "binary_arr_op":
const bopALhs = generateComparisonColumnFragment(expression.column, queryTableAlias, currentTableAlias);
@ -365,19 +378,34 @@ function bop_array(o: BinaryArrayComparisonOperator): string {
/**
 * Translates a binary comparison operator from its DC API representation into
 * the SQLite SQL spelling used when rendering a where clause.
 *
 * Fix: the block contained duplicated case labels for 'equal' /
 * 'greater_than' / 'greater_than_or_equal' / 'less_than' /
 * 'less_than_or_equal' (diff residue: the old double-quoted lines were left
 * alongside the new single-quoted ones), which is a compile error. Only one
 * copy of each case is kept.
 *
 * Operators with no explicit case fall through and are emitted verbatim —
 * only safe when the operator text is itself valid SQLite syntax.
 * NOTE(review): `_in_year`, `_modulus_is_zero`, `_nand`, `_nor` and `_xor`
 * appear to be rewritten structurally in `where_clause` before this function
 * applies — confirm they can never reach the verbatim fallthrough.
 */
function bop_op(o: BinaryComparisonOperator): string {
  let result = o;
  // TODO: Check for coverage of these operators
  switch(o) {
    case 'equal': result = '='; break;
    case 'greater_than': result = '>'; break;
    case 'greater_than_or_equal': result = '>='; break;
    case 'less_than': result = '<'; break;
    case 'less_than_or_equal': result = '<='; break;
    case '_eq': result = '='; break; // Why is this required?
    case '_gt': result = '>'; break; // Why is this required?
    case '_gte': result = '>='; break; // Why is this required?
    case '_lt': result = '<'; break; // Why is this required?
    case '_lte': result = '<='; break; // Why is this required?
    case '_like': result = 'LIKE'; break;
    case '_glob': result = 'GLOB'; break;
    case '_regexp': result = 'REGEXP'; break; // TODO: Have capabilities detect if REGEXP support is enabled
    case '_neq': result = '<>'; break;
    case '_nlt': result = '!<'; break;
    case '_ngt': result = '!>'; break;
    case '_and': result = 'AND'; break;
    case '_or': result = 'OR'; break;
  }
  // TODO: We can't always assume that we can include the operator here verbatim.
  return tag('bop_op', result);
}
/**
 * Translates a unary comparison operator into its SQLite SQL spelling.
 *
 * Fix: the `case 'is_null'` line was duplicated (diff residue), which is a
 * compile error; only one copy is kept.
 */
function uop_op(o: UnaryComparisonOperator): string {
  let result = o;
  switch(o) {
    case 'is_null': result = "IS NULL"; break;
  }
  return tag('uop_op', result);
}
@ -630,7 +658,6 @@ function output(rows: any): QueryResponse {
return JSON.parse(rows[0].data);
}
const DEBUGGING_TAGS = envToBool('DEBUGGING_TAGS');
/** Function to add SQL comments to the generated SQL to tag which procedures generated what text.
*
* comment('a','b') => '/*\<a>\*\/ b /*\</a>*\/'
@ -693,14 +720,13 @@ export async function queryData(config: Config, sqlLogger: SqlLogger, queryReque
const db = connect(config, sqlLogger); // TODO: Should this be cached?
const q = query(queryRequest);
const query_length_limit = envToNum('QUERY_LENGTH_LIMIT', Infinity);
if(q.length > query_length_limit) {
if(q.length > QUERY_LENGTH_LIMIT) {
const result: ErrorResponse =
{
message: `Generated SQL Query was too long (${q.length} > ${query_length_limit})`,
message: `Generated SQL Query was too long (${q.length} > ${QUERY_LENGTH_LIMIT})`,
details: {
"query.length": q.length,
"limit": query_length_limit
"limit": QUERY_LENGTH_LIMIT
}
};
return result;

View File

@ -1,7 +1,8 @@
import { SchemaResponse, ScalarType, ColumnInfo, TableInfo, Constraint } from "@hasura/dc-api-types"
import { ScalarTypeKey } from "./capabilities";
import { Config } from "./config";
import { connect, SqlLogger } from './db';
import { envToBool } from "./util";
import { MUTATIONS } from "./environment";
var sqliteParser = require('sqlite-parser');
@ -18,7 +19,9 @@ type Datatype = {
variant: string, // Declared type, lowercased
}
function determineScalarType(datatype: Datatype): ScalarType {
// Note: Using ScalarTypeKey here instead of ScalarType to show that we have only used
// the capability documented types, and that ScalarTypeKey is a subset of ScalarType
function determineScalarType(datatype: Datatype): ScalarTypeKey {
switch (datatype.variant) {
case "bool": return "bool";
case "boolean": return "bool";
@ -35,7 +38,7 @@ function determineScalarType(datatype: Datatype): ScalarType {
}
}
function getColumns(ast: any[], primaryKeys: string[], mutationsEnabled: boolean) : ColumnInfo[] {
function getColumns(ast: any[], primaryKeys: string[]) : ColumnInfo[] {
return ast.map(c => {
const isPrimaryKey = primaryKeys.includes(c.name);
@ -43,8 +46,8 @@ function getColumns(ast: any[], primaryKeys: string[], mutationsEnabled: boolean
name: c.name,
type: determineScalarType(c.datatype),
nullable: nullableCast(c.definition),
insertable: mutationsEnabled,
updatable: mutationsEnabled && !isPrimaryKey,
insertable: MUTATIONS,
updatable: MUTATIONS && !isPrimaryKey,
};
})
}
@ -67,18 +70,16 @@ const formatTableInfo = (config: Config) => (info: TableInfoInternal): TableInfo
const foreignKey = foreignKeys.length > 0 ? { foreign_keys: Object.fromEntries(foreignKeys) } : {};
const tableName = config.explicit_main_schema ? ["main", info.name] : [info.name];
const mutationsEnabled = envToBool('MUTATIONS');
return {
name: tableName,
type: "table",
...primaryKey,
...foreignKey,
description: info.sql,
columns: getColumns(columnsDdl, primaryKeys, mutationsEnabled),
insertable: mutationsEnabled,
updatable: mutationsEnabled,
deletable: mutationsEnabled,
columns: getColumns(columnsDdl, primaryKeys),
insertable: MUTATIONS,
updatable: MUTATIONS,
deletable: MUTATIONS,
}
}

View File

@ -21,24 +21,6 @@ export const crossProduct = <T, U>(arr1: T[], arr2: U[]): [T,U][] => {
return arr1.flatMap(a1 => arr2.map<[T,U]>(a2 => [a1, a2]));
};
export function stringToBool(x: string | null | undefined): boolean {
return (/1|true|t|yes|y/i).test(x || '');
}
export function envToBool(envVarName: string): boolean {
return stringToBool(process.env[envVarName]);
}
export function envToString(envVarName: string, defaultValue: string): string {
const val = process.env[envVarName];
return val === undefined ? defaultValue : val;
}
export function envToNum(envVarName: string, defaultValue: number): number {
const val = process.env[envVarName];
return val === undefined ? defaultValue : Number(val);
}
/**
 * Returns the final element of an array.
 * NOTE(review): an empty array yields undefined despite the declared return
 * type T — callers must guard, or the signature should become T | undefined.
 */
export function last<T>(x: T[]): T {
  const [finalElement] = x.slice(-1);
  return finalElement;
}

View File

@ -161,6 +161,7 @@ test-suite tests-dc-api
Test.Specs.QuerySpec
Test.Specs.QuerySpec.AggregatesSpec
Test.Specs.QuerySpec.BasicSpec
Test.Specs.QuerySpec.CustomOperatorsSpec
Test.Specs.QuerySpec.FilteringSpec
Test.Specs.QuerySpec.OrderBySpec
Test.Specs.QuerySpec.RelationshipsSpec

View File

@ -4,6 +4,7 @@
--
module Hasura.Backends.DataConnector.API.V0.Scalar
( ScalarType (..),
scalarTypeToText,
)
where

View File

@ -5,6 +5,7 @@
module Hasura.Backends.DataConnector.API.V0.Table
( TableName (..),
TableInfo (..),
tableNameToText,
tiName,
tiType,
tiColumns,
@ -33,9 +34,9 @@ import Data.Aeson (FromJSON, FromJSONKey, ToJSON, ToJSONKey)
import Data.Data (Data)
import Data.HashMap.Strict (HashMap)
import Data.Hashable (Hashable)
import Data.List.NonEmpty (NonEmpty)
import Data.List.NonEmpty qualified as NonEmpty
import Data.OpenApi (ToSchema)
import Data.Text (Text)
import Data.Text (Text, intercalate)
import GHC.Generics (Generic)
import Hasura.Backends.DataConnector.API.V0.Column qualified as API.V0
import Prelude
@ -45,11 +46,14 @@ import Prelude
-- | The fully qualified name of a table. The last element in the list is the table name
-- and all other elements represent namespacing of the table name.
-- For example, for a database that has schemas, the name would be '[<schema>,<table name>]'
newtype TableName = TableName {unTableName :: NonEmpty Text}
newtype TableName = TableName {unTableName :: NonEmpty.NonEmpty Text}
deriving stock (Eq, Ord, Show, Generic, Data)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec TableName
-- | Render a fully-qualified table name as dot-separated text,
-- e.g. @TableName ("main" :| ["Artist"])@ becomes @"main.Artist"@.
tableNameToText :: TableName -> Text
tableNameToText = intercalate "." . NonEmpty.toList . unTableName
instance HasCodec TableName where
codec =
named "TableName" $

View File

@ -7,6 +7,7 @@ import Test.Data (TestData)
import Test.Sandwich (describe)
import Test.Specs.QuerySpec.AggregatesSpec qualified as Test.QuerySpec.AggregatesSpec
import Test.Specs.QuerySpec.BasicSpec qualified as Test.QuerySpec.BasicSpec
import Test.Specs.QuerySpec.CustomOperatorsSpec qualified as Test.QuerySpec.CustomOperatorsSpec
import Test.Specs.QuerySpec.FilteringSpec qualified as Test.QuerySpec.FilteringSpec
import Test.Specs.QuerySpec.OrderBySpec qualified as Test.QuerySpec.OrderBySpec
import Test.Specs.QuerySpec.RelationshipsSpec qualified as Test.QuerySpec.RelationshipsSpec
@ -17,6 +18,7 @@ spec :: TestData -> SourceName -> Config -> Capabilities -> AgentTestSpec
spec testData sourceName config capabilities@Capabilities {..} = do
describe "query API" do
Test.QuerySpec.BasicSpec.spec testData sourceName config
Test.QuerySpec.CustomOperatorsSpec.spec testData sourceName config _cScalarTypes
Test.QuerySpec.FilteringSpec.spec testData sourceName config _cComparisons
Test.QuerySpec.OrderBySpec.spec testData sourceName config capabilities
when (isJust _cRelationships) $

View File

@ -0,0 +1,52 @@
module Test.Specs.QuerySpec.CustomOperatorsSpec (spec) where
import Control.Lens ((&), (?~))
import Control.Monad (forM_)
import Control.Monad.List (guard)
import Data.HashMap.Strict qualified as HashMap
import Data.Maybe (maybeToList)
import Data.Text qualified as Text
import Hasura.Backends.DataConnector.API
import Hasura.Backends.DataConnector.API qualified as API
import Language.GraphQL.Draft.Syntax (Name (..))
import Test.AgentClient (queryGuarded)
import Test.Data (TestData (..))
import Test.Data qualified as Data
import Test.Sandwich (describe, shouldBe)
import Test.TestHelpers (AgentTestSpec, it)
import Prelude
-- | For every (custom operator, scalar type) pair advertised in the agent's
-- capabilities, run one query that applies the operator in a where clause and
-- check that it executes without error. Result rows are not inspected beyond
-- the success check.
spec :: TestData -> SourceName -> Config -> ScalarTypesCapabilities -> AgentTestSpec
spec TestData {..} sourceName config (ScalarTypesCapabilities scalarTypesCapabilities) = describe "Custom Operators in Queries" do
describe "Top-level application of custom operators" do
-- We run a list monad to identify test representatives,
-- keyed by (operator name, column type) so each operator/type pair is
-- exercised exactly once; the value records which table/columns to use.
let items :: HashMap.HashMap (Name, ScalarType) (ColumnName, TableName, ColumnName, ScalarType)
items =
HashMap.fromList do
API.TableInfo {_tiName, _tiColumns} <- _tdSchemaTables
ColumnInfo {_ciName, _ciType} <- _tiColumns
ScalarTypeCapabilities {_stcComparisonOperators} <- maybeToList $ HashMap.lookup _ciType scalarTypesCapabilities
(operatorName, argType) <- HashMap.toList $ unComparisonOperators _stcComparisonOperators
-- Pick a second column in the same table whose type matches the
-- operator's declared argument type to serve as the right-hand side.
ColumnInfo {_ciName = anotherColumnName, _ciType = anotherColumnType} <- _tiColumns
guard $ anotherColumnType == argType
pure ((operatorName, _ciType), (_ciName, _tiName, anotherColumnName, argType))
forM_ (HashMap.toList items) \((operatorName, columnType), (columnName, tableName, argColumnName, argType)) -> do
-- Perform a select using the operator in a where clause
let queryRequest =
let fields = Data.mkFieldsMap [(unColumnName columnName, _tdColumnField (unColumnName columnName) columnType)]
query' = Data.emptyQuery & qFields ?~ fields
in QueryRequest tableName [] query'
where' =
ApplyBinaryComparisonOperator
(CustomBinaryComparisonOperator (unName operatorName))
(_tdCurrentComparisonColumn (unColumnName columnName) columnType)
(AnotherColumn $ ComparisonColumn CurrentTable argColumnName argType)
query =
queryRequest
& qrQuery . qWhere ?~ where'
& qrQuery . qLimit ?~ 1 -- No need to test actual results
it (Text.unpack $ "ComparisonOperator " <> unName operatorName <> ": " <> scalarTypeToText columnType <> " executes without an error") do
result <- queryGuarded sourceName config query
-- Check that you get a success response
Data.responseRows result `shouldBe` take 1 (Data.responseRows result)

View File

@ -31,7 +31,55 @@ backendTypeMetadata =
supports_primary_keys: true
supports_foreign_keys: true
scalar_types:
DateTime: {}
DateTime:
comparison_operators:
_in_year: int
_eq: DateTime
_gt: DateTime
_gte: DateTime
_lt: DateTime
_lte: DateTime
_neq: DateTime
string:
comparison_operators:
_like: string
_glob: string
_eq: string
_gt: string
_gte: string
_lt: string
_lte: string
_neq: string
decimal:
comparison_operators:
_modulus_is_zero: number
_eq: number
_gt: number
_gte: number
_lt: number
_lte: number
_neq: number
number:
comparison_operators:
_modulus_is_zero: number
_eq: number
_gt: number
_gte: number
_lt: number
_lte: number
_neq: number
bool:
comparison_operators:
_and: bool
_or: bool
_nand: bool
_xor: bool
_eq: bool
_gt: bool
_gte: bool
_lt: bool
_lte: bool
_neq: bool
queries: {}
relationships: {}
comparisons: