mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-10-05 14:28:08 +03:00
SQLite Mutations
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/7573 Co-authored-by: Daniel Chambers <1214352+daniel-chambers@users.noreply.github.com> GitOrigin-RevId: 7807368faaa4bac5c0726c2dab041f8180a3fc31
This commit is contained in:
parent
e443e25efe
commit
630bf277c1
@ -30,6 +30,7 @@ services:
|
||||
METRICS: y
|
||||
PRETTY_PRINT_LOGS: y
|
||||
LOG_LEVEL: debug
|
||||
MUTATIONS: y
|
||||
DATASETS: y
|
||||
DATASET_DELETE: y
|
||||
healthcheck:
|
||||
|
@ -13,7 +13,7 @@ The SQLite agent currently supports the following capabilities:
|
||||
* [x] Aggregations
|
||||
* [x] Prometheus Metrics
|
||||
* [x] Exposing Foreign-Key Information
|
||||
* [ ] Mutations
|
||||
* [x] Mutations
|
||||
* [ ] Subscriptions
|
||||
* [ ] Streaming Subscriptions
|
||||
|
||||
@ -53,7 +53,7 @@ You will want to mount a volume with your database(s) so that they can be refere
|
||||
|
||||
## Options / Environment Variables
|
||||
|
||||
Note: Boolean flags `{FLAG}` can be provided as `1`, `true`, `yes`, or omitted and default to `false`.
|
||||
Note: Boolean flags `{FLAG}` can be provided as `1`, `true`, `t`, `yes`, `y`, or omitted and default to `false`.
|
||||
|
||||
| ENV Variable Name | Format | Default | Info |
|
||||
| --- | --- | --- | --- |
|
||||
@ -72,6 +72,7 @@ Note: Boolean flags `{FLAG}` can be provided as `1`, `true`, `yes`, or omitted a
|
||||
| `DATASET_DELETE` | `{FLAG}` | `false` | Enable `DELETE /datasets/:name` |
|
||||
| `DATASET_TEMPLATES` | `DIRECTORY` | `./dataset_templates` | Directory to clone datasets from. |
|
||||
| `DATASET_CLONES` | `DIRECTORY` | `./dataset_clones` | Directory to clone datasets to. |
|
||||
| `MUTATIONS` | `{FLAG}` | `false` | Enable Mutation Support. |
|
||||
|
||||
|
||||
## Agent usage
|
||||
@ -126,7 +127,7 @@ From the HGE repo.
|
||||
## TODO
|
||||
|
||||
* [x] Prometheus metrics hosted at `/metrics`
|
||||
* [ ] Pull reference types from a package rather than checked-in files
|
||||
* [x] Pull reference types from a package rather than checked-in files
|
||||
* [x] Health Check
|
||||
* [x] DB Specific Health Checks
|
||||
* [x] Schema
|
||||
@ -157,4 +158,5 @@ From the HGE repo.
|
||||
* [x] Reuse `find_table_relationship` in more scenarios
|
||||
* [x] ORDER clause in aggregates breaks SQLite parser for some reason
|
||||
* [x] Check that looped exist check doesn't cause name conflicts
|
||||
* [ ] `NOT EXISTS IS NULL` != `EXISTS IS NOT NULL`, Example:
|
||||
* [x] `NOT EXISTS IS NULL` != `EXISTS IS NOT NULL`
|
||||
* [x] Mutation support
|
||||
|
@ -1,11 +1,18 @@
|
||||
import { Config } from "./config";
|
||||
import { Sequelize } from 'sequelize';
|
||||
import { DB_ALLOW_LIST, DB_CREATE, DB_PRIVATECACHE, DB_READONLY } from "./environment";
|
||||
|
||||
import SQLite from 'sqlite3';
|
||||
|
||||
export type SqlLogger = (sql: string) => void
|
||||
|
||||
// See https://github.com/TryGhost/node-sqlite3/wiki/API#new-sqlite3databasefilename--mode--callback
|
||||
// mode (optional): One or more of OPEN_READONLY | OPEN_READWRITE | OPEN_CREATE | OPEN_FULLMUTEX | OPEN_URI | OPEN_SHAREDCACHE | OPEN_PRIVATECACHE
|
||||
// The default value is OPEN_READWRITE | OPEN_CREATE | OPEN_FULLMUTEX.
|
||||
const readMode = DB_READONLY ? SQLite.OPEN_READONLY : SQLite.OPEN_READWRITE;
|
||||
const createMode = DB_CREATE ? SQLite.OPEN_CREATE : 0; // Flag style means 0=off
|
||||
const cacheMode = DB_PRIVATECACHE ? SQLite.OPEN_PRIVATECACHE : SQLite.OPEN_SHAREDCACHE;
|
||||
const mode = readMode | createMode | cacheMode;
|
||||
|
||||
export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
|
||||
if(DB_ALLOW_LIST != null) {
|
||||
if(DB_ALLOW_LIST.includes(config.db)) {
|
||||
@ -13,21 +20,6 @@ export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
|
||||
}
|
||||
}
|
||||
|
||||
// See https://github.com/TryGhost/node-sqlite3/wiki/API#new-sqlite3databasefilename--mode--callback
|
||||
// mode (optional): One or more of
|
||||
// * OPEN_READONLY
|
||||
// * OPEN_READWRITE
|
||||
// * OPEN_CREATE
|
||||
// * OPEN_FULLMUTEX
|
||||
// * OPEN_URI
|
||||
// * OPEN_SHAREDCACHE
|
||||
// * OPEN_PRIVATECACHE
|
||||
// The default value is OPEN_READWRITE | OPEN_CREATE | OPEN_FULLMUTEX.
|
||||
const readMode = DB_READONLY ? SQLite.OPEN_READONLY : SQLite.OPEN_READWRITE;
|
||||
const createMode = DB_CREATE ? SQLite.OPEN_CREATE : 0; // Flag style means 0=off
|
||||
const cacheMode = DB_PRIVATECACHE ? SQLite.OPEN_PRIVATECACHE : SQLite.OPEN_SHAREDCACHE;
|
||||
const mode = readMode | createMode | cacheMode;
|
||||
|
||||
const db = new Sequelize({
|
||||
dialect: 'sqlite',
|
||||
storage: config.db,
|
||||
@ -37,3 +29,62 @@ export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
|
||||
|
||||
return db;
|
||||
};
|
||||
|
||||
// A promisified handle onto a raw sqlite3 Database (as produced by `connect2`).
export type Connected = {
  // Runs a single statement with optional named bind parameters ($name style),
  // resolving with all result rows.
  query: ((query: string, params?: Record<string, unknown>) => Promise<Array<any>>),
  // Closes the underlying handle; resolves `true` on success, rejects on error.
  close: (() => Promise<boolean>)
}
|
||||
|
||||
/**
|
||||
* @param config: Config
|
||||
* @param sqlLogger: SqlLogger
|
||||
* @returns {query, mutation}
|
||||
*
|
||||
* Query and mutation support implemented directly on the SQLite3 library.
|
||||
* See: https://github.com/TryGhost/node-sqlite3/wiki/API
|
||||
*/
|
||||
export function connect2(config: Config, sqlLogger: SqlLogger): Connected {
|
||||
if(DB_ALLOW_LIST != null) {
|
||||
if(DB_ALLOW_LIST.includes(config.db)) {
|
||||
throw new Error(`Database ${config.db} is not present in DB_ALLOW_LIST 😭`);
|
||||
}
|
||||
}
|
||||
|
||||
const db_ = new SQLite.Database(config.db, mode);
|
||||
|
||||
// NOTE: Avoiding util.promisify as this seems to be causing connection failures.
|
||||
const dbQueryPromise = (query: string, params?: Record<string, unknown>): Promise<Array<any>> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
/* Pass named params:
|
||||
* db.run("UPDATE tbl SET name = $name WHERE id = $id", {
|
||||
* $id: 2,
|
||||
* $name: "bar"
|
||||
* });
|
||||
*/
|
||||
db_.all(query, params || {}, (err, data) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
} else {
|
||||
resolve(data);
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const dbClosePromise = (): Promise<boolean> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
db_.close((err) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
} else {
|
||||
resolve(true); // What should we resolve with if there's no data to promise?
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
query: dbQueryPromise,
|
||||
close: dbClosePromise,
|
||||
};
|
||||
}
|
||||
|
@ -11,6 +11,8 @@ import prometheus from 'prom-client';
|
||||
import { runRawOperation } from './raw';
|
||||
import { DATASETS, DATASET_DELETE, LOG_LEVEL, METRICS, MUTATIONS, PERMISSIVE_CORS, PRETTY_PRINT_LOGS } from './environment';
|
||||
import { cloneDataset, deleteDataset, getDataset } from './datasets';
|
||||
import { runMutation } from './mutation';
|
||||
import { ErrorWithStatusCode } from './util';
|
||||
|
||||
const port = Number(process.env.PORT) || 8100;
|
||||
|
||||
@ -34,17 +36,25 @@ server.setErrorHandler(function (error, _request, reply) {
|
||||
// Log error
|
||||
this.log.error(error)
|
||||
|
||||
const errorResponse: ErrorResponse = {
|
||||
type: "uncaught-error",
|
||||
message: "SQLite Agent: Uncaught Exception",
|
||||
details: {
|
||||
name: error.name,
|
||||
message: error.message
|
||||
}
|
||||
};
|
||||
if (error instanceof ErrorWithStatusCode) {
|
||||
const errorResponse: ErrorResponse = {
|
||||
type: error.type,
|
||||
message: error.message,
|
||||
details: error.details
|
||||
};
|
||||
reply.status(error.code).send(errorResponse);
|
||||
|
||||
// Send error response
|
||||
reply.status(500).send(errorResponse);
|
||||
} else {
|
||||
const errorResponse: ErrorResponse = {
|
||||
type: "uncaught-error",
|
||||
message: "SQLite Agent: Uncaught Exception",
|
||||
details: {
|
||||
name: error.name,
|
||||
message: error.message
|
||||
}
|
||||
};
|
||||
reply.status(500).send(errorResponse);
|
||||
}
|
||||
})
|
||||
|
||||
if(METRICS) {
|
||||
@ -103,12 +113,7 @@ const sqlLogger = (sql: string): void => {
|
||||
server.log.debug({sql}, "Executed SQL");
|
||||
};
|
||||
|
||||
// NOTE:
|
||||
//
|
||||
// While an ErrorResponse is available it is not currently used as there are no errors anticipated.
|
||||
// It is included here for illustrative purposes.
|
||||
//
|
||||
server.get<{ Reply: CapabilitiesResponse | ErrorResponse }>("/capabilities", async (request, _response) => {
|
||||
server.get<{ Reply: CapabilitiesResponse }>("/capabilities", async (request, _response) => {
|
||||
server.log.info({ headers: request.headers, query: request.body, }, "capabilities.request");
|
||||
return capabilitiesResponse;
|
||||
});
|
||||
@ -119,16 +124,19 @@ server.get<{ Reply: SchemaResponse }>("/schema", async (request, _response) => {
|
||||
return getSchema(config, sqlLogger);
|
||||
});
|
||||
|
||||
server.post<{ Body: QueryRequest, Reply: QueryResponse | ErrorResponse }>("/query", async (request, response) => {
|
||||
/**
|
||||
* @throws ErrorWithStatusCode
|
||||
*/
|
||||
server.post<{ Body: QueryRequest, Reply: QueryResponse }>("/query", async (request, response) => {
|
||||
server.log.info({ headers: request.headers, query: request.body, }, "query.request");
|
||||
const end = queryHistogram.startTimer()
|
||||
const config = getConfig(request);
|
||||
const result : QueryResponse | ErrorResponse = await queryData(config, sqlLogger, request.body);
|
||||
end();
|
||||
if("message" in result) {
|
||||
response.statusCode = 500;
|
||||
try {
|
||||
const result : QueryResponse = await queryData(config, sqlLogger, request.body);
|
||||
return result;
|
||||
} finally {
|
||||
end();
|
||||
}
|
||||
return result;
|
||||
});
|
||||
|
||||
// TODO: Use derived types for body and reply
|
||||
@ -147,7 +155,9 @@ server.post<{ Body: QueryRequest, Reply: ExplainResponse}>("/explain", async (re
|
||||
if(MUTATIONS) {
|
||||
server.post<{ Body: MutationRequest, Reply: MutationResponse}>("/mutation", async (request, _response) => {
|
||||
server.log.info({ headers: request.headers, query: request.body, }, "mutation.request");
|
||||
throw Error("Mutations not yet implemented");
|
||||
// TODO: Mutation Histogram?
|
||||
const config = getConfig(request);
|
||||
return runMutation(config, sqlLogger, request.body);
|
||||
});
|
||||
}
|
||||
|
||||
@ -177,9 +187,6 @@ if(DATASETS) {
|
||||
server.get<{ Params: { template_name: DatasetTemplateName, }, Reply: DatasetGetTemplateResponse }>("/datasets/templates/:template_name", async (request, _response) => {
|
||||
server.log.info({ headers: request.headers, query: request.body, }, "datasets.templates.get");
|
||||
const result = await getDataset(request.params.template_name);
|
||||
if(! result.exists) {
|
||||
_response.statusCode = 404;
|
||||
}
|
||||
return result;
|
||||
});
|
||||
|
||||
@ -219,9 +226,9 @@ server.get("/", async (request, response) => {
|
||||
<li><a href="/raw">POST /raw - Raw Query Handler</a>
|
||||
<li><a href="/health">GET /health - Healthcheck</a>
|
||||
<li><a href="/metrics">GET /metrics - Prometheus formatted metrics</a>
|
||||
<li><a href="/datasets/NAME">GET /datasets/{NAME} - Information on Dataset</a>
|
||||
<li><a href="/datasets/NAME">POST /datasets/{NAME} - Create a Dataset</a>
|
||||
<li><a href="/datasets/NAME">DELETE /datasets/{NAME} - Delete a Dataset</a>
|
||||
<li><a href="/datasets/templates/NAME">GET /datasets/templates/{NAME} - Information on Dataset</a>
|
||||
<li><a href="/datasets/clones/NAME">POST /datasets/clones/{NAME} - Create a Dataset</a>
|
||||
<li><a href="/datasets/clones/NAME">DELETE /datasets/clones/{NAME} - Delete a Dataset</a>
|
||||
</ul>
|
||||
</body>
|
||||
</html>
|
||||
|
330
dc-agents/sqlite/src/mutation.ts
Normal file
330
dc-agents/sqlite/src/mutation.ts
Normal file
@ -0,0 +1,330 @@
|
||||
import { ArrayRelationInsertFieldValue, ColumnInsertFieldValue, DeleteMutationOperation, Expression, Field, InsertFieldSchema, InsertMutationOperation, MutationOperation, MutationOperationResults, MutationRequest, MutationResponse, ObjectRelationInsertFieldValue, QueryRequest, RowObject, RowUpdate, TableInsertSchema, TableName, TableRelationships, UpdateMutationOperation } from "@hasura/dc-api-types";
|
||||
import { Config } from "./config";
|
||||
import { connect2, Connected, SqlLogger } from "./db";
|
||||
import { escapeIdentifier, escapeTableName, escapeTableNameSansSchema, json_object, where_clause, } from "./query";
|
||||
import { asyncSequenceFromInputs, ErrorWithStatusCode, mapObjectToArray, tableNameEquals, unreachable, zip } from "./util";
|
||||
|
||||
// Types

// One row as returned by an INSERT/UPDATE/DELETE ... RETURNING statement
// built by insertString/updateString/deleteString below.
type Row = {
  ok: boolean, // outcome of the post-mutation check expression (truthy/falsy from SQL)
  row: string, // JSON-serialised returning fields for this row
  statement?: string, // set only when ok is false: the offending SQL statement
  values?: Record<string, unknown>, // set only when ok is false: the bound values
}

// Any value that may appear for a field in an insert row.
type RowInfoValue = ColumnInsertFieldValue | ObjectRelationInsertFieldValue | ArrayRelationInsertFieldValue | null;

// Per-field packet for inserts: keeps the identifier, schema, bind-variable
// name, and value aligned so the generated SQL and its parameters stay in sync.
type RowInfo = {
  field: string,
  schema: InsertFieldSchema,
  variable: string,
  value: RowInfoValue
}

// Per-column packet for updates, pairing a bind variable with its RowUpdate.
type UpdateInfo = {
  variable: string,
  value: unknown,
  update: RowUpdate,
}

// Common subset of RowInfo/UpdateInfo consumed by queryValues.
interface Info {
  variable: string,
  value: unknown
}
|
||||
|
||||
// Functions
|
||||
|
||||
function escapeVariable(x: string): string {
|
||||
return x.replace(/[^a-zA-Z0-9]/g, '');
|
||||
}
|
||||
|
||||
function valuesString(infos: Array<RowInfo>): string {
|
||||
return infos.map((info) => info.variable).join(', ');
|
||||
}
|
||||
|
||||
function customUpdateOperator(operator: string, column: string, variable: string): string {
|
||||
switch(operator) {
|
||||
case 'inc':
|
||||
return `${column} + ${variable}`;
|
||||
case 'dec':
|
||||
return `${column} - ${variable}`;
|
||||
}
|
||||
throw Error(`Custom operator ${operator} is invalid. This should not happen.`);
|
||||
}
|
||||
|
||||
/**
 * Renders the body of an UPDATE's SET clause — one `column = expr` fragment per
 * requested column update, joined with commas.
 */
function setString(infos: Array<UpdateInfo>): string {
  return infos.map((info) => {
    const update = info.update;
    switch(update.type) {
      case 'custom_operator':
        // e.g. "Age" = "Age" + $Age_0
        return `${update.column} = ${customUpdateOperator(update.operator_name, update.column, info.variable)}`
      case 'set':
        // e.g. "Name" = $Name_0
        return `${update.column} = ${info.variable}`
      default:
        // Compile-time exhaustiveness check over RowUpdate's variants.
        return unreachable(update);
    }
  }).join(', ');
}
|
||||
|
||||
/**
 * Renders the escaped, comma-separated column-name list for an INSERT statement.
 * Only plain 'column' insert fields are supported; relationship-insert field
 * schemas cause a thrown Error.
 */
function columnsString(infos: Array<RowInfo>): string {
  return infos.map((info) => {
    switch(info.schema.type) {
      case 'column':
        return escapeIdentifier(info.schema.column);
      default:
        throw(Error(`Type ${info.schema.type} for field ${info.field} is not currently supported.`))
    }
  }).join(', ');
}
|
||||
|
||||
/**
|
||||
* @param schemas
|
||||
* @param table
|
||||
* @returns schema | null
|
||||
*/
|
||||
function getTableInsertSchema(schemas: Array<TableInsertSchema>, table: TableName): TableInsertSchema | null {
|
||||
for(var i = 0; i < schemas.length; i++) {
|
||||
const schema = schemas[i];
|
||||
if(tableNameEquals(schema.table)(table)) {
|
||||
return schema;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Compiles a predicate Expression into a SQL boolean expression, used both for
 * WHERE clauses and for the `ok` check column in RETURNING clauses.
 *
 * @param relationships - table relationship metadata from the request
 * @param e - predicate expression to compile
 * @param table - table the predicate is evaluated against
 * @returns SQL fragment evaluating the predicate
 *
 * Note: The heavy lifting is performed by `where_clause` from query.ts.
 * The schema-less alias is required because qualified table names are not
 * valid in SQLite RETURNING expressions.
 */
function whereString(relationships: Array<TableRelationships>, e: Expression, table: TableName): string {
  const w = where_clause(relationships, e, table, escapeTableNameSansSchema(table));
  return w;
}
|
||||
|
||||
/**
 * @param relationships - table relationship metadata from the request
 * @param fields - requested returning fields (may be empty)
 * @param table - table the fields belong to
 * @returns SQLite JSON_OBJECT expression usable inside a RETURNING clause
 *
 * The `json_object` function from query.ts performs the heavy lifting here.
 */
function returningString(relationships: Array<TableRelationships>, fields: Record<string, Field>, table: TableName): string {
  /* Example of fields:
    {
      "ArtistId": {
        "type": "column",
        "column": "ArtistId",
        "column_type": "number"
      }
    }
  */
  const r = json_object(relationships, fields, table, escapeTableNameSansSchema(table));
  return r;
}
|
||||
|
||||
function queryValues(info: Array<Info>): Record<string, unknown> {
|
||||
return Object.fromEntries(info.map((x) => [x.variable, x.value]));
|
||||
}
|
||||
|
||||
// An empty conjunction — compiles to TRUE; used as the default when an
// operation supplies no where/check expression.
const EMPTY_AND: Expression = { type: 'and', expressions: [] };
|
||||
|
||||
/**
 * Builds an INSERT ... RETURNING statement for one batch of same-shaped rows.
 *
 * The RETURNING clause yields two columns per inserted row:
 *   - row: JSON object of the requested returning fields
 *   - ok:  result of the post-insert permission check expression
 */
function insertString(relationships: Array<TableRelationships>, op: InsertMutationOperation, info: Array<RowInfo>): string {
  // SQLite requires the "DEFAULT VALUES" form (not an empty VALUES list) when
  // no columns are being supplied.
  const columnValues =
    info.length > 0
      ? `(${columnsString(info)}) VALUES (${valuesString(info)})`
      : "DEFAULT VALUES";

  return `
    INSERT INTO ${escapeTableName(op.table)} ${columnValues}
    RETURNING
      ${returningString(relationships, op.returning_fields || {}, op.table)} as row,
      ${whereString(relationships, op.post_insert_check || EMPTY_AND, op.table)} as ok
    `;
}
|
||||
|
||||
function deleteString(relationships: Array<TableRelationships>, op: DeleteMutationOperation): string {
|
||||
return `
|
||||
DELETE FROM ${op.table}
|
||||
WHERE ${whereString(relationships, op.where || EMPTY_AND, op.table)}
|
||||
RETURNING
|
||||
${returningString(relationships, op.returning_fields || {}, op.table)} as row,
|
||||
1=1 as ok
|
||||
`;
|
||||
}
|
||||
|
||||
function updateString(relationships: Array<TableRelationships>, op: UpdateMutationOperation, info: Array<UpdateInfo>): string {
|
||||
const result = `
|
||||
UPDATE ${op.table}
|
||||
SET ${setString(info)}
|
||||
WHERE ${whereString(relationships, op.where || EMPTY_AND, op.table)}
|
||||
RETURNING
|
||||
${returningString(relationships, op.returning_fields || {}, op.table)} as row,
|
||||
${whereString(relationships, op.post_update_check || EMPTY_AND, op.table)} as ok
|
||||
`;
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param schemas
|
||||
* @param op
|
||||
* @returns Nested Array of RowInfo
|
||||
*
|
||||
* This function compiles all the useful information for constructing query-strings and variable data
|
||||
* into arrays of RowInfo packets. It is done this way to avoid repeated lookups and to keep the alignment
|
||||
* of identifiers, variables, and data in sync.
|
||||
*/
|
||||
function getInsertRowInfos(schemas: Array<TableInsertSchema>, op: InsertMutationOperation): Array<RowInfo[]> {
|
||||
const schema = getTableInsertSchema(schemas, op.table);
|
||||
if(schema == null) {
|
||||
throw(Error(`Couldn't find schema for table ${escapeTableName(op.table)}`));
|
||||
}
|
||||
return op.rows.map((row, r) => {
|
||||
const rowInfo = mapObjectToArray(row, ([k,v], i) => {
|
||||
const fieldSchema = schema.fields[k];
|
||||
if(fieldSchema == null) {
|
||||
throw(Error(`Couldn't find schema for field ${k}`));
|
||||
}
|
||||
return {
|
||||
field: k,
|
||||
schema: fieldSchema,
|
||||
variable: `$${escapeVariable(k)}_${r}_${i}`,
|
||||
value: v
|
||||
};
|
||||
});
|
||||
r++;
|
||||
return rowInfo;
|
||||
});
|
||||
}
|
||||
|
||||
function getUpdateRowInfos(op: UpdateMutationOperation): Array<UpdateInfo> {
|
||||
return op.updates.map((update, i) => {
|
||||
return {
|
||||
variable: `$${escapeVariable(update.column)}_${i}`,
|
||||
value: update.value,
|
||||
update: update
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Executes one INSERT ... RETURNING statement for a batch of same-shaped rows.
 *
 * Rows that failed the post-insert check (ok falsy) are annotated with the
 * statement and bound values for diagnostics; the caller decides whether to
 * roll the transaction back.
 */
async function insertRow(db: Connected, relationships: Array<TableRelationships>, op: InsertMutationOperation, info: Array<RowInfo>): Promise<Array<Row>> {
  const q = insertString(relationships, op, info);
  const v = queryValues(info);
  const results = await db.query(q,v);
  results.forEach((r) => {
    if(!r.ok) {
      r.statement = q
      r.values = v
    }
  });
  return results;
}
|
||||
|
||||
/**
 * Executes one UPDATE ... RETURNING statement.
 *
 * Rows that failed the post-update check (ok falsy) are annotated with the
 * statement and bound values for diagnostics; the caller decides whether to
 * roll the transaction back.
 */
async function updateRow(db: Connected, relationships: Array<TableRelationships>, op: UpdateMutationOperation, info: Array<UpdateInfo>): Promise<Array<Row>> {
  const q = updateString(relationships, op, info);
  const v = queryValues(info);
  const results = await db.query(q,v);
  results.forEach((r) => {
    if(!r.ok) {
      r.statement = q
      r.values = v
    }
  });
  return results;
}
|
||||
|
||||
/**
 * Executes one DELETE ... RETURNING statement.
 * Deletes have no post-mutation check (ok is hard-coded 1=1), so no failure
 * annotation is needed here.
 */
async function deleteRows(db: Connected, relationships: Array<TableRelationships>, op: DeleteMutationOperation): Promise<Array<Row>> {
  const q = deleteString(relationships, op);
  const results = await db.query(q);
  return results;
}
|
||||
|
||||
/**
 * Builds the 400 "mutation-permission-check-failure" error raised when one or
 * more returned rows fail their post-mutation check.
 *
 * NOTE(review): the message says "Post-Insert" but this helper is also invoked
 * for update operations — consider rewording.
 */
function postMutationCheckError(op: MutationOperation, failed: Array<Row>): ErrorWithStatusCode {
  return ErrorWithStatusCode.mutationPermissionCheckFailure(
    `Post-Insert checks failed with ${failed.length} ${failed.length > 1 ? 'errors' : 'error'}: ${JSON.stringify(failed)}`,
    {op: op, results: failed}
  );
}
|
||||
|
||||
/**
 * Executes a single mutation operation inside its own BEGIN/COMMIT transaction.
 *
 * Inserts and updates run their post-mutation permission checks via the `ok`
 * RETURNING column: any failing row triggers a ROLLBACK and a 400 error.
 * Deletes have no check and always commit.
 *
 * NOTE(review): if db.query throws mid-transaction (e.g. a SQL error), no
 * ROLLBACK is issued here — verify the connection is not reused afterwards.
 */
async function mutationOperation(db: Connected, relationships: Array<TableRelationships>, schema: Array<TableInsertSchema>, op: MutationOperation): Promise<MutationOperationResults> {
  switch(op.type) {
    case 'insert':
      const infos = getInsertRowInfos(schema, op);
      await db.query('BEGIN',{});
      // We split this operation into multiple inserts in case the inserted columns are heterogeneous: https://sqlite.org/forum/info/d7384e085b808b05
      const insertResultsSet = await asyncSequenceFromInputs(infos, (info) => insertRow(db, relationships, op, info));
      const insertResults = ([] as Array<Row>).concat(...insertResultsSet);
      let insertFailed: Array<Row> = [];
      const mappedInsertResults = insertResults.map((e: Row) => {
        if(! e.ok) {
          insertFailed.push(e);
        }
        return JSON.parse(e.row);
      });
      if(insertFailed.length > 0) {
        await db.query('ROLLBACK', {});
        throw(postMutationCheckError(op, insertFailed));
      } else {
        await db.query('COMMIT', {});
        return {
          affected_rows: mappedInsertResults.length,
          // Only include `returning` when the caller asked for returning fields.
          ...(op.returning_fields ? { returning: mappedInsertResults } : {})
        };
      }

    case 'update':
      const updateInfo = getUpdateRowInfos(op);
      await db.query('BEGIN',{});
      const resultSet = await updateRow(db, relationships, op, updateInfo);
      const updateResults = ([] as Array<Row>).concat(...resultSet);
      let updateFailed: Array<Row> = [];
      const mappedUpdateResults = updateResults.map((e: Row) => {
        if(! e.ok) {
          updateFailed.push(e);
        }
        return JSON.parse(e.row);
      });
      if(updateFailed.length > 0) {
        await db.query('ROLLBACK', {});
        throw(postMutationCheckError(op, updateFailed));
      } else {
        await db.query('COMMIT', {});
        return {
          affected_rows: mappedUpdateResults.length,
          ...(op.returning_fields ? { returning: mappedUpdateResults } : {})
        };
      }

    case 'delete':
      // No post-mutation check for deletes, so no rollback path is needed.
      await db.query('BEGIN',{});
      const deleteResults = await deleteRows(db, relationships, op);
      const mappedDeleteResults = deleteResults.map((r: Row) => JSON.parse(r.row));
      await db.query('COMMIT',{});
      return {
        affected_rows: mappedDeleteResults.length,
        ...(op.returning_fields ? { returning: mappedDeleteResults } : {})
      };

    default:
      // Exhaustiveness check over MutationOperation's variants.
      return unreachable(op['type']);
  }
}
|
||||
|
||||
/**
|
||||
* @param config
|
||||
* @param sqlLogger
|
||||
* @param request
|
||||
* @returns MutationResponse
|
||||
*
|
||||
* Top-Level function for mutations.
|
||||
* This performs inserts/updates/deletes.
|
||||
*/
|
||||
export async function runMutation(config: Config, sqlLogger: SqlLogger, request: MutationRequest): Promise<MutationResponse> {
|
||||
const db = connect2(config, sqlLogger);
|
||||
const resultSet = await asyncSequenceFromInputs(request.operations, (op) => mutationOperation(db, request.table_relationships, request.insert_schema, op));
|
||||
return {
|
||||
operation_results: resultSet
|
||||
};
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
import { Config } from "./config";
|
||||
import { connect, SqlLogger } from "./db";
|
||||
import { coerceUndefinedToNull, coerceUndefinedOrNullToEmptyRecord, isEmptyObject, tableNameEquals, unreachable, stringArrayEquals } from "./util";
|
||||
import { coerceUndefinedToNull, coerceUndefinedOrNullToEmptyRecord, isEmptyObject, tableNameEquals, unreachable, stringArrayEquals, ErrorWithStatusCode } from "./util";
|
||||
import {
|
||||
Expression,
|
||||
BinaryComparisonOperator,
|
||||
@ -46,7 +46,7 @@ function escapeString(x: any): string {
|
||||
* @param identifier: Unescaped name. E.g. 'Alb"um'
|
||||
* @returns Escaped name. E.g. '"Alb\"um"'
|
||||
*/
|
||||
function escapeIdentifier(identifier: string): string {
|
||||
export function escapeIdentifier(identifier: string): string {
|
||||
// TODO: Review this function since the current implementation is off the cuff.
|
||||
const result = identifier.replace(/\\/g,"\\\\").replace(/"/g,'\\"');
|
||||
return `"${result}"`;
|
||||
@ -65,16 +65,34 @@ function validateTableName(tableName: TableName): TableName {
|
||||
throw new Error(`${tableName.join(".")} is not a valid table`);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param ts
|
||||
* @returns last section of a qualified table array. E.g. [a,b] -> [b]
|
||||
*/
|
||||
export function getTableNameSansSchema(ts: Array<string>): Array<string> {
|
||||
return [ts[ts.length-1]];
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param tableName: Unescaped table name. E.g. 'Alb"um'
|
||||
* @returns Escaped table name. E.g. '"Alb\"um"'
|
||||
*/
|
||||
function escapeTableName(tableName: TableName): string {
|
||||
export function escapeTableName(tableName: TableName): string {
|
||||
return validateTableName(tableName).map(escapeIdentifier).join(".");
|
||||
}
|
||||
|
||||
function json_object(relationships: TableRelationships[], fields: Fields, table: TableName, tableAlias: string): string {
|
||||
/**
 * @param tableName
 * @returns escaped tableName string with schema qualification removed
 *
 * This is useful in where clauses in returning statements where a qualified
 * table name is invalid SQLite SQL.
 */
export function escapeTableNameSansSchema(tableName: TableName): string {
  return escapeTableName(getTableNameSansSchema(tableName));
}
|
||||
|
||||
export function json_object(relationships: TableRelationships[], fields: Fields, table: TableName, tableAlias: string): string {
|
||||
const result = Object.entries(fields).map(([fieldName, field]) => {
|
||||
switch(field.type) {
|
||||
case "column":
|
||||
@ -97,7 +115,7 @@ function json_object(relationships: TableRelationships[], fields: Fields, table:
|
||||
return tag('json_object', `JSON_OBJECT(${result})`);
|
||||
}
|
||||
|
||||
function where_clause(relationships: TableRelationships[], expression: Expression, queryTableName: TableName, queryTableAlias: string): string {
|
||||
export function where_clause(relationships: TableRelationships[], expression: Expression, queryTableName: TableName, queryTableAlias: string): string {
|
||||
const generateWhere = (expression: Expression, currentTableName: TableName, currentTableAlias: string): string => {
|
||||
switch(expression.type) {
|
||||
case "not":
|
||||
@ -193,18 +211,21 @@ function calculateExistsJoinInfo(allTableRelationships: TableRelationships[], ex
|
||||
}
|
||||
|
||||
function generateRelationshipJoinComparisonFragments(relationship: Relationship, sourceTableAlias: string, targetTableAlias: string): string[] {
|
||||
const sourceTablePrefix = `${sourceTableAlias}.`;
|
||||
return Object
|
||||
.entries(relationship.column_mapping)
|
||||
.map(([sourceColumnName, targetColumnName]) =>
|
||||
`${sourceTableAlias}.${escapeIdentifier(sourceColumnName)} = ${targetTableAlias}.${escapeIdentifier(targetColumnName)}`);
|
||||
`${sourceTablePrefix}${escapeIdentifier(sourceColumnName)} = ${targetTableAlias}.${escapeIdentifier(targetColumnName)}`);
|
||||
}
|
||||
|
||||
function generateComparisonColumnFragment(comparisonColumn: ComparisonColumn, queryTableAlias: string, currentTableAlias: string): string {
|
||||
const path = comparisonColumn.path ?? [];
|
||||
const queryTablePrefix = queryTableAlias ? `${queryTableAlias}.` : '';
|
||||
const currentTablePrefix = currentTableAlias ? `${currentTableAlias}.` : '';
|
||||
if (path.length === 0) {
|
||||
return `${currentTableAlias}.${escapeIdentifier(comparisonColumn.name)}`
|
||||
return `${currentTablePrefix}${escapeIdentifier(comparisonColumn.name)}`
|
||||
} else if (path.length === 1 && path[0] === "$") {
|
||||
return `${queryTableAlias}.${escapeIdentifier(comparisonColumn.name)}`
|
||||
return `${queryTablePrefix}${escapeIdentifier(comparisonColumn.name)}`
|
||||
} else {
|
||||
throw new Error(`Unsupported path on ComparisonColumn: ${[...path, comparisonColumn.name].join(".")}`);
|
||||
}
|
||||
@ -625,8 +646,6 @@ function offset(o: number | null): string {
|
||||
}
|
||||
}
|
||||
|
||||
/** Top-Level Query Function.
|
||||
*/
|
||||
function query(request: QueryRequest): string {
|
||||
const result = table_query(
|
||||
request.table_relationships,
|
||||
@ -708,24 +727,21 @@ function tag(t: string, s: string): string {
|
||||
* ```
|
||||
*
|
||||
*/
|
||||
export async function queryData(config: Config, sqlLogger: SqlLogger, queryRequest: QueryRequest): Promise<QueryResponse | ErrorResponse> {
|
||||
export async function queryData(config: Config, sqlLogger: SqlLogger, queryRequest: QueryRequest): Promise<QueryResponse> {
|
||||
const db = connect(config, sqlLogger); // TODO: Should this be cached?
|
||||
const q = query(queryRequest);
|
||||
|
||||
if(q.length > QUERY_LENGTH_LIMIT) {
|
||||
const result: ErrorResponse =
|
||||
{
|
||||
message: `Generated SQL Query was too long (${q.length} > ${QUERY_LENGTH_LIMIT})`,
|
||||
details: {
|
||||
"query.length": q.length,
|
||||
"limit": QUERY_LENGTH_LIMIT
|
||||
}
|
||||
};
|
||||
return result;
|
||||
} else {
|
||||
const [result, metadata] = await db.query(q);
|
||||
return output(result);
|
||||
const error = new ErrorWithStatusCode(
|
||||
`Generated SQL Query was too long (${q.length} > ${QUERY_LENGTH_LIMIT})`,
|
||||
500,
|
||||
{ "query.length": q.length, "limit": QUERY_LENGTH_LIMIT }
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
|
||||
const [result, metadata] = await db.query(q);
|
||||
return output(result);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { TableName } from "@hasura/dc-api-types";
|
||||
import { ErrorResponseType, TableName } from "@hasura/dc-api-types";
|
||||
|
||||
export const coerceUndefinedToNull = <T>(v: T | undefined): T | null => v === undefined ? null : v;
|
||||
|
||||
@ -17,6 +17,14 @@ export const zip = <T, U>(arr1: T[], arr2: U[]): [T,U][] => {
|
||||
return newArray;
|
||||
};
|
||||
|
||||
export const mapObject = <T, U>(obj: Record<string, T>, fn: (entry: [string, T]) => [string, U]): Record<string, U> => {
|
||||
return Object.fromEntries(Object.entries(obj).map(fn));
|
||||
}
|
||||
|
||||
export const mapObjectToArray = <T, U>(obj: Record<string, T>, fn: (entry: [string, T], index: number) => U): Array<U> => {
|
||||
return Object.entries(obj).map(fn);
|
||||
}
|
||||
|
||||
export const crossProduct = <T, U>(arr1: T[], arr2: U[]): [T,U][] => {
|
||||
return arr1.flatMap(a1 => arr2.map<[T,U]>(a2 => [a1, a2]));
|
||||
};
|
||||
@ -25,12 +33,12 @@ export function last<T>(x: T[]): T {
|
||||
return x[x.length - 1];
|
||||
}
|
||||
|
||||
export function logDeep(msg: string, myObject: any): void {
|
||||
export function logDeep(msg: string, myObject: unknown): void {
|
||||
const util = require('util');
|
||||
console.log(msg, util.inspect(myObject, {showHidden: true, depth: null, colors: true}));
|
||||
}
|
||||
|
||||
export function isEmptyObject(obj: Record<string, any>): boolean {
|
||||
export function isEmptyObject(obj: Record<string, unknown>): boolean {
|
||||
return Object.keys(obj).length === 0;
|
||||
}
|
||||
|
||||
@ -54,3 +62,36 @@ export const stringArrayEquals = (arr1: string[]) => (arr2: string[]): boolean =
|
||||
|
||||
return zip(arr1, arr2).every(([n1, n2]) => n1 === n2);
|
||||
}
|
||||
|
||||
export class ErrorWithStatusCode extends Error {
|
||||
code: number;
|
||||
type: ErrorResponseType;
|
||||
details: Record<string, unknown>;
|
||||
constructor(message: string, code: number, details: Record<string, unknown>) {
|
||||
super(message);
|
||||
this.code = code;
|
||||
this.type = 'uncaught-error';
|
||||
this.details = details;
|
||||
}
|
||||
public static mutationPermissionCheckFailure(message: string, details: Record<string, unknown>): ErrorWithStatusCode {
|
||||
const cls = new ErrorWithStatusCode(message, 400, details);
|
||||
cls.type = 'mutation-permission-check-failure';
|
||||
return cls;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param inputSequence
|
||||
* @param asyncFn
|
||||
* @returns Promise<Array<Result>>
|
||||
*
|
||||
* This function exists to sequence promise generating inputs and a matching function.
|
||||
* Promise.all executes in parallel which is not always desired behaviour.
|
||||
*/
|
||||
export async function asyncSequenceFromInputs<Input, Result>(inputSequence: Input[], asyncFn: (input: Input) => Promise<Result>): Promise<Array<Result>> {
|
||||
const results = [];
|
||||
for (const input of inputSequence) {
|
||||
results.push(await asyncFn(input));
|
||||
}
|
||||
return results;
|
||||
}
|
@ -169,7 +169,7 @@ schemaInspectionTests opts = describe "Schema and Source Inspection" $ do
|
||||
nullable: false
|
||||
type: number
|
||||
insertable: *supportsInserts
|
||||
updatable: *supportsUpdates
|
||||
updatable: false
|
||||
- name: *artistId
|
||||
nullable: false
|
||||
type: number
|
||||
|
@ -273,6 +273,7 @@ spec TestData {..} Capabilities {..} = describe "Insert Mutations" $ do
|
||||
for_ _cRelationships $ \_relationshipCapabilities -> do
|
||||
usesDataset chinookTemplate $ it "can return rows from an object relationship" $ do
|
||||
let rows = take 1 newAcdcAlbums ++ take 1 newApocalypticaAlbums
|
||||
|
||||
let returning =
|
||||
Data.mkFieldsMap
|
||||
[ ("AlbumId", _tdColumnField _tdAlbumsTableName "AlbumId"),
|
||||
@ -290,10 +291,12 @@ spec TestData {..} Capabilities {..} = describe "Insert Mutations" $ do
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
let insertOperation =
|
||||
albumsInsertOperation
|
||||
& imoRows .~ rows
|
||||
& imoReturningFields .~ returning
|
||||
|
||||
let mutationRequest =
|
||||
Data.emptyMutationRequest
|
||||
& mrOperations .~ [InsertOperation insertOperation]
|
||||
@ -306,11 +309,13 @@ spec TestData {..} Capabilities {..} = describe "Insert Mutations" $ do
|
||||
let artist = (album ^? Data.field "ArtistId" . Data._ColumnFieldNumber) >>= \artistId -> _tdArtistsRowsById ^? ix artistId
|
||||
artistPropVal = maybeToList artist
|
||||
in Data.insertField "Artist" (mkSubqueryResponse artistPropVal) album
|
||||
|
||||
let removeArtistId = Data.deleteField "ArtistId"
|
||||
|
||||
let expectedRows =
|
||||
take 1 (expectedInsertedAcdcAlbums albumsStartingId)
|
||||
++ take 1 (expectedInsertedApocalypticaAlbums (albumsStartingId + 1))
|
||||
( take 1 (expectedInsertedAcdcAlbums albumsStartingId)
|
||||
++ take 1 (expectedInsertedApocalypticaAlbums (albumsStartingId + 1))
|
||||
)
|
||||
& fmap (joinInArtist >>> removeArtistId)
|
||||
|
||||
let expectedResult = MutationOperationResults 2 (Just expectedRows)
|
||||
|
@ -86,6 +86,13 @@ backendTypeConfig =
|
||||
subquery:
|
||||
supports_relations: true
|
||||
explain: {}
|
||||
mutations:
|
||||
atomicity_support_level: heterogeneous_operations
|
||||
delete: {}
|
||||
insert:
|
||||
supports_nested_inserts: true
|
||||
returning: {}
|
||||
update: {}
|
||||
metrics: {}
|
||||
raw: {}
|
||||
|],
|
||||
|
Loading…
Reference in New Issue
Block a user