Added weird table edge-case insert mutation tests to agent test suite

[GDC-719]: https://hasurahq.atlassian.net/browse/GDC-719?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/7964
GitOrigin-RevId: d3077aafd29f3b5553e00bf6bba4c1ff9966485d
This commit is contained in:
Daniel Chambers 2023-02-16 18:50:11 +11:00 committed by hasura-bot
parent b3553ac88d
commit ac475fa02e
11 changed files with 427 additions and 149 deletions

View File

@ -31,6 +31,12 @@ const parseNumbersInNumericColumns = (schema: SchemaResponse) => {
};
}
/**
 * Checks whether a static data file with the given name exists and is readable
 * alongside this module.
 *
 * @param name File name relative to this module's directory. The name is
 *             concatenated directly onto __dirname, so callers are expected to
 *             pass an already-sanitised name (no path separators) —
 *             TODO confirm all callers do this.
 * @returns true when the file exists and is readable, false on any access
 *          error (missing file, insufficient permissions, etc). Never rejects.
 */
export const staticDataExists = async (name: string): Promise<boolean> => {
  try {
    // fs.promises.access rejects when the R_OK permission check fails;
    // translate that into a boolean rather than hand-rolling a Promise
    // around the callback API.
    await fs.promises.access(__dirname + "/" + name, fs.constants.R_OK);
    return true;
  } catch {
    return false;
  }
}
export const loadStaticData = async (name: string): Promise<StaticData> => {
const gzipReadStream = fs.createReadStream(__dirname + "/" + name);
const unzipStream = stream.pipeline(gzipReadStream, zlib.createGunzip(), () => { });

View File

@ -1,14 +1,11 @@
import { DatasetDeleteCloneResponse, DatasetGetTemplateResponse, DatasetCreateCloneRequest, DatasetCreateCloneResponse, } from '@hasura/dc-api-types';
import { loadStaticData, StaticData } from './data';
import { loadStaticData, StaticData, staticDataExists } from './data';
export async function getDataset(name: string): Promise<DatasetGetTemplateResponse> {
const safePath = mkPath(name);
const data = await loadStaticData(safePath); // TODO: Could make this more efficient, but this works for now!
if(data) {
return { exists: true };
} else {
return { exists: false };
}
return {
exists: await staticDataExists(safePath)
};
}
export async function cloneDataset(store: Record<string, StaticData>, dbName: string, body: DatasetCreateCloneRequest): Promise<DatasetCreateCloneResponse> {

View File

@ -0,0 +1,35 @@
-- TestingEdgeCases dataset template: seed script defining tables that
-- exercise unusual insert scenarios in the agent mutation tests.
-- Syntax is SQLite (AUTOINCREMENT, bracketed identifiers also accepted).

-- A table without a primary key
CREATE TABLE NoPrimaryKey (
[FirstName] VARCHAR(40) NOT NULL,
[LastName] VARCHAR(40) NOT NULL
);
INSERT INTO NoPrimaryKey ([FirstName], [LastName]) VALUES ('Jean-Luc', 'Picard');
INSERT INTO NoPrimaryKey ([FirstName], [LastName]) VALUES ('Will', 'Riker');
INSERT INTO NoPrimaryKey ([FirstName], [LastName]) VALUES ('Geordi', 'La Forge');
INSERT INTO NoPrimaryKey ([FirstName], [LastName]) VALUES ('Deanna', 'Troi');
INSERT INTO NoPrimaryKey ([FirstName], [LastName]) VALUES ('Beverly', 'Crusher');
-- A table with a PK that can be fulfilled by a default value
-- (inserts may omit TimestampKey and let CURRENT_TIMESTAMP supply it)
CREATE TABLE DefaultedPrimaryKey (
[TimestampKey] DATETIME PRIMARY KEY DEFAULT CURRENT_TIMESTAMP,
[Message] VARCHAR(255)
);
INSERT INTO DefaultedPrimaryKey ([TimestampKey], [Message]) VALUES ('2023-02-13 12:23:00', 'Message 1');
INSERT INTO DefaultedPrimaryKey ([TimestampKey], [Message]) VALUES ('2023-02-13 13:12:01', 'Message 2');
INSERT INTO DefaultedPrimaryKey ([TimestampKey], [Message]) VALUES ('2023-02-13 16:54:02', 'Message 3');
INSERT INTO DefaultedPrimaryKey ([TimestampKey], [Message]) VALUES ('2023-02-13 17:31:03', 'Message 4');
-- A table where all columns can be generated by the database
-- via defaults, autoincrementing, or nullability
-- (an INSERT supplying no columns at all should succeed)
CREATE TABLE AllColumnsDefaultable (
[Id] INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
[Message] VARCHAR(255),
[Importance] INT DEFAULT 100
);
INSERT INTO AllColumnsDefaultable ([Message]) VALUES ('Message 1');
INSERT INTO AllColumnsDefaultable ([Message], [Importance]) VALUES ('Message 2', 200);
INSERT INTO AllColumnsDefaultable ([Message], [Importance]) VALUES ('Message 3', 50);

View File

@ -1,42 +1,44 @@
import { connect, SqlLogger } from './db';
import { connect, createDbMode, SqlLogger, withConnection } from './db';
import { DatasetDeleteCloneResponse, DatasetGetTemplateResponse, DatasetCreateCloneRequest, DatasetCreateCloneResponse, } from '@hasura/dc-api-types';
import { promises, existsSync } from 'fs';
import { access, constants, promises, existsSync } from 'fs';
import { DATASET_CLONES, DATASET_DELETE, DATASET_TEMPLATES } from "./environment";
import path from 'path';
export async function getDataset(template_name: string): Promise<DatasetGetTemplateResponse> {
const path = mkTemplatePath(template_name);
if(existsSync(path)) {
const stats = await promises.stat(path);
if(stats.isFile()) {
return { exists: true };
} else {
return { exists: false };
}
} else {
return { exists: false };
}
const templatePaths = mkTemplatePaths(template_name);
return {
exists: await fileIsReadable(templatePaths.dbFileTemplatePath) || await fileIsReadable(templatePaths.sqlFileTemplatePath)
};
}
export async function cloneDataset(logger: SqlLogger, clone_name: string, body: DatasetCreateCloneRequest): Promise<DatasetCreateCloneResponse> {
const fromPath = mkTemplatePath(body.from);
const templatePaths = mkTemplatePaths(body.from);
const toPath = mkClonePath(clone_name);
const fromStats = await promises.stat(fromPath);
const exists = existsSync(toPath);
if(fromStats.isFile() && ! exists) {
// Check if this is a real SQLite DB
const db = connect({ db: fromPath, explicit_main_schema: false, tables: [], meta: false }, logger);
if(db) {
const cloneExistsAlready = await fileIsReadable(toPath);
if (cloneExistsAlready) {
throw new Error("Dataset clone already exists");
}
if (await fileIsReadable(templatePaths.dbFileTemplatePath)) {
const db = connect({ db: templatePaths.dbFileTemplatePath, explicit_main_schema: false, tables: [], meta: false }, logger);
if (db) {
db.close();
} else {
throw(Error("Dataset is not an SQLite Database!"))
throw new Error("Dataset template is not a valid SQLite database!");
}
await promises.cp(fromPath, toPath);
await promises.cp(templatePaths.dbFileTemplatePath, toPath);
return { config: { db: toPath } };
} else if (await fileIsReadable(templatePaths.sqlFileTemplatePath)) {
const sql = await promises.readFile(templatePaths.sqlFileTemplatePath, { encoding: "utf-8" });
await withConnection({db: toPath, explicit_main_schema: false, tables: [], meta: false}, createDbMode, logger, async db => {
await db.withTransaction(async () => {
await db.exec(sql);
});
});
return { config: { db: toPath } };
} else if(exists) {
throw(Error("Dataset already exists!"))
} else {
throw(Error("Can't Clone!"))
throw new Error("Dataset template does not exist");
}
}
@ -60,13 +62,21 @@ export async function deleteDataset(clone_name: string): Promise<DatasetDeleteCl
}
}
function mkTemplatePath(name: string): string {
// The two on-disk forms a dataset template can take: a prebuilt SQLite
// database file, or a SQL script executed against a freshly-created database.
// Both paths are always computed; callers probe which one actually exists.
type TemplatePaths = {
dbFileTemplatePath: string,
sqlFileTemplatePath: string,
}
function mkTemplatePaths(name: string): TemplatePaths {
const parsed = path.parse(name);
const safeName = parsed.base;
if(name != safeName) {
throw(Error(`Template name ${name} is not valid.`));
}
return path.join(DATASET_TEMPLATES, safeName + ".sqlite");
return {
dbFileTemplatePath: path.join(DATASET_TEMPLATES, safeName + ".sqlite"),
sqlFileTemplatePath: path.join(DATASET_TEMPLATES, safeName + ".sql"),
};
}
function mkClonePath(name: string): string {
@ -77,3 +87,9 @@ function mkClonePath(name: string): string {
}
return path.join(DATASET_CLONES, safeName + ".sqlite");
}
/**
 * Checks whether the file at the given path exists and is readable by the
 * current process.
 *
 * @param filepath Path of the file to probe.
 * @returns true when the file can be read, false on any access error
 *          (missing file, insufficient permissions, etc). Never rejects.
 */
export const fileIsReadable = async (filepath: string): Promise<boolean> => {
  try {
    // Use the promise-based fs API already imported by this module instead of
    // hand-wrapping the callback form in a new Promise.
    await promises.access(filepath, constants.R_OK);
    return true;
  } catch {
    return false;
  }
}

View File

@ -11,7 +11,8 @@ export type SqlLogger = (sql: string) => void
const readMode = DB_READONLY ? SQLite.OPEN_READONLY : SQLite.OPEN_READWRITE;
const createMode = DB_CREATE ? SQLite.OPEN_CREATE : 0; // Flag style means 0=off
const cacheMode = DB_PRIVATECACHE ? SQLite.OPEN_PRIVATECACHE : SQLite.OPEN_SHAREDCACHE;
const mode = readMode | createMode | cacheMode;
export const defaultMode = readMode | createMode | cacheMode;
export const createDbMode = SQLite.OPEN_CREATE | readMode | cacheMode;
export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
if(DB_ALLOW_LIST != null) {
@ -23,27 +24,20 @@ export function connect(config: Config, sqlLogger: SqlLogger): Sequelize {
const db = new Sequelize({
dialect: 'sqlite',
storage: config.db,
dialectOptions: { mode: mode },
dialectOptions: { mode: defaultMode },
logging: sqlLogger
});
return db;
};
export type Connected = {
query: ((query: string, params?: Record<string, unknown>) => Promise<Array<any>>),
close: (() => Promise<boolean>)
export type Connection = {
query: (query: string, params?: Record<string, unknown>) => Promise<Array<any>>,
exec: (sql: string) => Promise<void>;
withTransaction: <Result>(action: () => Promise<Result>) => Promise<Result>
}
/**
* @param config: Config
* @param sqlLogger: SqlLogger
* @returns {query, mutation}
*
* Query and mutation support implemented directly on the SQLite3 library.
* See: https://github.com/TryGhost/node-sqlite3/wiki/API
*/
export function connect2(config: Config, sqlLogger: SqlLogger): Connected {
export async function withConnection<Result>(config: Config, mode: number, sqlLogger: SqlLogger, useConnection: (connection: Connection) => Promise<Result>): Promise<Result> {
if(DB_ALLOW_LIST != null) {
if(DB_ALLOW_LIST.includes(config.db)) {
throw new Error(`Database ${config.db} is not present in DB_ALLOW_LIST 😭`);
@ -51,9 +45,9 @@ export function connect2(config: Config, sqlLogger: SqlLogger): Connected {
}
const db_ = new SQLite.Database(config.db, mode);
// NOTE: Avoiding util.promisify as this seems to be causing connection failures.
const dbQueryPromise = (query: string, params?: Record<string, unknown>): Promise<Array<any>> => {
const query = (query: string, params?: Record<string, unknown>): Promise<Array<any>> => {
return new Promise((resolve, reject) => {
/* Pass named params:
* db.run("UPDATE tbl SET name = $name WHERE id = $id", {
@ -71,8 +65,35 @@ export function connect2(config: Config, sqlLogger: SqlLogger): Connected {
})
}
const dbClosePromise = (): Promise<boolean> => {
const exec = (sql: string): Promise<void> => {
return new Promise((resolve, reject) => {
db_.exec(sql, err => {
if (err) {
reject(err);
} else {
resolve();
}
})
})
};
const withTransaction = async <Result>(action: () => Promise<Result>): Promise<Result> => {
await exec("BEGIN TRANSACTION");
try {
const result = await action();
await exec("COMMIT");
return result;
} catch (err) {
await exec("ROLLBACK")
throw err;
}
}
try {
return await useConnection({ query, exec, withTransaction });
}
finally {
await new Promise((resolve, reject) => {
db_.close((err) => {
if (err) {
return reject(err);
@ -80,11 +101,6 @@ export function connect2(config: Config, sqlLogger: SqlLogger): Connected {
resolve(true); // What should we resolve with if there's no data to promise?
}
})
})
});
}
return {
query: dbQueryPromise,
close: dbClosePromise,
};
}

View File

@ -1,6 +1,6 @@
import { ArrayRelationInsertFieldValue, ColumnInsertFieldValue, DeleteMutationOperation, Expression, Field, InsertFieldSchema, InsertMutationOperation, MutationOperation, MutationOperationResults, MutationRequest, MutationResponse, ObjectRelationInsertFieldValue, QueryRequest, RowObject, RowUpdate, TableInsertSchema, TableName, TableRelationships, UpdateMutationOperation } from "@hasura/dc-api-types";
import { Config } from "./config";
import { connect2, Connected, SqlLogger } from "./db";
import { Connection, defaultMode, SqlLogger, withConnection } from "./db";
import { escapeIdentifier, escapeTableName, escapeTableNameSansSchema, json_object, where_clause, } from "./query";
import { asyncSequenceFromInputs, ErrorWithStatusCode, mapObjectToArray, tableNameEquals, unreachable, zip } from "./util";
@ -79,8 +79,8 @@ function columnsString(infos: Array<RowInfo>): string {
}
/**
* @param schemas
* @param table
* @param schemas
* @param table
* @returns schema | null
*/
function getTableInsertSchema(schemas: Array<TableInsertSchema>, table: TableName): TableInsertSchema | null {
@ -94,10 +94,10 @@ function getTableInsertSchema(schemas: Array<TableInsertSchema>, table: TableNam
}
/**
*
* @param e
*
* @param e
* @returns boolean check on returned data
*
*
* Note: The heavy lifting is performed by `where_clause` from query.ts
*/
function whereString(relationships: Array<TableRelationships>, e: Expression, table: TableName): string {
@ -106,9 +106,9 @@ function whereString(relationships: Array<TableRelationships>, e: Expression, ta
}
/**
* @param op
* @param op
* @returns SQLite expression that can be used in RETURNING clauses
*
*
* The `json_object` function from query.ts performs the heavy lifting here.
*/
function returningString(relationships: Array<TableRelationships>, fields: Record<string, Field>, table: TableName): string {
@ -169,9 +169,9 @@ function updateString(relationships: Array<TableRelationships>, op: UpdateMutati
/**
* @param schemas
* @param op
* @param op
* @returns Nested Array of RowInfo
*
*
* This function compiles all the useful information for constructing query-strings and variable data
* into arrays of RowInfo packets. It is done this way to avoid repeated lookups and to keep the alignment
* of identifiers, variables, and data in sync.
@ -209,7 +209,7 @@ function getUpdateRowInfos(op: UpdateMutationOperation): Array<UpdateInfo> {
});
}
async function insertRow(db: Connected, relationships: Array<TableRelationships>, op: InsertMutationOperation, info: Array<RowInfo>): Promise<Array<Row>> {
async function insertRow(db: Connection, relationships: Array<TableRelationships>, op: InsertMutationOperation, info: Array<RowInfo>): Promise<Array<Row>> {
const q = insertString(relationships, op, info);
const v = queryValues(info);
const results = await db.query(q,v);
@ -222,7 +222,7 @@ async function insertRow(db: Connected, relationships: Array<TableRelationships>
return results;
}
async function updateRow(db: Connected, relationships: Array<TableRelationships>, op: UpdateMutationOperation, info: Array<UpdateInfo>): Promise<Array<Row>> {
async function updateRow(db: Connection, relationships: Array<TableRelationships>, op: UpdateMutationOperation, info: Array<UpdateInfo>): Promise<Array<Row>> {
const q = updateString(relationships, op, info);
const v = queryValues(info);
const results = await db.query(q,v);
@ -235,7 +235,7 @@ async function updateRow(db: Connected, relationships: Array<TableRelationships>
return results;
}
async function deleteRows(db: Connected, relationships: Array<TableRelationships>, op: DeleteMutationOperation): Promise<Array<Row>> {
async function deleteRows(db: Connection, relationships: Array<TableRelationships>, op: DeleteMutationOperation): Promise<Array<Row>> {
const q = deleteString(relationships, op);
const results = await db.query(q);
return results;
@ -248,7 +248,7 @@ function postMutationCheckError(op: MutationOperation, failed: Array<Row>): Erro
);
}
async function mutationOperation(db: Connected, relationships: Array<TableRelationships>, schema: Array<TableInsertSchema>, op: MutationOperation): Promise<MutationOperationResults> {
async function mutationOperation(db: Connection, relationships: Array<TableRelationships>, schema: Array<TableInsertSchema>, op: MutationOperation): Promise<MutationOperationResults> {
switch(op.type) {
case 'insert':
const infos = getInsertRowInfos(schema, op);
@ -313,18 +313,19 @@ async function mutationOperation(db: Connected, relationships: Array<TableRelati
}
/**
* @param config
* @param sqlLogger
* @param request
* @param config
* @param sqlLogger
* @param request
* @returns MutationResponse
*
*
* Top-Level function for mutations.
* This performs inserts/updates/deletes.
*/
export async function runMutation(config: Config, sqlLogger: SqlLogger, request: MutationRequest): Promise<MutationResponse> {
const db = connect2(config, sqlLogger);
const resultSet = await asyncSequenceFromInputs(request.operations, (op) => mutationOperation(db, request.table_relationships, request.insert_schema, op));
return {
operation_results: resultSet
};
return await withConnection(config, defaultMode, sqlLogger, async db => {
const resultSet = await asyncSequenceFromInputs(request.operations, (op) => mutationOperation(db, request.table_relationships, request.insert_schema, op));
return {
operation_results: resultSet
};
});
}

View File

@ -14,9 +14,9 @@ import Servant.Client ((//))
import System.Environment (withArgs)
import Test.AgentAPI (guardCapabilitiesResponse, guardSchemaResponse, mergeAgentConfig)
import Test.AgentClient (AgentIOClient (..), introduceAgentClient, mkAgentClientConfig, mkAgentIOClient)
import Test.AgentDatasets (DatasetCloneInfo (..), chinookTemplate, createClone, deleteClone, usesDataset)
import Test.AgentDatasets (DatasetCloneInfo (..), chinookTemplate, createClone, deleteClone, testingEdgeCasesTemplate, usesDataset)
import Test.AgentTestContext (AgentTestContext (..), introduceAgentTestContext)
import Test.Data (TestData, mkTestData)
import Test.Data (EdgeCasesTestData, TestData, mkEdgeCasesTestData, mkTestData)
import Test.DataExport (exportData)
import Test.Sandwich (runSandwichWithCommandLineArgs)
import Test.Sandwich.Options qualified as Sandwich
@ -36,8 +36,8 @@ import Prelude
testSourceName :: API.SourceName
testSourceName = "dc-api-tests"
tests :: TestData -> API.CapabilitiesResponse -> AgentTestSpec
tests testData capabilitiesResponse@API.CapabilitiesResponse {..} = do
tests :: TestData -> Maybe EdgeCasesTestData -> API.CapabilitiesResponse -> AgentTestSpec
tests testData edgeCasesTestData capabilitiesResponse@API.CapabilitiesResponse {..} = do
usesDataset chinookTemplate $ do
Test.Specs.HealthSpec.spec
Test.Specs.CapabilitiesSpec.spec capabilitiesResponse
@ -46,23 +46,39 @@ tests testData capabilitiesResponse@API.CapabilitiesResponse {..} = do
Test.Specs.ErrorSpec.spec testData
for_ (API._cMetrics _crCapabilities) \m -> Test.Specs.MetricsSpec.spec m
for_ (API._cExplain _crCapabilities) \_ -> Test.Specs.ExplainSpec.spec testData _crCapabilities
for_ (API._cMutations _crCapabilities) \_ -> Test.Specs.MutationSpec.spec testData _crCapabilities
for_ (API._cMutations _crCapabilities) \_ -> Test.Specs.MutationSpec.spec testData edgeCasesTestData _crCapabilities
-- | Creates a temporary clone of the given dataset template via the agent,
-- fetches the clone's schema, and deletes the clone again. 'bracket'
-- guarantees the clone is deleted even if the schema request fails.
getCloneSchema :: Maybe API.Config -> API.DatasetTemplateName -> AgentIOClient -> IO API.SchemaResponse
getCloneSchema mergeConfig datasetTemplate (AgentIOClient agentClient) =
bracket
(createClone agentClient datasetTemplate)
(deleteClone agentClient)
( \DatasetCloneInfo {..} ->
(agentClient // API._schema) testSourceName (mergeAgentConfig _dciAgentConfig mergeConfig) >>= guardSchemaResponse
)
getChinookSchema :: API.Capabilities -> AgentConfig -> AgentIOClient -> IO API.SchemaResponse
getChinookSchema API.Capabilities {..} manuallyProvidedConfig (AgentIOClient agentClient) = do
case manuallyProvidedConfig of
getChinookSchema API.Capabilities {..} agentConfig agentIOClient@(AgentIOClient agentClient) = do
case agentConfig of
ManualConfig config -> (agentClient // API._schema) testSourceName config >>= guardSchemaResponse
DatasetConfig mergeConfig ->
if isJust _cDatasets
then
bracket
(createClone agentClient chinookTemplate)
(deleteClone agentClient)
( \DatasetCloneInfo {..} ->
(agentClient // API._schema) testSourceName (mergeAgentConfig _dciAgentConfig mergeConfig) >>= guardSchemaResponse
)
then getCloneSchema mergeConfig chinookTemplate agentIOClient
else fail $ "The agent does not support datasets, therefore an agent configuration must be provided on the command line (--agent-config)"
-- | Fetches the schema of the TestingEdgeCases dataset template, when
-- available. Returns 'Nothing' when the agent config was provided manually
-- (no dataset support assumed), when the agent lacks the datasets capability,
-- or when the agent does not ship the TestingEdgeCases template — the
-- edge-case tests are skipped in those situations.
getTestingEdgeCasesSchema :: API.Capabilities -> AgentConfig -> AgentIOClient -> IO (Maybe API.SchemaResponse)
getTestingEdgeCasesSchema API.Capabilities {..} agentConfig agentIOClient@(AgentIOClient agentClient) = do
case agentConfig of
ManualConfig _config -> pure Nothing
DatasetConfig mergeConfig ->
if isJust _cDatasets
then do
-- Probe for the template first; clone-and-fetch only if it exists.
API.DatasetGetTemplateResponse {..} <- (agentClient // API._datasets // API._getTemplate) testingEdgeCasesTemplate
if _dgtrExists
then Just <$> getCloneSchema mergeConfig testingEdgeCasesTemplate agentIOClient
else pure Nothing
else pure Nothing
main :: IO ()
main = do
command <- parseCommandLine
@ -71,13 +87,15 @@ main = do
agentIOClient@(AgentIOClient agentClient) <- mkAgentIOClient _toSensitiveOutputHandling (_aoAgentBaseUrl _toAgentOptions)
agentCapabilities <- (agentClient // API._capabilities) >>= guardCapabilitiesResponse
chinookSchema <- getChinookSchema (API._crCapabilities agentCapabilities) (_aoAgentConfig _toAgentOptions) agentIOClient
testingEdgeCasesSchema <- getTestingEdgeCasesSchema (API._crCapabilities agentCapabilities) (_aoAgentConfig _toAgentOptions) agentIOClient
agentClientConfig <- mkAgentClientConfig _toSensitiveOutputHandling (_aoAgentBaseUrl _toAgentOptions)
let testData = mkTestData chinookSchema _toTestConfig
let edgeCasesTestData = mkEdgeCasesTestData _toTestConfig <$> testingEdgeCasesSchema
let testContext = AgentTestContext testSourceName agentCapabilities (_aoAgentConfig _toAgentOptions)
runSandwichWithCommandLineArgs Sandwich.defaultOptions $
introduceAgentTestContext testContext . introduceAgentClient agentClientConfig $
tests testData agentCapabilities
tests testData edgeCasesTestData agentCapabilities
pure ()
ExportOpenAPISpec ->
Text.putStrLn $ encodeToLazyText openApiSchema

View File

@ -5,6 +5,7 @@ module Test.AgentDatasets
DatasetCloneInfo (..),
usesDataset,
chinookTemplate,
testingEdgeCasesTemplate,
HasDatasetContext,
getDatasetContext,
createClone,
@ -33,6 +34,9 @@ import Prelude
-- | Name of the standard Chinook dataset template used by most agent tests.
chinookTemplate :: API.DatasetTemplateName
chinookTemplate = API.DatasetTemplateName "Chinook"
-- | Name of the dataset template containing edge-case tables (tables without
-- primary keys, defaultable columns, etc) used by the mutation edge-case
-- tests. Agents are not required to provide this template.
testingEdgeCasesTemplate :: API.DatasetTemplateName
testingEdgeCasesTemplate = API.DatasetTemplateName "TestingEdgeCases"
-------------------------------------------------------------------------------
data DatasetContext = DatasetContext

View File

@ -2,11 +2,14 @@
{-# LANGUAGE TemplateHaskell #-}
module Test.Data
( -- = Test Data
( -- = Chinook Test Data
TestData (..),
mkTestData,
schemaTables,
allTableRows,
-- = TestingEdgeCases Test Data
EdgeCasesTestData (..),
mkEdgeCasesTestData,
-- = Utilities
emptyQuery,
emptyMutationRequest,
@ -357,94 +360,113 @@ data TestData = TestData
_tdOrderByColumn :: [API.RelationshipName] -> Text -> API.OrderDirection -> API.OrderByElement
}
-- | Test data from the Chinook dataset template
mkTestData :: API.SchemaResponse -> TestConfig -> TestData
mkTestData schemaResponse TestConfig {..} =
mkTestData schemaResponse testConfig =
TestData
{ _tdSchemaTables = formatTableInfo <$> schemaTables,
_tdArtistsTableName = formatTableName artistsTableName,
_tdArtistsTableName = formatTableName testConfig artistsTableName,
_tdArtistsRows = artistsRows,
_tdArtistsRowsById = artistsRowsById,
_tdArtistsTableRelationships = formatTableRelationships artistsTableRelationships,
_tdAlbumsRelationshipName = albumsRelationshipName,
_tdAlbumsTableName = formatTableName albumsTableName,
_tdAlbumsTableName = formatTableName testConfig albumsTableName,
_tdAlbumsRows = albumsRows,
_tdAlbumsRowsById = albumsRowsById,
_tdAlbumsTableRelationships = formatTableRelationships albumsTableRelationships,
_tdArtistRelationshipName = artistRelationshipName,
_tdTracksRelationshipName = tracksRelationshipName,
_tdCustomersTableName = formatTableName customersTableName,
_tdCustomersTableName = formatTableName testConfig customersTableName,
_tdCustomersRows = customersRows,
_tdCustomersTableRelationships = formatTableRelationships customersTableRelationships,
_tdSupportRepRelationshipName = supportRepRelationshipName,
_tdEmployeesTableName = formatTableName employeesTableName,
_tdEmployeesTableName = formatTableName testConfig employeesTableName,
_tdEmployeesRows = employeesRows,
_tdEmployeesRowsById = employeesRowsById,
_tdEmployeesTableRelationships = formatTableRelationships employeesTableRelationships,
_tdSupportRepForCustomersRelationshipName = supportRepForCustomersRelationshipName,
_tdInvoicesTableName = formatTableName invoicesTableName,
_tdInvoicesTableName = formatTableName testConfig invoicesTableName,
_tdInvoicesRows = invoicesRows,
_tdInvoiceLinesTableName = formatTableName invoiceLinesTableName,
_tdInvoiceLinesTableName = formatTableName testConfig invoiceLinesTableName,
_tdInvoiceLinesRows = invoiceLinesRows,
_tdMediaTypesTableName = formatTableName mediaTypesTableName,
_tdMediaTypesTableName = formatTableName testConfig mediaTypesTableName,
_tdMediaTypesRows = mediaTypesRows,
_tdTracksTableName = formatTableName tracksTableName,
_tdTracksTableName = formatTableName testConfig tracksTableName,
_tdTracksRows = tracksRows,
_tdTracksTableRelationships = formatTableRelationships tracksTableRelationships,
_tdInvoiceLinesRelationshipName = invoiceLinesRelationshipName,
_tdMediaTypeRelationshipName = mediaTypeRelationshipName,
_tdAlbumRelationshipName = albumRelationshipName,
_tdGenreRelationshipName = genreRelationshipName,
_tdGenresTableName = formatTableName genresTableName,
_tdGenresTableName = formatTableName testConfig genresTableName,
_tdGenresRows = genresRows,
_tdGenresTableRelationships = formatTableRelationships genresTableRelationships,
_tdColumnName = formatColumnName . API.ColumnName,
_tdColumnField = columnField,
_tdColumnInsertSchema = columnInsertSchema,
_tdFindColumnScalarType = \tableName name -> findColumnScalarType schemaResponse tableName (formatColumnName $ API.ColumnName name),
_tdQueryComparisonColumn = API.ComparisonColumn API.QueryTable . formatColumnName . API.ColumnName,
_tdCurrentComparisonColumn = API.ComparisonColumn API.CurrentTable . formatColumnName . API.ColumnName,
_tdOrderByColumn = \targetPath name -> orderByColumn targetPath (formatColumnName $ API.ColumnName name)
_tdColumnName = formatColumnName testConfig . API.ColumnName,
_tdColumnField = columnField schemaResponse testConfig,
_tdColumnInsertSchema = columnInsertSchema schemaResponse testConfig,
_tdFindColumnScalarType = \tableName name -> findColumnScalarType schemaResponse tableName (formatColumnName testConfig $ API.ColumnName name),
_tdQueryComparisonColumn = API.ComparisonColumn API.QueryTable . formatColumnName testConfig . API.ColumnName,
_tdCurrentComparisonColumn = API.ComparisonColumn API.CurrentTable . formatColumnName testConfig . API.ColumnName,
_tdOrderByColumn = \targetPath name -> orderByColumn targetPath (formatColumnName testConfig $ API.ColumnName name)
}
where
formatTableName :: API.TableName -> API.TableName
formatTableName = applyTableNamePrefix _tcTableNamePrefix . API.TableName . fmap (applyNameCasing _tcTableNameCasing) . API.unTableName
formatTableRelationships :: API.TableRelationships -> API.TableRelationships
formatTableRelationships =
prefixTableRelationships
>>> API.trRelationships . traverse . API.rColumnMapping %~ (HashMap.toList >>> fmap (bimap formatColumnName formatColumnName) >>> HashMap.fromList)
formatColumnName :: API.ColumnName -> API.ColumnName
formatColumnName = API.ColumnName . applyNameCasing _tcColumnNameCasing . API.unColumnName
>>> API.trRelationships . traverse . API.rColumnMapping %~ (HashMap.toList >>> fmap (bimap (formatColumnName testConfig) (formatColumnName testConfig)) >>> HashMap.fromList)
prefixTableRelationships :: API.TableRelationships -> API.TableRelationships
prefixTableRelationships =
API.trSourceTable %~ formatTableName
>>> API.trRelationships . traverse . API.rTargetTable %~ formatTableName
API.trSourceTable %~ formatTableName testConfig
>>> API.trRelationships . traverse . API.rTargetTable %~ formatTableName testConfig
formatTableInfo :: API.TableInfo -> API.TableInfo
formatTableInfo =
API.tiName %~ formatTableName
>>> API.tiColumns . traverse . API.ciName %~ formatColumnName
>>> API.tiPrimaryKey . traverse %~ formatColumnName
API.tiName %~ formatTableName testConfig
>>> API.tiColumns . traverse . API.ciName %~ formatColumnName testConfig
>>> API.tiPrimaryKey . traverse %~ formatColumnName testConfig
>>> API.tiForeignKeys . lens API.unForeignKeys (const API.ForeignKeys) . traverse
%~ ( API.cForeignTable %~ formatTableName
>>> API.cColumnMapping %~ (HashMap.toList >>> fmap (bimap formatColumnName formatColumnName) >>> HashMap.fromList)
%~ ( API.cForeignTable %~ formatTableName testConfig
>>> API.cColumnMapping %~ (HashMap.toList >>> fmap (bimap (formatColumnName testConfig) (formatColumnName testConfig)) >>> HashMap.fromList)
)
columnField :: API.TableName -> Text -> API.Field
columnField tableName columnName =
API.ColumnField columnName' scalarType
where
columnName' = formatColumnName $ API.ColumnName columnName
scalarType = findColumnScalarType schemaResponse tableName columnName'
-- | Test data from the TestingEdgeCases dataset template.
-- Unlike 'TestData', this may be absent at runtime (the template is optional),
-- so consumers receive it as @Maybe EdgeCasesTestData@ and use
-- '_ectdTableExists' to guard individual tests.
data EdgeCasesTestData = EdgeCasesTestData
{ -- = NoPrimaryKey table
_ectdNoPrimaryKeyTableName :: API.TableName,
-- = DefaultedPrimaryKey table
_ectdDefaultedPrimaryKeyTableName :: API.TableName,
-- = AllColumnsDefaultable table
_ectdAllColumnsDefaultableTableName :: API.TableName,
-- = Scalar Types
_ectdFindColumnScalarType :: API.TableName -> Text -> API.ScalarType,
-- = Utility functions
_ectdTableExists :: API.TableName -> Bool,
_ectdColumnField :: API.TableName -> Text -> API.Field,
_ectdColumnInsertSchema :: API.TableName -> Text -> API.ColumnInsertSchema
}
columnInsertSchema :: API.TableName -> Text -> API.ColumnInsertSchema
columnInsertSchema tableName columnName =
API.ColumnInsertSchema columnName' scalarType
where
columnName' = formatColumnName $ API.ColumnName columnName
scalarType = findColumnScalarType schemaResponse tableName columnName'
-- | Builds the edge-case test data from the TestingEdgeCases template's
-- schema, applying the name prefixing/casing rules from the 'TestConfig' so
-- table and column names match what the agent actually exposes.
mkEdgeCasesTestData :: TestConfig -> API.SchemaResponse -> EdgeCasesTestData
mkEdgeCasesTestData testConfig schemaResponse =
EdgeCasesTestData
{ _ectdNoPrimaryKeyTableName = noPrimaryKeyTableName,
_ectdDefaultedPrimaryKeyTableName = defaultedPrimaryKeyTableName,
_ectdAllColumnsDefaultableTableName = allColumnsDefaultableTableName,
_ectdFindColumnScalarType = \tableName name -> findColumnScalarType schemaResponse tableName (formatColumnName testConfig $ API.ColumnName name),
_ectdTableExists = tableExists,
_ectdColumnField = columnField schemaResponse testConfig,
_ectdColumnInsertSchema = columnInsertSchema schemaResponse testConfig
}
where
-- A table "exists" iff it appears in the schema the agent returned.
tableExists :: API.TableName -> Bool
tableExists tableName = tableName `elem` (API._tiName <$> API._srTables schemaResponse)
noPrimaryKeyTableName = formatTableName testConfig (API.TableName $ "NoPrimaryKey" :| [])
defaultedPrimaryKeyTableName = formatTableName testConfig (API.TableName $ "DefaultedPrimaryKey" :| [])
allColumnsDefaultableTableName = formatTableName testConfig (API.TableName $ "AllColumnsDefaultable" :| [])
-- | Applies the configured table name prefix and casing to a table name.
formatTableName :: TestConfig -> API.TableName -> API.TableName
formatTableName TestConfig {..} = applyTableNamePrefix _tcTableNamePrefix . API.TableName . fmap (applyNameCasing _tcTableNameCasing) . API.unTableName
applyTableNamePrefix :: [Text] -> API.TableName -> API.TableName
applyTableNamePrefix prefix tableName@(API.TableName rawTableName) =
@ -458,6 +480,23 @@ applyNameCasing casing text = case casing of
Lowercase -> Text.toLower text
Uppercase -> Text.toUpper text
-- | Applies the configured column name casing to a column name.
formatColumnName :: TestConfig -> API.ColumnName -> API.ColumnName
formatColumnName TestConfig {..} = API.ColumnName . applyNameCasing _tcColumnNameCasing . API.unColumnName
-- | Builds a query 'API.Field' for a column, formatting the name per the
-- 'TestConfig' and looking the scalar type up in the schema response.
columnField :: API.SchemaResponse -> TestConfig -> API.TableName -> Text -> API.Field
columnField schemaResponse testConfig tableName columnName =
API.ColumnField columnName' scalarType
where
columnName' = formatColumnName testConfig $ API.ColumnName columnName
scalarType = findColumnScalarType schemaResponse tableName columnName'
-- | Builds an 'API.ColumnInsertSchema' for a column, formatting the name per
-- the 'TestConfig' and looking the scalar type up in the schema response.
columnInsertSchema :: API.SchemaResponse -> TestConfig -> API.TableName -> Text -> API.ColumnInsertSchema
columnInsertSchema schemaResponse testConfig tableName columnName =
API.ColumnInsertSchema columnName' scalarType
where
columnName' = formatColumnName testConfig $ API.ColumnName columnName
scalarType = findColumnScalarType schemaResponse tableName columnName'
findColumnScalarType :: API.SchemaResponse -> API.TableName -> API.ColumnName -> API.ScalarType
findColumnScalarType API.SchemaResponse {..} tableName columnName =
maybe (error $ "Can't find the scalar type of column " <> show columnName <> " in table " <> show tableName) API._ciType columnInfo

View File

@ -5,14 +5,14 @@ where
import Data.Foldable (for_)
import Hasura.Backends.DataConnector.API
import Test.Data (TestData)
import Test.Data (EdgeCasesTestData, TestData)
import Test.Sandwich (describe)
import Test.Specs.MutationSpec.InsertSpec qualified as InsertSpec
import Test.TestHelpers (AgentTestSpec)
import Prelude
spec :: TestData -> Capabilities -> AgentTestSpec
spec testData capabilities@Capabilities {..} = do
spec :: TestData -> Maybe EdgeCasesTestData -> Capabilities -> AgentTestSpec
spec testData edgeCasesTestData capabilities@Capabilities {..} = do
describe "mutation API" do
for_ (_cMutations >>= _mcInsertCapabilities) $ \_insertCapabilities ->
InsertSpec.spec testData capabilities
InsertSpec.spec testData edgeCasesTestData capabilities

View File

@ -1,8 +1,11 @@
module Test.Specs.MutationSpec.InsertSpec (spec) where
import Control.Arrow ((>>>))
import Control.Lens (ix, (&), (.~), (?~), (^?))
import Control.Lens (ix, (&), (.~), (?~), (^?), _Just)
import Control.Monad (when)
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Free (Free)
import Control.Monad.IO.Class (MonadIO)
import Data.Aeson qualified as J
import Data.Foldable (for_)
import Data.Functor ((<&>))
@ -12,18 +15,23 @@ import Data.List (sortOn)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Maybe (fromMaybe, isJust, maybeToList)
import Data.Scientific (Scientific)
import Data.Text qualified as Text
import GHC.Stack (HasCallStack)
import Hasura.Backends.DataConnector.API
import Test.AgentAPI (mutationExpectError, mutationGuarded)
import Test.AgentDatasets (chinookTemplate, usesDataset)
import Test.Data (TestData (..))
import Test.AgentClient (AgentClientT, HasAgentClient)
import Test.AgentDatasets (HasDatasetContext, chinookTemplate, testingEdgeCasesTemplate, usesDataset)
import Test.AgentTestContext
import Test.Data (EdgeCasesTestData (..), TestData (..))
import Test.Data qualified as Data
import Test.Expectations (mutationResponseShouldBe)
import Test.Sandwich (describe, shouldBe)
import Test.Sandwich (ExampleT, HasBaseContext, describe, pendingWith, shouldBe)
import Test.Sandwich.Internal (SpecCommand)
import Test.TestHelpers (AgentTestSpec, it)
import Prelude
spec :: TestData -> Capabilities -> AgentTestSpec
spec TestData {..} Capabilities {..} = describe "Insert Mutations" $ do
spec :: TestData -> Maybe EdgeCasesTestData -> Capabilities -> AgentTestSpec
spec TestData {..} edgeCasesTestData Capabilities {..} = describe "Insert Mutations" $ do
usesDataset chinookTemplate $ it "can insert a single row" $ do
let insertOperation = artistsInsertOperation & imoRows .~ take 1 newArtists
let mutationRequest =
@ -394,11 +402,149 @@ spec TestData {..} Capabilities {..} = describe "Insert Mutations" $ do
let expectedResult = MutationOperationResults 1 (Just expectedRows)
response `mutationResponseShouldBe` MutationResponse [expectedResult]
-- Edge-case inserts exercising unusual table shapes that the standard
-- Chinook dataset does not cover. Each test is gated by 'edgeCaseTest',
-- which marks it pending when the edge-cases dataset or table is absent.
describe "edge cases" $ do
-- Inserting into a table that has no primary key at all: two rows are
-- inserted and both are expected to be echoed back via returning fields.
edgeCaseTest _ectdNoPrimaryKeyTableName "can insert into a table with no primary key" $ \EdgeCasesTestData {..} -> do
let rows =
[ RowObject . Data.mkFieldsMap $
[ ("FirstName", mkColumnInsertFieldValue $ J.String "James"),
("LastName", mkColumnInsertFieldValue $ J.String "Kirk")
],
RowObject . Data.mkFieldsMap $
[ ("FirstName", mkColumnInsertFieldValue $ J.String "Christopher"),
("LastName", mkColumnInsertFieldValue $ J.String "Pike")
]
]
-- Ask the agent to return both columns of each inserted row.
let returning =
Data.mkFieldsMap
[ ("FirstName", _ectdColumnField _ectdNoPrimaryKeyTableName "FirstName"),
("LastName", _ectdColumnField _ectdNoPrimaryKeyTableName "LastName")
]
let insertOperation =
mkInsertOperation _ectdNoPrimaryKeyTableName
& imoRows .~ rows
& imoReturningFields .~ returning
-- Declare the insertable columns so the agent can type-check the rows.
let insertSchema =
TableInsertSchema _ectdNoPrimaryKeyTableName $
HashMap.fromList
[ (FieldName "FirstName", ColumnInsert (_ectdColumnInsertSchema _ectdNoPrimaryKeyTableName "FirstName")),
(FieldName "LastName", ColumnInsert (_ectdColumnInsertSchema _ectdNoPrimaryKeyTableName "LastName"))
]
let mutationRequest =
Data.emptyMutationRequest
& mrOperations .~ [InsertOperation insertOperation]
& mrInsertSchema .~ [insertSchema]
response <- mutationGuarded mutationRequest
-- Expect both rows back, with the same values that were inserted.
let expectedRows =
[ Data.mkFieldsMap $
[ ("FirstName", mkColumnFieldValue $ J.String "James"),
("LastName", mkColumnFieldValue $ J.String "Kirk")
],
Data.mkFieldsMap $
[ ("FirstName", mkColumnFieldValue $ J.String "Christopher"),
("LastName", mkColumnFieldValue $ J.String "Pike")
]
]
-- Affected-row count of 2 plus the returned rows.
let expectedResult = MutationOperationResults 2 (Just expectedRows)
response `mutationResponseShouldBe` MutationResponse [expectedResult]
-- Inserting into a table whose primary key column has a database default
-- (a timestamp): the row supplies only "Message" and the key is generated
-- by the database.
edgeCaseTest _ectdDefaultedPrimaryKeyTableName "can insert into a table with a defaulted primary key" $ \EdgeCasesTestData {..} -> do
let rows =
[ RowObject . Data.mkFieldsMap $
[ ("Message", mkColumnInsertFieldValue $ J.String "A message")
]
]
-- Return both the generated key and the supplied message.
let returning =
Data.mkFieldsMap
[ ("TimestampKey", _ectdColumnField _ectdDefaultedPrimaryKeyTableName "TimestampKey"),
("Message", _ectdColumnField _ectdDefaultedPrimaryKeyTableName "Message")
]
let insertOperation =
mkInsertOperation _ectdDefaultedPrimaryKeyTableName
& imoRows .~ rows
& imoReturningFields .~ returning
-- Only "Message" is declared insertable; "TimestampKey" is DB-generated.
let insertSchema =
TableInsertSchema _ectdDefaultedPrimaryKeyTableName $
HashMap.fromList
[ (FieldName "Message", ColumnInsert (_ectdColumnInsertSchema _ectdDefaultedPrimaryKeyTableName "Message"))
]
let mutationRequest =
Data.emptyMutationRequest
& mrOperations .~ [InsertOperation insertOperation]
& mrInsertSchema .~ [insertSchema]
response <- mutationGuarded mutationRequest
-- The generated key value cannot be predicted, so extract it from the
-- actual response and splice it into the expected row.
let dbGeneratedTimestampValue = response ^? mrOperationResults . ix 0 . morReturning . _Just . ix 0 . Data.field "TimestampKey" . Data._ColumnFieldString
let expectedRow =
Data.mkFieldsMap $
-- Use the timestamp generated by the DB as the expected value, if it exists, or use a placeholder as a fallback to fail against
[ ("TimestampKey", mkColumnFieldValue $ J.String $ fromMaybe "<some DB-generated value>" dbGeneratedTimestampValue),
("Message", mkColumnFieldValue $ J.String "A message")
]
let expectedResult = MutationOperationResults 1 (Just [expectedRow])
response `mutationResponseShouldBe` MutationResponse [expectedResult]
-- Inserting into a table where every column is defaultable: an entirely
-- empty row object is inserted and the defaults are expected back.
edgeCaseTest _ectdAllColumnsDefaultableTableName "can insert into a table with all defaultable columns" $ \EdgeCasesTestData {..} -> do
let rows =
[ RowObject . Data.mkFieldsMap $ []
]
let returning =
Data.mkFieldsMap
[ ("Id", _ectdColumnField _ectdAllColumnsDefaultableTableName "Id"),
("Message", _ectdColumnField _ectdAllColumnsDefaultableTableName "Message"),
("Importance", _ectdColumnField _ectdAllColumnsDefaultableTableName "Importance")
]
let insertOperation =
mkInsertOperation _ectdAllColumnsDefaultableTableName
& imoRows .~ rows
& imoReturningFields .~ returning
-- No columns are declared insertable; everything comes from defaults.
let insertSchema =
TableInsertSchema _ectdAllColumnsDefaultableTableName $ HashMap.fromList []
let mutationRequest =
Data.emptyMutationRequest
& mrOperations .~ [InsertOperation insertOperation]
& mrInsertSchema .~ [insertSchema]
response <- mutationGuarded mutationRequest
-- Expected values mirror the column defaults in the edge-cases dataset:
-- Id 4 presumably follows three pre-seeded rows, Message defaults to
-- NULL, Importance to 100 — TODO confirm against the dataset template.
let expectedRow =
Data.mkFieldsMap $
[ ("Id", mkColumnFieldValue $ J.Number 4),
("Message", mkColumnFieldValue $ J.Null),
("Importance", mkColumnFieldValue $ J.Number 100)
]
let expectedResult = MutationOperationResults 1 (Just [expectedRow])
response `mutationResponseShouldBe` MutationResponse [expectedResult]
where
-- | Defines a test that runs against the testing-edge-cases dataset, but
-- only when that dataset template exists and the table the test needs is
-- present within it. Otherwise the test is registered as pending with a
-- message explaining which prerequisite is missing.
edgeCaseTest ::
(HasCallStack, HasAgentClient context, HasAgentTestContext context, HasBaseContext context, MonadThrow m, MonadIO m) =>
(EdgeCasesTestData -> TableName) ->
String ->
(forall testContext. (HasBaseContext testContext, HasAgentClient testContext, HasAgentTestContext testContext, HasDatasetContext testContext) => EdgeCasesTestData -> AgentClientT (ExampleT testContext m) ()) ->
Free (SpecCommand context m) ()
edgeCaseTest expectedTable name test = do
case edgeCasesTestData of
-- No edge-cases test data at all: the dataset template was not found.
Nothing -> it name $ pendingWith (testingEdgeCasesTemplateName <> " dataset template does not exist")
Just edgeCasesTestData'@EdgeCasesTestData {..} ->
-- Template exists, but the specific table may still be absent.
if _ectdTableExists (expectedTable edgeCasesTestData')
then usesDataset testingEdgeCasesTemplate $ it name $ test edgeCasesTestData'
else it name $ pendingWith (Text.unpack (tableNameToText (expectedTable edgeCasesTestData')) <> " table does not exist within the " <> testingEdgeCasesTemplateName <> " dataset")
where
testingEdgeCasesTemplateName = Text.unpack (_unDatasetTemplateName testingEdgeCasesTemplate)
-- | Wraps a list of rows as a relationship field value containing a
-- 'QueryResponse' with those rows and no aggregates.
mkSubqueryResponse :: [HashMap FieldName FieldValue] -> FieldValue
mkSubqueryResponse rows =
mkRelationshipFieldValue $ QueryResponse (Just rows) Nothing
-- | A minimal insert operation for the given table: rows and returning
-- fields start empty and are filled in by callers via the 'imoRows' and
-- 'imoReturningFields' lenses.
mkInsertOperation :: TableName -> InsertMutationOperation
mkInsertOperation tableName = InsertMutationOperation tableName [] Nothing mempty
-- | A minimal insert operation targeting the Chinook Artists table,
-- following the same empty-rows pattern as 'mkInsertOperation'.
artistsInsertOperation :: InsertMutationOperation
artistsInsertOperation = InsertMutationOperation _tdArtistsTableName [] Nothing mempty