mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-12-15 01:12:56 +03:00
change table and foreign-key naming conventions in json2graphql (#485)
Foreign keys will now be detected if a column name is `<table_name>_id`. Earlier, foreign keys would be detected when the column name was `<table_name - s>_id`. This was a problem because it placed restrictions on table names. Such restrictions were limiting us from importing complicated JSON databases. Here are examples of JSON sample data for a user, post and comment schema as per the new and old conventions: Old convention: ``` { posts: [ { id: 1, title: "Lorem Ipsum", views: 254, user_id: 123 }, { id: 2, title: "Sic Dolor amet", views: 65, user_id: 456 }, ], users: [ { id: 123, name: "John Doe" }, { id: 456, name: "Jane Doe" } ], comments: [ { id: 987, post_id: 1, body: "Consectetur adipiscing elit" }, { id: 995, post_id: 1, body: "Nam molestie pellentesque dui" } ] } ``` New convention: ``` { post: [ { id: 1, title: "Lorem Ipsum", views: 254, user_id: 123 }, { id: 2, title: "Sic Dolor amet", views: 65, user_id: 456 }, ], user: [ { id: 123, name: "John Doe" }, { id: 456, name: "Jane Doe" } ], comment: [ { id: 987, post_id: 1, body: "Consectetur adipiscing elit" }, { id: 995, post_id: 1, body: "Nam molestie pellentesque dui" } ] }; ``` @FrediBach I am not sure how this would affect [Blowson](https://github.com/FrediBach/json-data-extender). Can you provide some input?
This commit is contained in:
parent
ec516ce55b
commit
3d6732b678
@ -19,11 +19,11 @@ This is A CLI tool to import a schema and data to Postgres using JSON data. You
|
||||
|
||||
```js
|
||||
module.exports = {
|
||||
users: [
|
||||
user: [
|
||||
{ id: 123, name: "John Doe" },
|
||||
{ id: 456, name: "Jane Doe" }
|
||||
],
|
||||
cities: [
|
||||
city: [
|
||||
{ id: 987, name: "Stockholm", country: "Sweden" },
|
||||
{ id: 995, name: "Sydney", country: "Australia" }
|
||||
]
|
||||
@ -41,11 +41,11 @@ This is A CLI tool to import a schema and data to Postgres using JSON data. You
|
||||
|
||||
```graphql
|
||||
query {
|
||||
users {
|
||||
user {
|
||||
id
|
||||
name
|
||||
}
|
||||
cities {
|
||||
city {
|
||||
id
|
||||
name
|
||||
country
|
||||
@ -102,19 +102,19 @@ $ gq URL [flags]
|
||||
|
||||
You can also define foreign keys and relationships in your JSON sample data. The CLI infers foreign keys and relationships from column names and table names.
|
||||
|
||||
For example, in the following data set, the `posts` table has a field called `user_id` which is a foreign key to the `id` column of table `users`. Also, the `comments` table has a field called `post_id` which is a foreign key to the `id` column of table `posts`.
|
||||
For example, in the following data set, the `posts` table has a field called `users_id` which is a foreign key to the `id` column of table `users`. Also, the `comments` table has a field called `posts_id` which is a foreign key to the `id` column of table `posts`.
|
||||
|
||||
```js
|
||||
module.exports = {
|
||||
posts: [
|
||||
post: [
|
||||
{ id: 1, title: "Lorem Ipsum", views: 254, user_id: 123 },
|
||||
{ id: 2, title: "Sic Dolor amet", views: 65, user_id: 456 },
|
||||
],
|
||||
users: [
|
||||
user: [
|
||||
{ id: 123, name: "John Doe" },
|
||||
{ id: 456, name: "Jane Doe" }
|
||||
],
|
||||
comments: [
|
||||
comment: [
|
||||
{ id: 987, post_id: 1, body: "Consectetur adipiscing elit" },
|
||||
{ id: 995, post_id: 1, body: "Nam molestie pellentesque dui" }
|
||||
]
|
||||
@ -131,18 +131,58 @@ Now you can make complicated queries like:
|
||||
|
||||
```graphql
|
||||
query {
|
||||
users {
|
||||
post {
|
||||
id
|
||||
name
|
||||
postsByUsersId {
|
||||
title
|
||||
views
|
||||
userByUserId {
|
||||
id
|
||||
title
|
||||
views
|
||||
commentsByPostsId {
|
||||
id
|
||||
body
|
||||
}
|
||||
name
|
||||
}
|
||||
commentsByPostId {
|
||||
id
|
||||
body
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The response would be:
|
||||
|
||||
```json
|
||||
{
|
||||
"data": {
|
||||
"post": [
|
||||
{
|
||||
"userByUserId": {
|
||||
"name": "John Doe",
|
||||
"id": 123
|
||||
},
|
||||
"views": 254,
|
||||
"id": 1,
|
||||
"title": "Lorem Ipsum",
|
||||
"commentsByPostId": [
|
||||
{
|
||||
"body": "Consectetur adipiscing elit",
|
||||
"id": 987
|
||||
},
|
||||
{
|
||||
"body": "Nam molestie pellentesque dui",
|
||||
"id": 995
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"userByUserId": {
|
||||
"name": "Jane Doe",
|
||||
"id": 456
|
||||
},
|
||||
"views": 65,
|
||||
"id": 2,
|
||||
"title": "Sic Dolor amet",
|
||||
"commentsByPostId": []
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
2
community/tools/json2graphql/package-lock.json
generated
2
community/tools/json2graphql/package-lock.json
generated
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "json2graphql",
|
||||
"version": "0.0.0",
|
||||
"version": "0.1.1",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
|
@ -27,14 +27,11 @@ class JSON2GraphQL extends Command {
|
||||
const headers = key ? {'x-hasura-access-key': key} : {};
|
||||
const urlVerification = await this.verifyUrl(safeUrl, headers);
|
||||
if (urlVerification.error) {
|
||||
cli.action.stop('Error')
|
||||
console.log('Message: ', urlVerification.message);
|
||||
process.exit
|
||||
throw new CLIError(urlVerification.message);
|
||||
} else {
|
||||
cli.action.stop('Done!');
|
||||
await importData(dbJson, safeUrl, headers, overwrite);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
getDbJson(db) {
|
||||
@ -53,12 +50,12 @@ class JSON2GraphQL extends Command {
|
||||
`${url}/v1/version`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers
|
||||
headers,
|
||||
}
|
||||
);
|
||||
return resp.status === 200 ? {error: false} : { error: true, message: 'invalid access key'};
|
||||
return resp.status === 200 ? {error: false} : {error: true, message: 'invalid access key'};
|
||||
} catch (e) {
|
||||
return { error: true, message: 'invalid URL'}
|
||||
return {error: true, message: 'invalid URL'};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
const fetch = require('node-fetch');
|
||||
const {CLIError} = require('@oclif/errors');
|
||||
const {cli} = require('cli-ux');
|
||||
const throwError = require('./error');
|
||||
|
||||
const createTables = async (tables, url, headers, overwrite, runSql, sql) => {
|
||||
if (overwrite) {
|
||||
@ -26,17 +26,15 @@ const createTables = async (tables, url, headers, overwrite, runSql, sql) => {
|
||||
table_schema: 'public',
|
||||
},
|
||||
},
|
||||
})
|
||||
}),
|
||||
}
|
||||
);
|
||||
const dbTables = await resp.json();
|
||||
let found = false;
|
||||
tables.forEach((table) => {
|
||||
if(dbTables.find((dbTable) => dbTable.table_name === table.name)) {
|
||||
tables.forEach(table => {
|
||||
if (dbTables.find(dbTable => dbTable.table_name === table.name)) {
|
||||
found = true;
|
||||
cli.action.stop('Error');
|
||||
console.log('Message: Your JSON database contains tables that already exist in Postgres. Please use the flag "--overwrite" to overwrite them.');
|
||||
process.exit(1);
|
||||
throwError('Message: Your JSON database contains tables that already exist in Postgres. Please use the flag "--overwrite" to overwrite them.');
|
||||
}
|
||||
});
|
||||
if (!found) {
|
||||
|
7
community/tools/json2graphql/src/import/error.js
Normal file
7
community/tools/json2graphql/src/import/error.js
Normal file
@ -0,0 +1,7 @@
|
||||
const {cli} = require('cli-ux');
|
||||
|
||||
module.exports = message => {
|
||||
cli.action.stop('Error');
|
||||
console.log(message);
|
||||
process.exit(1);
|
||||
};
|
@ -1,5 +1,4 @@
|
||||
const {CLIError} = require('@oclif/errors');
|
||||
const {cli} = require('cli-ux');
|
||||
const throwError = require('./error');
|
||||
|
||||
const getDataType = (data, column) => {
|
||||
if (typeof data === 'number') {
|
||||
@ -17,9 +16,7 @@ const getDataType = (data, column) => {
|
||||
if (data.constructor.name === 'Object') {
|
||||
return 'json';
|
||||
}
|
||||
cli.action.stop('Error');
|
||||
console.log(`Message: invalid data type given for column ${column}: ${typeof data}`);
|
||||
process.exit(1);
|
||||
throwError(`message: invalid data type given for column ${column}: ${typeof data}`);
|
||||
};
|
||||
|
||||
const isForeign = (name, db) => {
|
||||
@ -27,7 +24,7 @@ const isForeign = (name, db) => {
|
||||
if (l > 3) {
|
||||
if (name.substring(l - 3, l) === '_id' &&
|
||||
Object.keys(db).find(tableName => {
|
||||
return tableName === name.substring(0, l - 3) + 's';
|
||||
return tableName === name.substring(0, l - 3);
|
||||
})) {
|
||||
return true;
|
||||
}
|
||||
@ -51,9 +48,7 @@ const getColumnData = (dataArray, db) => {
|
||||
Object.keys(refColumns).forEach(column => {
|
||||
const columnMetadata = {};
|
||||
if (!column) {
|
||||
cli.action.stop('Error');
|
||||
console.log("Message: column names can't be empty strings");
|
||||
process.exit(1);
|
||||
throwError("message: column names can't be empty strings");
|
||||
}
|
||||
columnMetadata.name = column;
|
||||
const sampleData = refColumns[column];
|
||||
@ -66,7 +61,7 @@ const getColumnData = (dataArray, db) => {
|
||||
|
||||
const hasPrimaryKey = dataObj => {
|
||||
let has = true;
|
||||
dataObj.forEach((obj) => {
|
||||
dataObj.forEach(obj => {
|
||||
if (!Object.keys(obj).find(name => name === 'id')) {
|
||||
has = false;
|
||||
}
|
||||
@ -79,16 +74,14 @@ const generate = db => {
|
||||
Object.keys(db).forEach(rootField => {
|
||||
const tableMetadata = {};
|
||||
if (!hasPrimaryKey(db[rootField], rootField)) {
|
||||
cli.action.stop('Error');
|
||||
console.log(`Message: A unique column with name "id" and type integer must present in table "${rootField}"`);
|
||||
process.exit(1);
|
||||
throwError(`message: a unique column with name "id" and type integer must present in table "${rootField}"`);
|
||||
}
|
||||
tableMetadata.name = rootField;
|
||||
tableMetadata.columns = getColumnData(db[rootField], db);
|
||||
tableMetadata.dependencies = [];
|
||||
tableMetadata.columns.forEach(column => {
|
||||
if (column.isForeign) {
|
||||
tableMetadata.dependencies.push(column.name.substring(0, column.name.length - 3) + 's');
|
||||
tableMetadata.dependencies.push(column.name.substring(0, column.name.length - 3));
|
||||
}
|
||||
});
|
||||
metaData.push(tableMetadata);
|
||||
|
@ -1,7 +1,6 @@
|
||||
const generate = require('./generateTables');
|
||||
const {generateSql, runSql} = require('./sql');
|
||||
const {cli} = require('cli-ux');
|
||||
const {CLIError} = require('@oclif/errors');
|
||||
const {trackTables} = require('./track');
|
||||
const {getInsertOrder, insertData} = require('./insert');
|
||||
const {createRelationships} = require('./relationships');
|
||||
@ -11,6 +10,7 @@ const importData = async (db, url, headers, overwrite) => {
|
||||
cli.action.start('Processing JSON data');
|
||||
const tables = generate(db);
|
||||
const sql = generateSql(tables);
|
||||
sql.forEach(s => console.log(s + '\n'));
|
||||
cli.action.stop('Done!');
|
||||
cli.action.start('Checking database');
|
||||
createTables(tables, url, headers, overwrite, runSql, sql).then(() => {
|
||||
@ -26,7 +26,7 @@ const importData = async (db, url, headers, overwrite) => {
|
||||
insertData(insertOrder, db, tables, url, headers);
|
||||
});
|
||||
});
|
||||
})
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = importData;
|
||||
|
@ -1,7 +1,7 @@
|
||||
const {query} = require('graphqurl');
|
||||
const {CLIError} = require('@oclif/errors');
|
||||
const moment = require('moment');
|
||||
const {cli} = require('cli-ux');
|
||||
const throwError = require('./error');
|
||||
|
||||
const getInsertOrder = tables => {
|
||||
let order = [];
|
||||
@ -73,11 +73,13 @@ const insertData = async (insertOrder, sampleData, tables, url, headers) => {
|
||||
variables,
|
||||
headers,
|
||||
});
|
||||
cli.action.stop('Done!');
|
||||
if (response.data !== null && response.data !== 'undefined') {
|
||||
cli.action.stop('Done!');
|
||||
} else {
|
||||
throw new Error(response);
|
||||
}
|
||||
} catch (e) {
|
||||
cli.action.stop('Error');
|
||||
console.log(JSON.stringify(e, null, 2));
|
||||
process.exit(1);
|
||||
throwError(JSON.stringify(e, null, 2));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1,12 +1,13 @@
|
||||
const fetch = require('node-fetch');
|
||||
const throwError = require('./error');
|
||||
|
||||
const getObjRelationshipName = dep => {
|
||||
const relName = `${dep.substring(0, dep.length - 1)}By${dep[0].toUpperCase()}`;
|
||||
return dep.length === 0 ? relName + 'Id' : relName + dep.substring(1, dep.length) + 'Id';
|
||||
const relName = `${dep}By${dep[0].toUpperCase()}`;
|
||||
return dep.length === 1 ? relName + 'Id' : relName + dep.substring(1, dep.length) + 'Id';
|
||||
};
|
||||
|
||||
const getArrayRelationshipName = (table, parent) => {
|
||||
const relName = `${table}By${parent[0].toUpperCase()}`;
|
||||
const relName = `${table}sBy${parent[0].toUpperCase()}`;
|
||||
return parent.length === 1 ? `${relName}Id` : `${relName}${parent.substring(1, parent.length)}Id`;
|
||||
};
|
||||
|
||||
@ -22,7 +23,7 @@ const generateRelationships = tables => {
|
||||
table: table.name,
|
||||
name: `${getObjRelationshipName(dep)}`,
|
||||
using: {
|
||||
foreign_key_constraint_on: `${dep.substring(0, dep.length - 1)}_id`,
|
||||
foreign_key_constraint_on: `${dep}_id`,
|
||||
},
|
||||
},
|
||||
});
|
||||
@ -34,7 +35,7 @@ const generateRelationships = tables => {
|
||||
using: {
|
||||
foreign_key_constraint_on: {
|
||||
table: table.name,
|
||||
column: `${dep.substring(0, dep.length - 1)}_id`,
|
||||
column: `${dep}_id`,
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -66,8 +67,7 @@ const createRelationships = async (tables, url, headers) => {
|
||||
);
|
||||
if (resp.status !== 200) {
|
||||
const error = await resp.json();
|
||||
console.log(JSON.stringify(error, null, 2));
|
||||
process.exit(1);
|
||||
throwError(JSON.stringify(error, null, 2));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1,6 +1,5 @@
|
||||
const fetch = require('node-fetch');
|
||||
const {CLIError} = require('@oclif/errors');
|
||||
const {cli} = require('cli-ux');
|
||||
const throwError = require('./error');
|
||||
|
||||
const runSql = async (sqlArray, url, headers) => {
|
||||
let sqlString = '';
|
||||
@ -23,9 +22,7 @@ const runSql = async (sqlArray, url, headers) => {
|
||||
);
|
||||
if (resp.status !== 200) {
|
||||
const error = await resp.json();
|
||||
cli.action.stop('Error');
|
||||
console.log(JSON.stringify(error, null, 2));
|
||||
process.exit(1);
|
||||
throwError(JSON.stringify(error, null, 2));
|
||||
}
|
||||
};
|
||||
|
||||
@ -53,7 +50,7 @@ const generateConstraintsSql = metadata => {
|
||||
metadata.forEach(table => {
|
||||
table.columns.forEach(column => {
|
||||
if (column.isForeign) {
|
||||
const fkSql = `add foreign key ("${column.name}") references public."${column.name.substring(0, column.name.length - 3)}s" ("id");`;
|
||||
const fkSql = `add foreign key ("${column.name}") references public."${column.name.substring(0, column.name.length - 3)}" ("id");`;
|
||||
sqlArray.push(`alter table public."${table.name}" ${fkSql}`);
|
||||
}
|
||||
});
|
||||
|
@ -1,6 +1,5 @@
|
||||
const fetch = require('node-fetch');
|
||||
const {CLIError} = require('@oclif/errors');
|
||||
const {cli} = require('cli-ux');
|
||||
const throwError = require('./error');
|
||||
|
||||
const trackTables = async (tables, url, headers) => {
|
||||
const bulkQueryArgs = [];
|
||||
@ -27,9 +26,7 @@ const trackTables = async (tables, url, headers) => {
|
||||
);
|
||||
if (resp.status !== 200) {
|
||||
const error = await resp.json();
|
||||
cli.action.stop('error');
|
||||
console.log(JSON.stringify(error, null, 2));
|
||||
process.exit(1);
|
||||
throwError(JSON.stringify(error, null, 2));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -4,13 +4,13 @@ const db = {
|
||||
{id: 2, name: 'Samuel Patzen', country: 'CH', birthday: '1978-02-01', sex: 'm', email: 'patzen@bluewin.ch', userStatus_id: 2, date: new Date()},
|
||||
{id: 3, name: 'Hans Muster', country: 'CH', birthday: '1978-02-01', sex: 'm', email: 'hans.muster@domain.ch', userStatus_id: 1, date: new Date()},
|
||||
],
|
||||
userStatuss: [
|
||||
userStatus: [
|
||||
{id: 1, key: 'inactive'},
|
||||
{id: 2, key: 'active'},
|
||||
{id: 3, key: 'blocked'},
|
||||
],
|
||||
userConfigs: [
|
||||
{id: 1, user_id: 1},
|
||||
{id: 1, users_id: 1},
|
||||
],
|
||||
leagues: [
|
||||
{id: 1, name: 'Switzerland', yearly: true, description: 'Waypoint are all placed in Switzerland by local instructors and top pilots.', created: '2018-05-01', seasonStart: '10-01', seasonEnd: '09-31'},
|
||||
@ -18,18 +18,18 @@ const db = {
|
||||
{id: 3, name: 'Vol Liber Grischun Clubmeisterschaft', yearly: false, created: '2018-05-02', seasonStart: '2018-10-01', seasonEnd: '2048-10-01'},
|
||||
],
|
||||
userLeagues: [
|
||||
{id: 1, user_id: 1, league_id: 1, isAdmin: true},
|
||||
{id: 2, user_id: 1, league_id: 2, isAdmin: true},
|
||||
{id: 3, user_id: 2, league_id: 1},
|
||||
{id: 4, user_id: 1, league_id: 3},
|
||||
{id: 5, user_id: 2, league_id: 3, isAdmin: true},
|
||||
{id: 1, users_id: 1, leagues_id: 1, isAdmin: true},
|
||||
{id: 2, users_id: 1, leagues_id: 2, isAdmin: true},
|
||||
{id: 3, users_id: 2, leagues_id: 1},
|
||||
{id: 4, users_id: 1, leagues_id: 3},
|
||||
{id: 5, users_id: 2, leagues_id: 3, isAdmin: true},
|
||||
],
|
||||
files: [
|
||||
{id: 1, mimetype_id: 1, width: 250, height: 250, url: 'https://imgplaceholder.com/250x250/cccccc/757575/ion-happy-outline'},
|
||||
{id: 2, mimetype_id: 1, width: 800, height: 400, url: 'https://imgplaceholder.com/800x400/cccccc/757575/fa-image'},
|
||||
{id: 3, mimetype_id: 1, width: 300, height: 200, url: 'https://imgplaceholder.com/300x200/cccccc/757575/fa-map-marker'},
|
||||
{id: 4, mimetype_id: 3, url: 'https://mycdn.com/fredi-bach/2018-07-02-001.igc'},
|
||||
{id: 5, mimetype_id: 3, url: 'https://mycdn.com/fredi-bach/2018-07-03-001.igc'},
|
||||
{id: 1, mimetypes_id: 1, width: 250, height: 250, url: 'https://imgplaceholder.com/250x250/cccccc/757575/ion-happy-outline'},
|
||||
{id: 2, mimetypes_id: 1, width: 800, height: 400, url: 'https://imgplaceholder.com/800x400/cccccc/757575/fa-image'},
|
||||
{id: 3, mimetypes_id: 1, width: 300, height: 200, url: 'https://imgplaceholder.com/300x200/cccccc/757575/fa-map-marker'},
|
||||
{id: 4, mimetypes_id: 3, url: 'https://mycdn.com/fredi-bach/2018-07-02-001.igc'},
|
||||
{id: 5, mimetypes_id: 3, url: 'https://mycdn.com/fredi-bach/2018-07-03-001.igc'},
|
||||
],
|
||||
mimetypes: [
|
||||
{id: 1, mime: 'image/png', description: 'Portable Network Graphics'},
|
||||
@ -44,70 +44,70 @@ const db = {
|
||||
{id: 5, name: 'Landing', description: 'Official landing', points: 10},
|
||||
],
|
||||
waypoints: [
|
||||
{id: 1, league_id: 1, type_id: 1, lat: 3.789, lng: 41.987, radius: 400, points: 100, minAltitude: 3500, name: 'Oberalp Pass', description: 'From Andermatt to Disentis', file_id: 3},
|
||||
{id: 2, league_id: 1, type_id: 2, lat: 3.589, lng: 41.787, radius: 400, points: 100, minAltitude: 3500, name: 'Furka Pass', description: 'From the Goms to Andermatt', file_id: 3},
|
||||
{id: 3, league_id: 1, type_id: 4, lat: 3.889, lng: 40.787, radius: 400, points: 10, name: 'Fiesch'},
|
||||
{id: 1, leagues_id: 1, types_id: 1, lat: 3.789, lng: 41.987, radius: 400, points: 100, minAltitude: 3500, name: 'Oberalp Pass', description: 'From Andermatt to Disentis', files_id: 3},
|
||||
{id: 2, leagues_id: 1, types_id: 2, lat: 3.589, lng: 41.787, radius: 400, points: 100, minAltitude: 3500, name: 'Furka Pass', description: 'From the Goms to Andermatt', files_id: 3},
|
||||
{id: 3, leagues_id: 1, types_id: 4, lat: 3.889, lng: 40.787, radius: 400, points: 10, name: 'Fiesch'},
|
||||
],
|
||||
waypointNotes: [
|
||||
{id: 1, waypoint_id: 1, noteType_id: 1, title: 'Föhn', text: 'Bei Föhn sehr gefährlich!'},
|
||||
{id: 2, waypoint_id: 1, noteType_id: 2, title: 'Basis', text: 'Braucht mindestens 3000 Meter Basis, besser mehr.'},
|
||||
{id: 1, waypoints_id: 1, noteTypes_id: 1, title: 'Föhn', text: 'Bei Föhn sehr gefährlich!'},
|
||||
{id: 2, waypoints_id: 1, noteTypes_id: 2, title: 'Basis', text: 'Braucht mindestens 3000 Meter Basis, besser mehr.'},
|
||||
],
|
||||
waypointPhotos: [
|
||||
{id: 1, user_id: 1, official: true, waypoint_id: 1, mimetype_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-1.jpeg'},
|
||||
{id: 2, user_id: 1, official: true, waypoint_id: 1, mimetype_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-2.jpeg'},
|
||||
{id: 3, user_id: 2, official: false, waypoint_id: 1, mimetype_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-3.jpeg'},
|
||||
{id: 1, users_id: 1, official: true, waypoints_id: 1, mimetypes_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-1.jpeg'},
|
||||
{id: 2, users_id: 1, official: true, waypoints_id: 1, mimetypes_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-2.jpeg'},
|
||||
{id: 3, users_id: 2, official: false, waypoints_id: 1, mimetypes_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-3.jpeg'},
|
||||
],
|
||||
waypointSuggestions: [
|
||||
{id: 1, user_id: 2, league_id: 1, type_id: 1, lat: 11.789, lng: 33.987, radius: 800, points: 100, minAltitude: 3500, name: 'Limmeren Stausee', description: 'Auf dem Weg von der Surselva ins Glaernerland', file_id: 3},
|
||||
{id: 1, users_id: 2, leagues_id: 1, types_id: 1, lat: 11.789, lng: 33.987, radius: 800, points: 100, minAltitude: 3500, name: 'Limmeren Stausee', description: 'Auf dem Weg von der Surselva ins Glaernerland', files_id: 3},
|
||||
],
|
||||
noteTypes: [
|
||||
{id: 1, name: 'Wind', icon: 'wind', class: 'waypoint-note-wind'},
|
||||
{id: 2, name: 'Altitude', icon: 'altitude', class: 'waypoint-note-altitude'},
|
||||
],
|
||||
sponsors: [
|
||||
{id: 1, waypoint_id: 1, user_id: 1, name: 'Flugschule Appenzell', url: 'http://www.gleitschirm.ch', slogan: 'Die Flugschule im Alpstein.'},
|
||||
{id: 2, waypoint_id: 2, name: 'Ozone', url: 'http://www.flyozone.ch', slogan: 'Real world performance.'},
|
||||
{id: 1, waypoints_id: 1, users_id: 1, name: 'Flugschule Appenzell', url: 'http://www.gleitschirm.ch', slogan: 'Die Flugschule im Alpstein.'},
|
||||
{id: 2, waypoints_id: 2, name: 'Ozone', url: 'http://www.flyozone.ch', slogan: 'Real world performance.'},
|
||||
],
|
||||
waypointChats: [
|
||||
{id: 1, waypoint_id: 1, user_id: 1, message: 'Can be quite hard with low base!', datetime: '2018-07-02 12:48:45'},
|
||||
{id: 2, waypoint_id: 1, user_id: 2, message: 'Oh yes, it can!', datetime: '2018-07-02 12:52:11'},
|
||||
{id: 1, waypoints_id: 1, users_id: 1, message: 'Can be quite hard with low base!', datetime: '2018-07-02 12:48:45'},
|
||||
{id: 2, waypoints_id: 1, users_id: 2, message: 'Oh yes, it can!', datetime: '2018-07-02 12:52:11'},
|
||||
],
|
||||
wings: [
|
||||
{id: 1, model: 'Zeno', brand: 'Ozone', certification: 'D'},
|
||||
{id: 2, model: 'Mentor 3', brand: 'Nova', certification: 'B'},
|
||||
],
|
||||
flights: [
|
||||
{id: 1, user_id: 1, league_id: 1, wing_id: 1, date: '2018-07-02', score: 200, file_id: 4, comment: 'Bockig!'},
|
||||
{id: 2, user_id: 2, league_id: 1, wing_id: 2, date: '2018-07-03', score: 100, file_id: 5},
|
||||
{id: 1, users_id: 1, leagues_id: 1, wings_id: 1, date: '2018-07-02', score: 200, files_id: 4, comment: 'Bockig!'},
|
||||
{id: 2, users_id: 2, leagues_id: 1, wings_id: 2, date: '2018-07-03', score: 100, files_id: 5},
|
||||
],
|
||||
favoriteFlights: [
|
||||
{id: 1, user_id: 1, flight_id: 2, datetime: '2018-07-02 12:48:45'},
|
||||
{id: 1, users_id: 1, flights_id: 2, datetime: '2018-07-02 12:48:45'},
|
||||
],
|
||||
flightWaypoints: [
|
||||
{id: 1, flight_id: 1, waypoint_id: 1, datetime: '2018-07-02 12:48:45', score: 100},
|
||||
{id: 2, flight_id: 1, waypoint_id: 2, datetime: '2018-07-02 13:11:59', score: 100},
|
||||
{id: 3, flight_id: 2, waypoint_id: 2, datetime: '2018-08-02 14:06:11', score: 100},
|
||||
{id: 1, flights_id: 1, waypoints_id: 1, datetime: '2018-07-02 12:48:45', score: 100},
|
||||
{id: 2, flights_id: 1, waypoints_id: 2, datetime: '2018-07-02 13:11:59', score: 100},
|
||||
{id: 3, flights_id: 2, waypoints_id: 2, datetime: '2018-08-02 14:06:11', score: 100},
|
||||
],
|
||||
flightComments: [
|
||||
{id: 1, flight_id: 1, user_id: 2, datetime: '2018-08-02 14:06:11', text: 'Ok, that was nice!'},
|
||||
{id: 2, flight_id: 1, user_id: 1, datetime: '2018-08-02 14:09:11', text: 'Thanks'},
|
||||
{id: 1, flights_id: 1, users_id: 2, datetime: '2018-08-02 14:06:11', text: 'Ok, that was nice!'},
|
||||
{id: 2, flights_id: 1, users_id: 1, datetime: '2018-08-02 14:09:11', text: 'Thanks'},
|
||||
],
|
||||
leagueSeasonUserScores: [
|
||||
{id: 1, user_id: 1, league_id: 1, season: '2018', score: 200, flightCount: 1},
|
||||
{id: 2, user_id: 1, league_id: 2, season: '2018', score: 0, flightCount: 0},
|
||||
{id: 3, user_id: 2, league_id: 1, season: '2018', score: 100, flightCount: 1},
|
||||
{id: 1, users_id: 1, leagues_id: 1, season: '2018', score: 200, flightCount: 1},
|
||||
{id: 2, users_id: 1, leagues_id: 2, season: '2018', score: 0, flightCount: 0},
|
||||
{id: 3, users_id: 2, leagues_id: 1, season: '2018', score: 100, flightCount: 1},
|
||||
],
|
||||
routes: [
|
||||
{id: 1, user_id: 1, league_id: 1, name: 'Wallis Sightseeing', description: 'A great route for a low wind high cloudbase day.'},
|
||||
{id: 2, user_id: 1, league_id: 1, name: 'Surselva Adventure'},
|
||||
{id: 1, users_id: 1, leagues_id: 1, name: 'Wallis Sightseeing', description: 'A great route for a low wind high cloudbase day.'},
|
||||
{id: 2, users_id: 1, leagues_id: 1, name: 'Surselva Adventure'},
|
||||
],
|
||||
routeWaypoints: [
|
||||
{id: 1, route_id: 1, waypoint_id: 1},
|
||||
{id: 2, route_id: 1, waypoint_id: 2, routeWaypoint_id: 1},
|
||||
{id: 3, route_id: 1, waypoint_id: 3, routeWaypoint_id: 2},
|
||||
{id: 1, routes_id: 1, waypoints_id: 1},
|
||||
{id: 2, routes_id: 1, waypoints_id: 2, routeWaypoints_id: 1},
|
||||
{id: 3, routes_id: 1, waypoints_id: 3, routeWaypoints_id: 2},
|
||||
],
|
||||
favoriteRoutes: [
|
||||
{id: 1, user_id: 1, route_id: 1, datetime: '2018-07-01 15:48:45'},
|
||||
{id: 1, users_id: 1, routes_id: 1, datetime: '2018-07-01 15:48:45'},
|
||||
],
|
||||
};
|
||||
|
||||
|
@ -1,5 +1,6 @@
|
||||
#!/bin/bash
|
||||
if [ -z "$TEST_HGE_URL" ];
|
||||
then echo "ERROR: Please run the test command with the environment variable TEST_HGE_ENDPOINT";
|
||||
else ../bin/run $TEST_HGE_URL --db=./db.js --overwrite && node verify.js;
|
||||
if [ -z "$TEST_HGE_URL" ] && [ -z "$TEST_X_HASURA_ACCESS_KEY" ]; then
|
||||
echo "ERROR: Please run the test command with the environment variable TEST_HGE_URL"
|
||||
else
|
||||
../bin/run $TEST_HGE_URL --access-key=$TEST_X_HASURA_ACCESS_KEY --db=./db.js --overwrite && node verify.js
|
||||
fi
|
||||
|
@ -3,11 +3,11 @@ const {query} = require('graphqurl');
|
||||
const complexQuery = `
|
||||
query {
|
||||
favoriteRoutes {
|
||||
routeByRoutesId {
|
||||
leagueByLeaguesId {
|
||||
flightsByLeaguesId {
|
||||
flightCommentsByFlightsId {
|
||||
userByUsersId {
|
||||
routesByRoutesId {
|
||||
leaguesByLeaguesId {
|
||||
flightssByLeaguesId {
|
||||
flightCommentssByFlightsId {
|
||||
usersByUsersId {
|
||||
email
|
||||
}
|
||||
}
|
||||
@ -24,7 +24,7 @@ const verifyDataImport = () => {
|
||||
endpoint: `${process.env.TEST_HGE_URL}/v1alpha1/graphql`,
|
||||
headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
|
||||
}).then(response => {
|
||||
if (response.data.favoriteRoutes[0].routeByRoutesId.leagueByLeaguesId.flightsByLeaguesId[0].flightCommentsByFlightsId[0].userByUsersId.email === 'osxcode@gmail.com') {
|
||||
if (response.data.favoriteRoutes[0].routesByRoutesId.leaguesByLeaguesId.flightssByLeaguesId[0].flightCommentssByFlightsId[0].usersByUsersId.email === 'osxcode@gmail.com') {
|
||||
console.log('✔︎ Test passed');
|
||||
process.exit();
|
||||
} else {
|
||||
|
Loading…
Reference in New Issue
Block a user