Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-15 01:12:56 +03:00)
add json2graphql to community tools (#454)
parent e6c5aa5b43 · commit 68a2e0372f
community/tools/json2graphql/.editorconfig (new file, 11 lines)
@@ -0,0 +1,11 @@
root = true

[*]
indent_style = space
indent_size = 2
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.md]
trim_trailing_whitespace = false
community/tools/json2graphql/.eslintignore (new file, 1 line)
@@ -0,0 +1 @@
./test/db.js
community/tools/json2graphql/.eslintrc (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "extends": "oclif",
  "rules": {
    "max-params": "off",
    "no-console": "off",
    "max-depth": "off",
    "one-var": "off",
    "complexity": "off",
    "unicorn/no-process-exit": "off",
    "unicorn/filename-case": "off",
    "no-process-exit": "off",
    "no-throw-literal": "off",
    "node/no-unsupported-features": "off",
    "no-warning-comments": "off",
    "semi": [1, "always"],
    "camelcase": "off"
  }
}
community/tools/json2graphql/.gitattributes (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
* text=auto
*.js text eol=lf
community/tools/json2graphql/.gitignore (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
*-debug.log
*-error.log
/.nyc_output
/dist
/tmp
/yarn.lock
/test-db.js
test-db
node_modules
community/tools/json2graphql/CONTRIBUTING.md (new file, 24 lines)
@@ -0,0 +1,24 @@
# Contributing to JSON Data Import

## Issues

Please open an issue related to your work. Add the label `c/json2graphql`.

## Local development

1. Make changes and save.
2. Run the executable in the `bin` directory to test your code. Treat the executable as the command. For example:

```
$ bin/run --help
```

## Testing

Please make sure you run the tests before opening a pull request. All pull requests are run through the tests before merging.

To run the tests locally, you will need a running instance of [Hasura GraphQL Engine](https://github.com/hasura/graphql-engine). Then run:

```
$ TEST_HGE_URL=https://hge.herokuapp.com npm test
```
community/tools/json2graphql/README.md (new file, 157 lines)
@@ -0,0 +1,157 @@
# JSON database to GraphQL

[Hasura GraphQL Engine](https://hasura.io) gives instant GraphQL APIs over Postgres.

This is a CLI tool that imports a schema and data into Postgres from JSON data. You can then leverage all the features of Hasura GraphQL Engine to query the Postgres data over GraphQL.

[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
[![Version](https://img.shields.io/npm/v/json2graphql.svg)](https://npmjs.org/package/json2graphql)

## Quick start

1. Quickly get the GraphQL Engine running by clicking this button:

   [![Deploy to heroku](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/hasura/graphql-engine-heroku)

   Note the URL. It will be of the form: `https://<app-name>.herokuapp.com`

2. Create a `db.js` file. Your data file should export an object whose keys are the entity types. The values should be lists of entities, i.e. arrays of value objects with at least an `id` key. For instance:

   ```js
   module.exports = {
     users: [
       { id: 123, name: "John Doe" },
       { id: 456, name: "Jane Doe" }
     ],
     cities: [
       { id: 987, name: "Stockholm", country: "Sweden" },
       { id: 995, name: "Sydney", country: "Australia" }
     ]
   }
   ```

3. Use the CLI to import the data:

   ```
   $ npm install -g json2graphql
   $ json2graphql https://<app-name>.herokuapp.com --db=./path/to/db.js
   ```

4. That's it. You can go to your HGE URL `https://<app-name>.herokuapp.com` and start querying this data over GraphQL:

   ```graphql
   query {
     users {
       id
       name
     }
     cities {
       id
       name
       country
     }
   }
   ```

See [this section](#foreign-keys-and-relationships) to learn about foreign keys and relationships.

## Installation

### CLI

```bash
npm install -g json2graphql
```

## Usage

### CLI

#### Without access key

```
$ json2graphql https://hge.herokuapp.com -d ./path/to/db.js
```

#### With access key

```
$ json2graphql https://hge.herokuapp.com -k <access-key> -d ./path/to/db.js
```

### Command

```bash
$ json2graphql URL [flags]
```

#### Args

* `URL`: The URL where Hasura GraphQL Engine is running

#### Options

- `-d, --db`: path to the JS file that exports your sample JSON database
- `-o, --overwrite`: overwrite tables if they already exist in the database
- `-v, --version`: show CLI version
- `-h, --help`: show CLI help

## More features

### Foreign keys and relationships

You can also define foreign keys and relationships in your JSON sample data. The CLI infers foreign keys and relationships from column names and table names.

For example, in the following data set, the `posts` table has a field called `user_id` which is a foreign key to the `id` column of table `users`. Also, the `comments` table has a field called `post_id` which is a foreign key to the `id` column of table `posts`.

```js
module.exports = {
  posts: [
    { id: 1, title: "Lorem Ipsum", views: 254, user_id: 123 },
    { id: 2, title: "Sic Dolor amet", views: 65, user_id: 456 },
  ],
  users: [
    { id: 123, name: "John Doe" },
    { id: 456, name: "Jane Doe" }
  ],
  comments: [
    { id: 987, post_id: 1, body: "Consectetur adipiscing elit" },
    { id: 995, post_id: 1, body: "Nam molestie pellentesque dui" }
  ]
};
```

Import the database:

```
$ json2graphql https://<app-name>.herokuapp.com --db=./path/to/db.js
```

Now you can run queries across these relationships:

```graphql
query {
  users {
    id
    name
    postsByUsersId {
      id
      title
      views
      commentsByPostsId {
        id
        body
      }
    }
  }
}
```

### Overwrite

If your Postgres database already contains tables that you are trying to import with `json2graphql`, the command will fail.

If you want to overwrite the existing tables in the database with the new tables from your sample JSON database, pass the `-o` (`--overwrite`) flag.

---
Maintained with ♡ by <a href="https://hasura.io">Hasura</a>
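
(Illustrative sketch, not a file in this commit.) The README's foreign-key convention can be summarised in a few lines; the helper below is hypothetical and only mirrors the `_id`-suffix rule described above:

```js
// A column "<singular>_id" is treated as a foreign key to table "<singular>s"."id".
const isForeignKeyColumn = (columnName, tableNames) =>
  columnName.endsWith('_id') && tableNames.includes(columnName.slice(0, -3) + 's');

console.log(isForeignKeyColumn('user_id', ['users', 'posts', 'comments'])); // true
console.log(isForeignKeyColumn('title', ['users', 'posts', 'comments']));   // false
```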
community/tools/json2graphql/bin/run (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env node

require('../src/command').run()
  .catch(require('@oclif/errors/handle'))
community/tools/json2graphql/bin/run.cmd (new file, 3 lines)
@@ -0,0 +1,3 @@
@echo off

node "%~dp0\run" %*
community/tools/json2graphql/package-lock.json (generated, new file, 2405 lines)
Diff suppressed because it is too large.
community/tools/json2graphql/package.json (new file, 55 lines)
@@ -0,0 +1,55 @@
{
  "name": "json2graphql",
  "description": "A CLI tool to import JSON data in Hasura GraphQL Engine",
  "version": "0.1.1",
  "author": "Hasura",
  "bin": {
    "json2graphql": "./bin/run",
    "j2g": "./bin/run"
  },
  "bugs": "https://github.com/hasura/graphql-engine/issues?q=is%3Aissue+is%3Aopen+label%3Ac%2Fjson2graphql",
  "dependencies": {
    "@oclif/command": "^1.4.35",
    "@oclif/config": "^1.6.33",
    "@oclif/errors": "^1.1.2",
    "@oclif/plugin-help": "^2.0.5",
    "cli-ux": "^4.7.3",
    "graphqurl": "^0.3.2",
    "moment": "^2.22.2",
    "node-fetch": "^2.2.0"
  },
  "devDependencies": {
    "eslint": "^4.19.1",
    "eslint-config-oclif": "^1.5.1"
  },
  "engines": {
    "node": ">=8.0.0"
  },
  "files": [
    "/bin",
    "/src"
  ],
  "homepage": "https://github.com/wawhal/graphql-engine/tree/master/community/tools/json2graphql",
  "keywords": [
    "oclif",
    "cli",
    "graphql",
    "graphql-engine",
    "json"
  ],
  "license": "MIT",
  "main": "src/command.js",
  "oclif": {
    "bin": "json2graphql"
  },
  "repository": "hasura/graphql-engine",
  "scripts": {
    "eslint": "eslint .",
    "eslintfix": "eslint . --fix",
    "posttest": "npm run eslint",
    "test": "cd test && ./test.sh"
  },
  "pre-commit": [
    "eslintfix"
  ]
}
community/tools/json2graphql/src/command.js (new file, 110 lines)
@@ -0,0 +1,110 @@
const {Command, flags} = require('@oclif/command');
const fetch = require('node-fetch');
const {CLIError} = require('@oclif/errors');
const {cli} = require('cli-ux');
const importData = require('./import/import');
const resolve = require('path').resolve;

class JSON2GraphQL extends Command {
  async run() {
    const {args, flags} = this.parse(JSON2GraphQL);
    const {url} = args;
    if (!url) {
      throw new CLIError('endpoint is required: \'json2graphql <url>\'');
    }

    const {db, overwrite} = flags;
    const key = flags['access-key'];

    if (!url) {
      throw new CLIError('endpoint is required: \'json2graphql <url> -d ./db.js\'');
    }
    const safeUrl = this.getSafeUrl(url);
    if (!db) {
      throw new CLIError('path to sample database is required: \'json2graphql <url> -d ./db.js\'');
    }
    const dbJson = this.getDbJson(db);
    const headers = key ? {'x-hasura-access-key': key} : {};
    const urlVerification = await this.verifyUrl(safeUrl, headers);
    if (urlVerification.error) {
      cli.action.stop('Error');
      console.log('Message: ', urlVerification.message);
      process.exit(1);
    } else {
      cli.action.stop('Done!');
      await importData(dbJson, safeUrl, headers, overwrite);
    }
  }

  getDbJson(db) {
    return require(resolve(db));
  }

  getSafeUrl(url) {
    // Strip a trailing slash so endpoint paths can be appended safely
    const urlLength = url.length;
    return url[urlLength - 1] === '/' ? url.slice(0, -1) : url;
  }

  async verifyUrl(url, headers) {
    try {
      cli.action.start('Verifying URL');
      const resp = await fetch(
        `${url}/v1/version`,
        {
          method: 'GET',
          headers
        }
      );
      return resp.status === 200 ? {error: false} : {error: true, message: 'invalid access key'};
    } catch (e) {
      return {error: true, message: 'invalid URL'};
    }
  }
}

JSON2GraphQL.description = `JSON Data Import: Import JSON data to Hasura GraphQL Engine
# Examples:

# Import data from a JSON file to Hasura GraphQL Engine without an access key
json2graphql https://hge.herokuapp.com --db=./path/to/db.js

# Import data from a JSON file to Hasura GraphQL Engine with an access key
json2graphql https://hge.herokuapp.com --access-key='<access-key>' --db=./path/to/db.js
`;

JSON2GraphQL.usage = 'URL [-k KEY]';

JSON2GraphQL.flags = {
  // add --version flag to show CLI version
  version: flags.version(),

  // add --help flag to show CLI help
  help: flags.help({char: 'h'}),

  // Access key to Hasura GraphQL Engine
  'access-key': flags.string({
    char: 'k',
    description: 'Access key to Hasura GraphQL Engine (X-Hasura-Access-Key)',
  }),

  db: flags.string({
    char: 'd',
    description: 'Path to the .js file that exports a JSON database',
  }),

  overwrite: flags.boolean({
    char: 'o',
    description: 'Overwrite tables if they exist',
  }),
};

JSON2GraphQL.args = [
  {
    name: 'url',
    description: 'URL where Hasura GraphQL Engine is running',
  },
];

module.exports = JSON2GraphQL;
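
(Illustrative sketch, not part of this commit.) `verifyUrl` above issues a GET to `/v1/version` and treats any non-200 response as an invalid access key. The same check reproduced standalone with `node-fetch` v2; the `isReachable` name is hypothetical:

```js
const fetch = require('node-fetch');

// Resolves to true if the Hasura GraphQL Engine at `url` answers /v1/version with HTTP 200.
const isReachable = async (url, accessKey) => {
  const headers = accessKey ? {'x-hasura-access-key': accessKey} : {};
  try {
    const resp = await fetch(`${url}/v1/version`, {method: 'GET', headers});
    return resp.status === 200;
  } catch (e) {
    return false;
  }
};

isReachable('https://hge.herokuapp.com').then(ok => console.log(ok));
```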
community/tools/json2graphql/src/import/check.js (new file, 56 lines)
@@ -0,0 +1,56 @@
const fetch = require('node-fetch');
const {CLIError} = require('@oclif/errors');
const {cli} = require('cli-ux');

const createTables = async (tables, url, headers, overwrite, runSql, sql) => {
  if (overwrite) {
    cli.action.stop('Skipped!');
    cli.action.start('Creating tables');
    await runSql(sql, url, headers);
  } else {
    try {
      // Look up existing tables in the public schema via hdb_catalog.hdb_table
      const resp = await fetch(
        `${url}/v1/query`,
        {
          method: 'POST',
          headers,
          body: JSON.stringify({
            type: 'select',
            args: {
              table: {
                name: 'hdb_table',
                schema: 'hdb_catalog',
              },
              columns: ['*.*'],
              where: {
                table_schema: 'public',
              },
            },
          })
        }
      );
      const dbTables = await resp.json();
      let found = false;
      tables.forEach(table => {
        if (dbTables.find(dbTable => dbTable.table_name === table.name)) {
          found = true;
          cli.action.stop('Error');
          console.log('Message: Your JSON database contains tables that already exist in Postgres. Please use the flag "--overwrite" to overwrite them.');
          process.exit(1);
        }
      });
      if (!found) {
        cli.action.stop('Done!');
        cli.action.start('Creating tables');
        await runSql(sql, url, headers);
      }
    } catch (e) {
      console.log('Unexpected: ', e);
      process.exit(1);
    }
  }
};

module.exports = {
  createTables,
};
community/tools/json2graphql/src/import/generateTables.js (new file, 99 lines)
@@ -0,0 +1,99 @@
const {CLIError} = require('@oclif/errors');
const {cli} = require('cli-ux');

// Infer a Postgres column type from a sample JSON value
const getDataType = (data, column) => {
  if (typeof data === 'number') {
    return (data === parseInt(data, 10)) ? 'int' : 'numeric';
  }
  if (typeof data === 'string' || data === null) {
    return 'text';
  }
  if (typeof data === 'boolean') {
    return 'boolean';
  }
  if (data.constructor.name === 'Date') {
    return 'timestamptz';
  }
  if (data.constructor.name === 'Object') {
    return 'json';
  }
  cli.action.stop('Error');
  console.log(`Message: invalid data type given for column ${column}: ${typeof data}`);
  process.exit(1);
};

// A column "<name>_id" is treated as a foreign key if a table "<name>s" exists
const isForeign = (name, db) => {
  const l = name.length;
  if (l > 3) {
    if (name.substring(l - 3, l) === '_id' &&
      Object.keys(db).find(tableName => {
        return tableName === name.substring(0, l - 3) + 's';
      })) {
      return true;
    }
  }
  return false;
};

const getColumnData = (dataArray, db) => {
  // Use the row with the most keys as the reference for column names
  const refRow = {
    numOfCols: 0,
    index: 0,
  };
  dataArray.forEach((row, i) => {
    if (Object.keys(row).length > refRow.numOfCols) {
      refRow.numOfCols = Object.keys(row).length;
      refRow.index = i;
    }
  });
  const refColumns = dataArray[refRow.index];
  const columnData = [];
  Object.keys(refColumns).forEach(column => {
    const columnMetadata = {};
    if (!column) {
      cli.action.stop('Error');
      console.log("Message: column names can't be empty strings");
      process.exit(1);
    }
    columnMetadata.name = column;
    const sampleData = refColumns[column];
    columnMetadata.type = getDataType(sampleData, column, db);
    columnMetadata.isForeign = isForeign(column, db);
    columnData.push(columnMetadata);
  });
  return columnData;
};

const hasPrimaryKey = dataObj => {
  let has = true;
  dataObj.forEach(obj => {
    if (!Object.keys(obj).find(name => name === 'id')) {
      has = false;
    }
  });
  return has;
};

const generate = db => {
  const metaData = [];
  Object.keys(db).forEach(rootField => {
    const tableMetadata = {};
    if (!hasPrimaryKey(db[rootField], rootField)) {
      cli.action.stop('Error');
      console.log(`Message: A unique column with name "id" and type integer must be present in table "${rootField}"`);
      process.exit(1);
    }
    tableMetadata.name = rootField;
    tableMetadata.columns = getColumnData(db[rootField], db);
    tableMetadata.dependencies = [];
    tableMetadata.columns.forEach(column => {
      if (column.isForeign) {
        tableMetadata.dependencies.push(column.name.substring(0, column.name.length - 3) + 's');
      }
    });
    metaData.push(tableMetadata);
  });
  return metaData;
};

module.exports = generate;
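
(Illustrative sketch, not part of this commit.) A minimal usage of `generate`, assuming it is run from the `community/tools/json2graphql` directory:

```js
const generate = require('./src/import/generateTables');

const db = {
  users: [{id: 1, name: 'John Doe'}],
  posts: [{id: 1, title: 'Lorem Ipsum', views: 254, user_id: 1}],
};

// Each table gets a name, inferred column types (int, text, ...), an isForeign
// flag per column, and a dependencies list (posts -> ['users'] via user_id).
console.log(JSON.stringify(generate(db), null, 2));
```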
community/tools/json2graphql/src/import/import.js (new file, 32 lines)
@@ -0,0 +1,32 @@
const generate = require('./generateTables');
const {generateSql, runSql} = require('./sql');
const {cli} = require('cli-ux');
const {CLIError} = require('@oclif/errors');
const {trackTables} = require('./track');
const {getInsertOrder, insertData} = require('./insert');
const {createRelationships} = require('./relationships');
const {createTables} = require('./check');

const importData = async (db, url, headers, overwrite) => {
  cli.action.start('Processing JSON data');
  const tables = generate(db);
  const sql = generateSql(tables);
  cli.action.stop('Done!');
  cli.action.start('Checking database');
  createTables(tables, url, headers, overwrite, runSql, sql).then(() => {
    cli.action.stop('Done!');
    cli.action.start('Tracking tables');
    trackTables(tables, url, headers).then(() => {
      cli.action.stop('Done!');
      cli.action.start('Creating relationships');
      createRelationships(tables, url, headers).then(() => {
        cli.action.stop('Done!');
        cli.action.start('Inserting data');
        const insertOrder = getInsertOrder(tables);
        insertData(insertOrder, db, tables, url, headers);
      });
    });
  });
};

module.exports = importData;
community/tools/json2graphql/src/import/insert.js (new file, 87 lines)
@@ -0,0 +1,87 @@
const {query} = require('graphqurl');
const {CLIError} = require('@oclif/errors');
const moment = require('moment');
const {cli} = require('cli-ux');

// Order tables so that every table comes after the tables it references
const getInsertOrder = tables => {
  let order = [];
  const tablesHash = {};
  tables.forEach(table => {
    tablesHash[table.name] = table;
  });
  const pushedHash = {};
  const setOrder = table => {
    if (table.dependencies.length === 0) {
      order.push(table.name);
      pushedHash[table.name] = true;
    } else {
      table.dependencies.forEach(parentTable => {
        if (!pushedHash[parentTable] && parentTable !== table.name) {
          setOrder(tablesHash[parentTable]);
        }
      });
      order.push(table.name);
      pushedHash[table.name] = true;
    }
  };

  tables.forEach(table => {
    if (!pushedHash[table.name]) {
      setOrder(table);
    }
  });
  return order;
};

// Normalize timestamptz and json values before sending them in the mutation
const transformData = (data, tables) => {
  const newData = {};
  tables.forEach(table => {
    const tableData = data[table.name];
    newData[table.name] = [];
    tableData.forEach(row => {
      const newRow = {...row};
      table.columns.forEach(column => {
        if (column.type === 'timestamptz' && row[column.name]) {
          newRow[column.name] = moment(row[column.name]).format();
        }
        if (column.type === 'json' && row[column.name]) {
          newRow[column.name] = JSON.stringify(row[column.name]);
        }
      });
      newData[table.name].push(newRow);
    });
  });
  return newData;
};

const insertData = async (insertOrder, sampleData, tables, url, headers) => {
  const transformedData = transformData(sampleData, tables);
  let mutationString = '';
  let objectString = '';
  const variables = {};
  insertOrder.forEach(tableName => {
    mutationString += `insert_${tableName} ( objects: $objects_${tableName} ) { returning { id } } \n`;
    objectString += `$objects_${tableName}: [${tableName}_insert_input!]!,\n`;
    variables[`objects_${tableName}`] = transformedData[tableName];
  });
  const mutation = `mutation ( ${objectString} ) { ${mutationString} }`;
  cli.action.start('Inserting data');
  try {
    const response = await query({
      query: mutation,
      endpoint: `${url}/v1alpha1/graphql`,
      variables,
      headers,
    });
    cli.action.stop('Done!');
  } catch (e) {
    cli.action.stop('Error');
    console.log(JSON.stringify(e, null, 2));
    process.exit(1);
  }
};

module.exports = {
  getInsertOrder,
  insertData,
};
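
(Illustrative sketch, not part of this commit.) `getInsertOrder` places parent tables before the tables that reference them, so rows carrying foreign keys are inserted after the rows they point to; this assumes it is run from the `community/tools/json2graphql` directory:

```js
const {getInsertOrder} = require('./src/import/insert');

const tables = [
  {name: 'comments', dependencies: ['posts']},
  {name: 'posts', dependencies: ['users']},
  {name: 'users', dependencies: []},
];

console.log(getInsertOrder(tables)); // ['users', 'posts', 'comments']
```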
community/tools/json2graphql/src/import/relationships.js (new file, 76 lines)
@@ -0,0 +1,76 @@
const fetch = require('node-fetch');

const getObjRelationshipName = dep => {
  const relName = `${dep.substring(0, dep.length - 1)}By${dep[0].toUpperCase()}`;
  return dep.length === 0 ? relName + 'Id' : relName + dep.substring(1, dep.length) + 'Id';
};

const getArrayRelationshipName = (table, parent) => {
  const relName = `${table}By${parent[0].toUpperCase()}`;
  return parent.length === 1 ? `${relName}Id` : `${relName}${parent.substring(1, parent.length)}Id`;
};

const generateRelationships = tables => {
  const objectRelationships = [];
  const arrayRelationships = [];
  tables.forEach(table => {
    if (table.dependencies.length > 0) {
      table.dependencies.forEach(dep => {
        objectRelationships.push({
          type: 'create_object_relationship',
          args: {
            table: table.name,
            name: `${getObjRelationshipName(dep)}`,
            using: {
              foreign_key_constraint_on: `${dep.substring(0, dep.length - 1)}_id`,
            },
          },
        });
        arrayRelationships.push({
          type: 'create_array_relationship',
          args: {
            table: dep,
            name: `${getArrayRelationshipName(table.name, dep)}`,
            using: {
              foreign_key_constraint_on: {
                table: table.name,
                column: `${dep.substring(0, dep.length - 1)}_id`,
              },
            },
          },
        });
      });
    }
  });
  return {
    objectRelationships,
    arrayRelationships,
  };
};

const createRelationships = async (tables, url, headers) => {
  const relationships = generateRelationships(tables);
  const bulkQuery = {
    type: 'bulk',
    args: [],
  };
  relationships.objectRelationships.forEach(or => bulkQuery.args.push(or));
  relationships.arrayRelationships.forEach(ar => bulkQuery.args.push(ar));
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      body: JSON.stringify(bulkQuery),
      headers,
    }
  );
  if (resp.status !== 200) {
    const error = await resp.json();
    console.log(JSON.stringify(error, null, 2));
    process.exit(1);
  }
};

module.exports = {
  createRelationships,
};
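
(Illustrative sketch, not part of this commit.) The helpers above produce the relationship names used in the README's example query; since the module only exports `createRelationships`, the functions below are simplified inline copies for illustration:

```js
// Object relationship on posts.user_id -> "userByUsersId";
// reverse array relationship on users -> "postsByUsersId".
const getObjRelationshipName = dep =>
  `${dep.substring(0, dep.length - 1)}By${dep[0].toUpperCase()}${dep.substring(1)}Id`;
const getArrayRelationshipName = (table, parent) =>
  `${table}By${parent[0].toUpperCase()}${parent.substring(1)}Id`;

console.log(getObjRelationshipName('users'));            // userByUsersId
console.log(getArrayRelationshipName('posts', 'users')); // postsByUsersId
```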
community/tools/json2graphql/src/import/sql.js (new file, 74 lines)
@@ -0,0 +1,74 @@
const fetch = require('node-fetch');
const {CLIError} = require('@oclif/errors');
const {cli} = require('cli-ux');

const runSql = async (sqlArray, url, headers) => {
  let sqlString = '';
  sqlArray.forEach(sql => {
    sqlString += sql;
  });
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      body: JSON.stringify({
        type: 'run_sql',
        args: {
          sql: sqlString,
          cascade: true,
        },
      }),
      headers,
    }
  );
  if (resp.status !== 200) {
    const error = await resp.json();
    cli.action.stop('Error');
    console.log(JSON.stringify(error, null, 2));
    process.exit(1);
  }
};

const generateCreateTableSql = metadata => {
  const sqlArray = [];
  metadata.forEach(table => {
    sqlArray.push(`drop table if exists public."${table.name}" cascade;`);
    let columnSql = '(';
    table.columns.forEach((column, i) => {
      if (column.name === 'id') {
        columnSql += '"id" int not null primary key';
      } else {
        columnSql += `"${column.name}" ${column.type}`;
      }
      columnSql += (table.columns.length === i + 1) ? ' ) ' : ', ';
    });
    const createTableSql = `create table public."${table.name}" ${columnSql};`;
    sqlArray.push(createTableSql);
  });
  return sqlArray;
};

const generateConstraintsSql = metadata => {
  const sqlArray = [];
  metadata.forEach(table => {
    table.columns.forEach(column => {
      if (column.isForeign) {
        const fkSql = `add foreign key ("${column.name}") references public."${column.name.substring(0, column.name.length - 3)}s" ("id");`;
        sqlArray.push(`alter table public."${table.name}" ${fkSql}`);
      }
    });
  });
  return sqlArray;
};

const generateSql = metadata => {
  const createTableSql = generateCreateTableSql(metadata);
  const constraintsSql = generateConstraintsSql(metadata);
  let sqlArray = [...createTableSql, ...constraintsSql];
  return sqlArray;
};

module.exports = {
  generateSql,
  runSql,
};
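
(Illustrative sketch, not part of this commit.) Chaining `generate` and `generateSql` on the README's example data, assuming it is run from the `community/tools/json2graphql` directory:

```js
const generate = require('./src/import/generateTables');
const {generateSql} = require('./src/import/sql');

const db = {
  users: [{id: 1, name: 'John Doe'}],
  posts: [{id: 1, title: 'Lorem Ipsum', user_id: 1}],
};

// Prints drop/create statements for "users" and "posts", followed by
// alter table public."posts" add foreign key ("user_id") references public."users" ("id");
console.log(generateSql(generate(db)).join('\n'));
```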
community/tools/json2graphql/src/import/track.js (new file, 38 lines)
@@ -0,0 +1,38 @@
const fetch = require('node-fetch');
const {CLIError} = require('@oclif/errors');
const {cli} = require('cli-ux');

const trackTables = async (tables, url, headers) => {
  const bulkQueryArgs = [];
  tables.forEach(table => {
    bulkQueryArgs.push({
      type: 'add_existing_table_or_view',
      args: {
        name: table.name,
        schema: 'public',
      },
    });
  });
  const bulkQuery = {
    type: 'bulk',
    args: bulkQueryArgs,
  };
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      body: JSON.stringify(bulkQuery),
      headers,
    }
  );
  if (resp.status !== 200) {
    const error = await resp.json();
    cli.action.stop('Error');
    console.log(JSON.stringify(error, null, 2));
    process.exit(1);
  }
};

module.exports = {
  trackTables,
};
community/tools/json2graphql/test/db.js (new file, 114 lines)
@@ -0,0 +1,114 @@
const db = {
  users: [
    {id: 1, name: 'Fredi Bach', country: 'CH', birthday: '1975-09-03', sex: 'm', email: 'osxcode@gmail.com', userStatus_id: 2, date: new Date(), object: {hey: 'there', whats: 'up'}},
    {id: 2, name: 'Samuel Patzen', country: 'CH', birthday: '1978-02-01', sex: 'm', email: 'patzen@bluewin.ch', userStatus_id: 2, date: new Date()},
    {id: 3, name: 'Hans Muster', country: 'CH', birthday: '1978-02-01', sex: 'm', email: 'hans.muster@domain.ch', userStatus_id: 1, date: new Date()},
  ],
  userStatuss: [
    {id: 1, key: 'inactive'},
    {id: 2, key: 'active'},
    {id: 3, key: 'blocked'},
  ],
  userConfigs: [
    {id: 1, user_id: 1},
  ],
  leagues: [
    {id: 1, name: 'Switzerland', yearly: true, description: 'Waypoint are all placed in Switzerland by local instructors and top pilots.', created: '2018-05-01', seasonStart: '10-01', seasonEnd: '09-31'},
    {id: 2, name: 'Austria', yearly: true, description: 'Waypoint are all placed in Austria by local instructors and top pilots.', created: '2018-05-02', seasonStart: '10-01', seasonEnd: '09-31'},
    {id: 3, name: 'Vol Liber Grischun Clubmeisterschaft', yearly: false, created: '2018-05-02', seasonStart: '2018-10-01', seasonEnd: '2048-10-01'},
  ],
  userLeagues: [
    {id: 1, user_id: 1, league_id: 1, isAdmin: true},
    {id: 2, user_id: 1, league_id: 2, isAdmin: true},
    {id: 3, user_id: 2, league_id: 1},
    {id: 4, user_id: 1, league_id: 3},
    {id: 5, user_id: 2, league_id: 3, isAdmin: true},
  ],
  files: [
    {id: 1, mimetype_id: 1, width: 250, height: 250, url: 'https://imgplaceholder.com/250x250/cccccc/757575/ion-happy-outline'},
    {id: 2, mimetype_id: 1, width: 800, height: 400, url: 'https://imgplaceholder.com/800x400/cccccc/757575/fa-image'},
    {id: 3, mimetype_id: 1, width: 300, height: 200, url: 'https://imgplaceholder.com/300x200/cccccc/757575/fa-map-marker'},
    {id: 4, mimetype_id: 3, url: 'https://mycdn.com/fredi-bach/2018-07-02-001.igc'},
    {id: 5, mimetype_id: 3, url: 'https://mycdn.com/fredi-bach/2018-07-03-001.igc'},
  ],
  mimetypes: [
    {id: 1, mime: 'image/png', description: 'Portable Network Graphics'},
    {id: 2, mime: 'image/jpeg', description: 'JPEG images'},
    {id: 3, mime: 'application/vnd.fai.igc', description: 'Flight track file'},
  ],
  types: [
    {id: 1, name: 'Challenge', description: 'A challenging waypoint, only for the best', points: 200},
    {id: 2, name: 'Altitude', description: 'A big mountain, that needs altitude to reach', points: 150},
    {id: 3, name: 'Beauty', description: 'Just a nice view', points: 100},
    {id: 4, name: 'Takeoff', description: 'Official takeoff', points: 10},
    {id: 5, name: 'Landing', description: 'Official landing', points: 10},
  ],
  waypoints: [
    {id: 1, league_id: 1, type_id: 1, lat: 3.789, lng: 41.987, radius: 400, points: 100, minAltitude: 3500, name: 'Oberalp Pass', description: 'From Andermatt to Disentis', file_id: 3},
    {id: 2, league_id: 1, type_id: 2, lat: 3.589, lng: 41.787, radius: 400, points: 100, minAltitude: 3500, name: 'Furka Pass', description: 'From the Goms to Andermatt', file_id: 3},
    {id: 3, league_id: 1, type_id: 4, lat: 3.889, lng: 40.787, radius: 400, points: 10, name: 'Fiesch'},
  ],
  waypointNotes: [
    {id: 1, waypoint_id: 1, noteType_id: 1, title: 'Föhn', text: 'Bei Föhn sehr gefährlich!'},
    {id: 2, waypoint_id: 1, noteType_id: 2, title: 'Basis', text: 'Braucht mindestens 3000 Meter Basis, besser mehr.'},
  ],
  waypointPhotos: [
    {id: 1, user_id: 1, official: true, waypoint_id: 1, mimetype_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-1.jpeg'},
    {id: 2, user_id: 1, official: true, waypoint_id: 1, mimetype_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-2.jpeg'},
    {id: 3, user_id: 2, official: false, waypoint_id: 1, mimetype_id: 2, width: 1080, height: 960, url: 'https://mycdn.com/fredi-bach/oberalp-2018-3.jpeg'},
  ],
  waypointSuggestions: [
    {id: 1, user_id: 2, league_id: 1, type_id: 1, lat: 11.789, lng: 33.987, radius: 800, points: 100, minAltitude: 3500, name: 'Limmeren Stausee', description: 'Auf dem Weg von der Surselva ins Glaernerland', file_id: 3},
  ],
  noteTypes: [
    {id: 1, name: 'Wind', icon: 'wind', class: 'waypoint-note-wind'},
    {id: 2, name: 'Altitude', icon: 'altitude', class: 'waypoint-note-altitude'},
  ],
  sponsors: [
    {id: 1, waypoint_id: 1, user_id: 1, name: 'Flugschule Appenzell', url: 'http://www.gleitschirm.ch', slogan: 'Die Flugschule im Alpstein.'},
    {id: 2, waypoint_id: 2, name: 'Ozone', url: 'http://www.flyozone.ch', slogan: 'Real world performance.'},
  ],
  waypointChats: [
    {id: 1, waypoint_id: 1, user_id: 1, message: 'Can be quite hard with low base!', datetime: '2018-07-02 12:48:45'},
    {id: 2, waypoint_id: 1, user_id: 2, message: 'Oh yes, it can!', datetime: '2018-07-02 12:52:11'},
  ],
  wings: [
    {id: 1, model: 'Zeno', brand: 'Ozone', certification: 'D'},
    {id: 2, model: 'Mentor 3', brand: 'Nova', certification: 'B'},
  ],
  flights: [
    {id: 1, user_id: 1, league_id: 1, wing_id: 1, date: '2018-07-02', score: 200, file_id: 4, comment: 'Bockig!'},
    {id: 2, user_id: 2, league_id: 1, wing_id: 2, date: '2018-07-03', score: 100, file_id: 5},
  ],
  favoriteFlights: [
    {id: 1, user_id: 1, flight_id: 2, datetime: '2018-07-02 12:48:45'},
  ],
  flightWaypoints: [
    {id: 1, flight_id: 1, waypoint_id: 1, datetime: '2018-07-02 12:48:45', score: 100},
    {id: 2, flight_id: 1, waypoint_id: 2, datetime: '2018-07-02 13:11:59', score: 100},
    {id: 3, flight_id: 2, waypoint_id: 2, datetime: '2018-08-02 14:06:11', score: 100},
  ],
  flightComments: [
    {id: 1, flight_id: 1, user_id: 2, datetime: '2018-08-02 14:06:11', text: 'Ok, that was nice!'},
    {id: 2, flight_id: 1, user_id: 1, datetime: '2018-08-02 14:09:11', text: 'Thanks'},
  ],
  leagueSeasonUserScores: [
    {id: 1, user_id: 1, league_id: 1, season: '2018', score: 200, flightCount: 1},
    {id: 2, user_id: 1, league_id: 2, season: '2018', score: 0, flightCount: 0},
    {id: 3, user_id: 2, league_id: 1, season: '2018', score: 100, flightCount: 1},
  ],
  routes: [
    {id: 1, user_id: 1, league_id: 1, name: 'Wallis Sightseeing', description: 'A great route for a low wind high cloudbase day.'},
    {id: 2, user_id: 1, league_id: 1, name: 'Surselva Adventure'},
  ],
  routeWaypoints: [
    {id: 1, route_id: 1, waypoint_id: 1},
    {id: 2, route_id: 1, waypoint_id: 2, routeWaypoint_id: 1},
    {id: 3, route_id: 1, waypoint_id: 3, routeWaypoint_id: 2},
  ],
  favoriteRoutes: [
    {id: 1, user_id: 1, route_id: 1, datetime: '2018-07-01 15:48:45'},
  ],
};

module.exports = db;
community/tools/json2graphql/test/test.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/bash
if [ -z "$TEST_HGE_URL" ];
then echo "ERROR: Please run the test command with the environment variable TEST_HGE_URL";
else ../bin/run $TEST_HGE_URL --db=./db.js --overwrite && node verify.js;
fi
community/tools/json2graphql/test/verify.js (new file, 37 lines)
@@ -0,0 +1,37 @@
const {query} = require('graphqurl');

const complexQuery = `
query {
  favoriteRoutes {
    routeByRoutesId {
      leagueByLeaguesId {
        flightsByLeaguesId {
          flightCommentsByFlightsId {
            userByUsersId {
              email
            }
          }
        }
      }
    }
  }
}
`;

const verifyDataImport = () => {
  query({
    query: complexQuery,
    endpoint: `${process.env.TEST_HGE_URL}/v1alpha1/graphql`,
    headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
  }).then(response => {
    if (response.data.favoriteRoutes[0].routeByRoutesId.leagueByLeaguesId.flightsByLeaguesId[0].flightCommentsByFlightsId[0].userByUsersId.email === 'osxcode@gmail.com') {
      console.log('✔︎ Test passed');
      process.exit();
    } else {
      console.log('✖ Test failed. Unexpected response.');
      console.log(response.data);
      process.exit(1);
    }
  });
};

verifyDataImport();