add firebase2graphql to community tools (#573)
This commit is contained in:
parent b084249e01
commit bdb73fe6d1

community/tools/firebase2graphql/.eslintignore (new file, 2 lines)
@@ -0,0 +1,2 @@
./test/db.js
test-db
community/tools/firebase2graphql/.eslintrc (new file, 19 lines)
@@ -0,0 +1,19 @@
{
  "extends": "oclif",
  "rules": {
    "max-params": "off",
    "no-console": "off",
    "max-depth": "off",
    "one-var": "off",
    "complexity": "off",
    "unicorn/no-process-exit": "off",
    "unicorn/filename-case": "off",
    "no-process-exit": "off",
    "no-throw-literal": "off",
    "node/no-unsupported-features": "off",
    "no-warning-comments": "off",
    "semi": [1, "always"],
    "camelcase": "off",
    "guard-for-in": "off"
  }
}
community/tools/firebase2graphql/.gitattributes (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
* text=auto
*.js text eol=lf
community/tools/firebase2graphql/.gitignore (new file, vendored, 8 lines)
@@ -0,0 +1,8 @@
*-debug.log
*-error.log
/.nyc_output
/dist
/tmp
/yarn.lock
test-db
node_modules
community/tools/firebase2graphql/CONTRIBUTING.md (new file, 29 lines)
@@ -0,0 +1,29 @@
# Contributing to firebase2graphql

## Issues

Please open an issue related to your work. Add the label `c/firebase2graphql`.

## Local development

1. Make changes and save
2. Run the executable in the `bin` directory to test your code. Treat the executable as the command. For example:

   ```
   $ bin/run --help
   ```

## Testing

Please run the tests before making pull requests.

To run the tests locally, you will need a running instance of [Hasura GraphQL Engine](https://github.com/hasura/graphql-engine). Then run:

```
$ TEST_HGE_URL=https://hge.herokuapp.com npm test
```

### Test data sets

Since Firebase RTD is a NoSQL database, very few sample data sets are available on the web. Because the tool takes a heuristic-based approach to converting the data, the more data we have, the better the results we can achieve. If you're aware of any such data sets, please consider adding them to the test suite (test/data-sets).
community/tools/firebase2graphql/README.md (new file, 222 lines)
@@ -0,0 +1,222 @@
# Firebase to GraphQL

This is a CLI tool to convert your Firebase Realtime Database into a realtime GraphQL API over Postgres.

[![oclif](https://img.shields.io/badge/cli-oclif-brightgreen.svg)](https://oclif.io)
[![Version](https://img.shields.io/npm/v/firebase2graphql.svg)](https://npmjs.org/package/firebase2graphql)

![GIF](https://graphql-engine-cdn.hasura.io/assets/firebase2graphql/demo.gif)

## Quick start

1. Quickly get the GraphQL Engine running by clicking this button:

   [![Deploy to heroku](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy?template=https://github.com/hasura/graphql-engine-heroku)

   Note the URL. It will be of the form: `https://<app-name>.herokuapp.com`

   > Check [this page](https://docs.hasura.io/1.0/graphql/manual/deployment/index.html) for other deployment options

2. Go to `Firebase console > Database > Realtime Database` and click on `Export JSON` in the options menu in the upper-right corner.

   ![firebase-export](assets/firebase-export.png)

   The exported JSON will look something like this:

   ```json
   {
     "posts" : {
       "-LMbLFOAW2q6GO1bD-5g" : {
         "author" : "Rishichandra Wawhal",
         "authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
         "body" : "My first post content\nAnd body\nANd structure",
         "starCount" : 0,
         "title" : "My first post",
         "uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
       },
       "-LMbLIv6VKHYul7p_PZ-" : {
         "author" : "Rishichandra Wawhal",
         "authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
         "body" : "AKsdjak\naklsdjaskldjklas\nasdklfjaklsdfjklsda\nasdklfjasklf",
         "starCount" : 0,
         "title" : "Whatta proaaa",
         "uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
       }
     },
     "user-posts" : {
       "4UPmbcaqZKT2NdAAqBahXj4tHYN2" : {
         "-LMbLFOAW2q6GO1bD-5g" : {
           "author" : "Rishichandra Wawhal",
           "authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
           "body" : "My first post content\nAnd body\nANd structure",
           "starCount" : 0,
           "title" : "My first post",
           "uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
         },
         "-LMbLIv6VKHYul7p_PZ-" : {
           "author" : "Rishichandra Wawhal",
           "authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
           "body" : "AKsdjak\naklsdjaskldjklas\nasdklfjaklsdfjklsda\nasdklfjasklf",
           "starCount" : 0,
           "title" : "Whatta proaaa",
           "uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
         }
       }
     },
     "users" : {
       "4UPmbcaqZKT2NdAAqBahXj4tHYN2" : {
         "email" : "rishichandrawawhal@gmail.com",
         "profile_picture" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
         "username" : "Rishichandra Wawhal"
       }
     }
   }
   ```

3. Use the CLI to import the data:

   ```
   npx firebase2graphql https://<app-name>.herokuapp.com --db=./path/to/db.json
   ```

4. That's it. You can now go to your GraphQL Engine URL `https://<app-name>.herokuapp.com` and make awesome GraphQL queries like:

   ```graphql
   query {
     posts {
       title
       body
       author
     }
     users {
       email
       username
     }
   }
   ```

Check out [next steps](#next-steps).

## Installation

### CLI

```bash
npm install -g firebase2graphql
```

## Usage

**Without access key**

```
firebase2graphql https://hge.herokuapp.com -d ./path/to/db.json
```

**With access key**

```
firebase2graphql https://hge.herokuapp.com -k <access-key> -d ./path/to/db.json
```

## Command

```bash
firebase2graphql URL [flags]
```

### Args

* `URL`: The URL where Hasura GraphQL Engine is running

### Options

- `-d, --db`: path to the JS file that exports your sample JSON database
- `-n, --normalize`: normalize the schema while importing (see the example below)
- `-o, --overwrite`: (experimental) overwrite tables if they already exist in the database
- `-v, --version`: show CLI version
- `-h, --help`: show CLI help

## Next steps

Once you have imported your data, it is recommended that you make it production ready.

1. Normalize the data by [removing duplicates](#duplicates).
2. Explore the GraphQL Engine console to play with things such as:

   - [Relationships](https://docs.hasura.io/1.0/graphql/manual/schema/relationships/index.html)
   - [Permissions](https://docs.hasura.io/1.0/graphql/manual/auth/index.html)
   - Using SQL
   - [Set up async business logic using event triggers](https://docs.hasura.io/1.0/graphql/manual/event-triggers/index.html)
   - [Create new tables](https://docs.hasura.io/1.0/graphql/manual/schema/basics.html)

3. Set appropriate permissions. GraphQL Engine comes with a [fine-grained access control layer](https://docs.hasura.io/1.0/graphql/manual/auth/index.html) that can be integrated with any standard auth provider.

## Usage comparison: Firebase SDK vs GraphQL

A typical single read from the database using the [Firebase SDK](https://firebase.google.com/docs/reference/) (JavaScript) looks something like this:

```javascript
firebase.database().ref('/users/' + userId).once('value').then(function (snapshot) {
  var username = (snapshot.val() && snapshot.val().username) || 'Anonymous';
  // ...
});
```

The equivalent GraphQL query looks like this:

```graphql
query {
  users(where: {uid: {_eq: userId}}) {
    uid
    username
  }
}
```

Similarly, a write to the database using the Firebase SDK looks something like this:

```javascript
firebase.database().ref('users/' + userId).set({
  username: name,
  email: email,
  profile_picture: imageUrl
});
```

And the equivalent GraphQL mutation looks like this:

```graphql
mutation {
  insert_users(objects: [{
    uid: userId,
    username: name,
    email: email,
    profile_picture: imageUrl
  }]) {
    affected_rows
  }
}
```

## Things to know about the implementation

### Duplicates

By default, the CLI gives you the exact API that you originally had in Firebase (over GraphQL, of course). But in that case, some duplicate tables might be created and you might not be able to leverage the complete power of GraphQL and Postgres.
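
For instance, in the sample export shown in the Quick start, every post under `user-posts` repeats a post that already exists under `posts`, so the import produces two tables carrying the same rows. A trimmed sketch of that duplication (field values shortened):

```json
{
  "posts": {
    "-LMbLFOAW2q6GO1bD-5g": { "title": "My first post", "uid": "4UPmbcaqZKT2NdAAqBahXj4tHYN2" }
  },
  "user-posts": {
    "4UPmbcaqZKT2NdAAqBahXj4tHYN2": {
      "-LMbLFOAW2q6GO1bD-5g": { "title": "My first post", "uid": "4UPmbcaqZKT2NdAAqBahXj4tHYN2" }
    }
  }
}
```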

In such cases, you have three choices:

1. Use the API as is, if you prefer the exact API.
2. Go to the UI console, delete the duplicates, and normalize the database as you see fit.
3. (Experimental) Use the `--normalize` flag. In this case, the CLI will detect duplicates and create appropriate relationships between root nodes. (This feature is experimental and needs more test cases to stabilize. Contributions are welcome.)

### Overwrite

If your database already contains tables with the same names as the root fields of your JSON database, the command will fail. If you want to overwrite the database anyway, pass the additional flag `--overwrite`, as shown below.
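
For example, re-running the import from the Quick start against a database that already has these tables (same placeholder URL and path):

```bash
firebase2graphql https://<app-name>.herokuapp.com -d ./path/to/db.json --overwrite
```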

## Feedback

This project is still in alpha and we are actively looking for feedback about how the tool can be improved. If you're facing an issue, feel free to [open one here](https://github.com/hasura/graphql-engine/issues/new). Any positive or negative feedback would be appreciated.

---
Maintained with ♡ by <a href="https://hasura.io">Hasura</a>
community/tools/firebase2graphql/assets/firebase-export.png (new file, binary, 76 KiB; not shown)
community/tools/firebase2graphql/bin/run (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env node

require('../src/command').run()
.catch(require('@oclif/errors/handle'))
community/tools/firebase2graphql/bin/run.cmd (new file, 3 lines)
@@ -0,0 +1,3 @@
@echo off

node "%~dp0\run" %*
community/tools/firebase2graphql/package-lock.json (new file, generated, 2420 lines; diff suppressed because it is too large)
community/tools/firebase2graphql/package.json (new file, 59 lines)
@@ -0,0 +1,59 @@
{
  "name": "firebase2graphql",
  "description": "A CLI tool to get a GraphQL API over a Firebase data dump",
  "version": "0.0.1-alpha6",
  "author": "Hasura",
  "bin": {
    "firebase2graphql": "./bin/run",
    "f2g": "./bin/run"
  },
  "bugs": "https://github.com/hasura/graphql-engine/issues?q=is%3Aissue+is%3Aopen+label%3Ac%2Ffirebase2graphql",
  "dependencies": {
    "@oclif/command": "^1.4.35",
    "@oclif/config": "^1.6.33",
    "@oclif/errors": "^1.1.2",
    "@oclif/plugin-help": "^2.0.5",
    "cli-ux": "^4.7.3",
    "colors": "^1.3.2",
    "graphqurl": "^0.3.2",
    "moment": "^2.22.2",
    "node-fetch": "^2.2.0",
    "uuid": "^3.3.2",
    "uuid-validate": "0.0.3"
  },
  "devDependencies": {
    "eslint": "^4.19.1",
    "eslint-config-oclif": "^1.5.1"
  },
  "engines": {
    "node": ">=8.0.0"
  },
  "files": [
    "/bin",
    "/src"
  ],
  "homepage": "https://github.com/wawhal/graphql-engine/tree/master/community/tools/firebase2graphql",
  "keywords": [
    "oclif",
    "cli",
    "graphql",
    "graphql-engine",
    "json",
    "firebase"
  ],
  "license": "MIT",
  "main": "src/command.js",
  "oclif": {
    "bin": "firebase2graphql"
  },
  "repository": "hasura/graphql-engine",
  "scripts": {
    "eslint": "eslint .",
    "eslintfix": "eslint . --fix",
    "posttest": "npm run eslint",
    "test": "cd test && ./test.sh"
  },
  "pre-commit": [
    "eslintfix"
  ]
}
community/tools/firebase2graphql/src/command.js (new file, 120 lines)
@@ -0,0 +1,120 @@
const {Command, flags} = require('@oclif/command');
const fetch = require('node-fetch');
const {CLIError} = require('@oclif/errors');
const throwError = require('./error');
const {spinnerStart, spinnerStop} = require('./log');
const resolve = require('path').resolve;
const importData = require('./import/import');

class Firebase2GraphQL extends Command {
  async run() {
    const {args, flags} = this.parse(Firebase2GraphQL);
    const {url} = args;
    if (!url) {
      throw new CLIError('endpoint is required: \'firebase2graphql <url>\'');
    }
    const {db, overwrite, normalize} = flags;
    const key = flags['access-key'];
    const safeUrl = this.getSafeUrl(url);
    if (!db) {
      throw new CLIError('path to firebase JSON database is required: \'firebase2graphql <url> -d ./db.js\'');
    }
    const dbJson = this.getDbJson(db);
    const headers = key ? {'x-hasura-access-key': key} : {};
    const urlVerification = await this.verifyUrl(safeUrl, headers);
    if (urlVerification.error) {
      throwError(`Message: ${urlVerification.message}`);
    } else {
      spinnerStop('Done!');
      await importData(dbJson, safeUrl, headers, overwrite, 1, normalize);
    }
  }

  getDbJson(db) {
    return require(resolve(db));
  }

  // Strip a trailing slash so endpoint paths can be appended safely
  getSafeUrl(url) {
    const urlLength = url.length;
    return url[urlLength - 1] === '/' ? url.slice(0, -1) : url;
  }

  // Verify that the URL points at a reachable GraphQL Engine instance
  // (and that the access key works) by running a harmless SQL query
  async verifyUrl(url, headers) {
    try {
      spinnerStart('Verifying URL');
      const resp = await fetch(
        `${url}/v1/query`,
        {
          method: 'POST',
          headers,
          body: JSON.stringify({
            type: 'run_sql',
            args: {
              sql: 'select * from hdb_catalog.hdb_version;',
            },
          }),
        }
      );
      return resp.status === 200 ? {error: false} : {error: true, message: 'invalid access key'};
    } catch (e) {
      return {error: true, message: 'invalid URL'};
    }
  }
}

Firebase2GraphQL.description = `firebase2graphql: Import JSON data to Hasura GraphQL Engine
# Examples:

# Import data from a Firebase JSON database to Hasura GraphQL Engine without an access key
firebase2graphql https://hge.herokuapp.com --db=./path/to/db.json

# Import data from a Firebase JSON database to Hasura GraphQL Engine with an access key
firebase2graphql https://hge.herokuapp.com --db=./path/to/db.json -k <access-key>

# Import data from a Firebase JSON database to Hasura GraphQL Engine while normalizing it
firebase2graphql https://hge.herokuapp.com --db=./path/to/db.json -n
`;

Firebase2GraphQL.usage = 'URL [-k KEY]';

Firebase2GraphQL.flags = {
  // add --version flag to show CLI version
  version: flags.version(),

  // add --help flag to show CLI help
  help: flags.help({char: 'h'}),

  // Access key to Hasura GraphQL Engine
  'access-key': flags.string({
    char: 'k',
    description: 'Access key to Hasura GraphQL Engine (X-Hasura-Access-Key)',
  }),

  db: flags.string({
    char: 'd',
    description: 'Path to the .js file that exports a JSON database',
  }),

  normalize: flags.boolean({
    char: 'n',
    description: 'Normalize the data as it is imported to GraphQL Engine',
  }),

  overwrite: flags.boolean({
    char: 'o',
    description: 'Overwrite tables if they exist',
  }),
};

Firebase2GraphQL.args = [
  {
    name: 'url',
    description: 'URL where Hasura GraphQL Engine is running',
  },
];

module.exports = Firebase2GraphQL;
community/tools/firebase2graphql/src/error.js (new file, 13 lines)
@@ -0,0 +1,13 @@
const {cli} = require('cli-ux');
const {log} = require('./log');
const colors = require('colors/safe');

module.exports = (message, preExitHook) => {
  cli.action.stop(colors.red('Error!'));
  if (preExitHook) {
    preExitHook(message);
  }
  console.log('');
  log(message, 'red');
  process.exit(1);
};
community/tools/firebase2graphql/src/firebase/generateGenericJson.js (new file, 244 lines)
@@ -0,0 +1,244 @@
// Flatten a Firebase Realtime Database export into a map of table name -> row array
const uuid = require('uuid/v4');

const throwError = require('../error');

const getPrimaryKeys = obj => {
  const pkeyMap = {};
  for (var pkey in obj) {
    if (pkey.indexOf('_id') === 0) {
      pkeyMap[pkey] = obj[pkey];
    }
  }
  return pkeyMap;
};

const getLastId = (obj, index = 0, selfGenerated = '') => {
  const id = index === 0 ? `_id${selfGenerated}` : `_id${selfGenerated}_${index}`;
  const nextIndex = index === 0 ? 2 : index + 1;
  if (!obj[`_id_${nextIndex}`]) {
    return id;
  }
  return getLastId(obj, nextIndex, selfGenerated);
};

const getIdNumber = (obj, index = 0, selfGenerated = '') => {
  const id = index === 0 ? `_id${selfGenerated}` : `_id${selfGenerated}_${index}`;
  const nextIndex = index === 0 ? 2 : index + 1;
  if (obj[id] === undefined) {
    return id;
  }
  return getIdNumber(obj, nextIndex, selfGenerated);
};

// A "random list" is a flat node whose values are all scalars
const isRandomList = obj => {
  if (!obj) {
    return false;
  }
  for (var objKey in obj) {
    if (obj[objKey] !== null && typeof obj[objKey] === 'object') {
      return false;
    }
  }
  return true;
};

// A "list" is a node of the form {key1: true, key2: true, ...}
const isList = obj => {
  if (Object.keys(obj).length === 0) {
    return false;
  }
  for (var objKey in obj) {
    if (obj[objKey] === null) {
      return false;
    }
    if (obj[objKey].constructor.name !== 'Boolean' || !obj[objKey]) {
      return false;
    }
  }
  return true;
};

// An "object list" is a node whose children are non-empty objects sharing field types
const isObjectList = obj => {
  if (obj === null || obj === undefined) {
    return false;
  }
  const listChildStructure = {};
  for (var key in obj) {
    if (obj[key] === null) {
      return false;
    }
    if (typeof obj[key] !== 'object') {
      return false;
    }
    if (Object.keys(obj[key]).length === 0) {
      return false;
    }

    for (var childKey in obj[key]) {
      if (!listChildStructure[childKey]) {
        if (obj[key][childKey] !== null && obj[key][childKey] !== undefined) {
          listChildStructure[childKey] = typeof obj[key][childKey];
        }
      } else if (obj[key][childKey] !== null && obj[key][childKey] !== undefined) {
        if (typeof obj[key][childKey] !== listChildStructure[childKey]) {
          return false;
        }
      }
    }
  }
  return true;
};

const handleTable = (obj, tableName, tableDetectedCallback) => {
  const rowArray = [];
  const flatten = (object, row, parent) => {
    if (isObjectList(object)) {
      const dummyRow = {...row};
      for (var objListKey in object) {
        row[getIdNumber(dummyRow)] = objListKey;
        const value = object[objListKey];
        const newRow = {...flatten(value, row)};
        if (newRow && Object.keys(newRow).length > 0) {
          rowArray.push(newRow);
        }
      }
    } else if (isList(object)) {
      for (var listKey in object) {
        const dummyRow = {...row};
        dummyRow[getIdNumber(dummyRow, null, 'self')] = uuid();
        dummyRow.value = listKey;
        if (Object.keys(dummyRow).length > 0) {
          rowArray.push(dummyRow);
        }
      }
    } else {
      for (var objectKey in object) {
        const value = object[objectKey];
        if (value === null || value.constructor.name !== 'Object') {
          row[objectKey] = value;
        } else if (value.constructor.name === 'Object') {
          const pkeyMap = getPrimaryKeys(row);
          if (isList(value)) {
            tableDetectedCallback(
              null,
              {
                tableName: parent || tableName,
                name: objectKey,
                pkeys: pkeyMap,
                data: Object.keys(value).map(item => ({__value: item})),
              }
            );
          } else if (isObjectList(value)) {
            tableDetectedCallback(
              null,
              {
                tableName: parent || tableName,
                name: objectKey,
                pkeys: pkeyMap,
                data: handleTable(value, `${tableName}_${objectKey}`, tableDetectedCallback),
              }
            );
          } else if (Object.keys(value).length !== 0) {
            const newUUID = uuid();
            row[`${tableName}_${objectKey}__idself`] = newUUID;
            tableDetectedCallback(
              {
                tableName,
                name: objectKey,
                data: flatten(value, {_idself: newUUID}, `${tableName}_${objectKey}`),
              }
            );
          }
        }
      }
      return row;
    }
  };
  if (!isObjectList(obj)) {
    if (isRandomList(obj)) {
      for (var objKey in obj) {
        rowArray.push({
          __key: objKey,
          __value: obj[objKey],
          _id: uuid(),
        });
      }
      return rowArray;
    }
    throwError('Message: invalid JSON provided for node ' + tableName);
  }
  for (var id in obj) {
    const flatRow = flatten(obj[id], {_id: id});
    if (flatRow && Object.keys(flatRow).length > 0) {
      rowArray.push(flatRow);
    }
  }
  return rowArray;
};

const handleJSONDoc = db => {
  const tablesMap = {};
  const generateNewTable = (objectRelMetadata, arrayRelMetadata) => {
    if (arrayRelMetadata) {
      const newTableName = `${arrayRelMetadata.tableName}_${arrayRelMetadata.name}`;
      const parentTableName = arrayRelMetadata.tableName;
      const pkeys = arrayRelMetadata.pkeys;
      if (!tablesMap[newTableName]) {
        tablesMap[newTableName] = [];
      }
      tablesMap[newTableName] = [
        ...tablesMap[newTableName],
        ...arrayRelMetadata.data.map(item => {
          const newItem = {
            ...item,
          };
          for (var pkey in pkeys) {
            newItem[`${parentTableName}_${pkey}`] = pkeys[pkey];
          }
          if (newItem._idself === undefined) {
            newItem[getLastId(newItem, 0, 'self')] = uuid();
          }
          return newItem;
        }),
      ];
    } else {
      const newTableName = objectRelMetadata.tableName + '_' + objectRelMetadata.name;
      const newItem = {
        ...objectRelMetadata.data,
      };
      if (!tablesMap[newTableName]) {
        tablesMap[newTableName] = [];
      }
      // let existingRow = null;
      // if (!tablesMap[newTableName].find(row => { // eslint-disable-line array-callback-return
      //   for (var column in row) {
      //     if (column.indexOf('_id') !== 0) {
      //       if (row[column] !== newItem[column]) {
      //         return false;
      //       }
      //     }
      //   }
      //   objectRelMetadata.callback(row._idself);
      //   return true;
      // })) {
      //   tablesMap[newTableName].push(newItem);
      //   if (objectRelMetadata.callback) {
      //     objectRelMetadata.callback();
      //   }
      // }
      tablesMap[newTableName].push(newItem);
    }
  };

  for (var tableName in db) {
    tablesMap[tableName] = handleTable(
      db[tableName],
      tableName,
      generateNewTable
    );
  }

  return tablesMap;
};

module.exports = handleJSONDoc;
community/tools/firebase2graphql/src/import/check.js (new file, 58 lines)
@@ -0,0 +1,58 @@
const fetch = require('node-fetch');
const {cli} = require('cli-ux');
const throwError = require('../error');

const createTables = async (tables, url, headers, overwrite, runSql, sql) => {
  if (overwrite) {
    cli.action.stop('Skipped!');
    cli.action.start('Creating tables');
    await runSql(sql, url, headers);
  } else {
    try {
      const resp = await fetch(
        `${url}/v1/query`,
        {
          method: 'POST',
          headers,
          body: JSON.stringify({
            type: 'select',
            args: {
              table: {
                name: 'hdb_table',
                schema: 'hdb_catalog',
              },
              columns: ['*.*'],
              where: {
                table_schema: 'public',
              },
            },
          }),
        }
      );
      const dbTables = await resp.json();
      if (resp.status === 401) {
        throw dbTables;
      } else {
        let found = false;
        tables.forEach(table => {
          if (dbTables.find(dbTable => dbTable.table_name === table.name)) {
            found = true;
            throwError('Message: Your JSON database contains tables that already exist in Postgres. Please use the flag "--overwrite" to overwrite them.');
          }
        });
        if (!found) {
          cli.action.stop('Done!');
          cli.action.start('Creating tables');
          await runSql(sql, url, headers);
        }
      }
    } catch (e) {
      console.log(e);
      throwError(e);
    }
  }
};

module.exports = {
  createTables,
};
community/tools/firebase2graphql/src/import/generateTables.js (new file, 90 lines)
@@ -0,0 +1,90 @@
const throwError = require('../error');
const validateUUID = require('uuid-validate');

// Infer a Postgres column type from a sample value
const getDataType = (data, column) => {
  if (typeof data === 'number') {
    return (data === parseInt(data, 10)) ? 'bigint' : 'numeric';
  }
  if (typeof data === 'string' || data === null) {
    if (data && validateUUID(data)) {
      return 'uuid';
    }
    return 'text';
  }
  if (typeof data === 'boolean') {
    return 'boolean';
  }
  if (data.constructor.name === 'Date') {
    return 'timestamptz';
  }
  if (data.constructor.name === 'Object') {
    return 'json';
  }
  throwError(`Message: invalid data type given for column ${column}: ${typeof data}`);
};

// A column like "<table>__id..." is a foreign key if <table> exists in the database
const isForeign = (name, db) => {
  const idPos = name.indexOf('__id');
  if (idPos <= 0) {
    return false;
  }
  if (Object.keys(db).find(tableName => tableName === name.substring(0, idPos))) {
    return true;
  }

  return false;
};

const getColumnData = (dataArray, db) => {
  if (dataArray.length === 0) {
    return [];
  }
  // Use the row with the most columns as the reference for the table schema
  const refRow = {
    numOfCols: 0,
    index: 0,
  };
  dataArray.forEach((row, i) => {
    if (Object.keys(row).length > refRow.numOfCols) {
      refRow.numOfCols = Object.keys(row).length;
      refRow.index = i;
    }
  });
  const refColumns = dataArray[refRow.index];
  const columnData = [];
  Object.keys(refColumns).forEach(column => {
    const columnMetadata = {};
    if (!column) {
      throwError("Message: column names can't be empty strings");
    }
    columnMetadata.name = column;
    const sampleData = refColumns[column];
    columnMetadata.type = getDataType(sampleData, column);
    columnMetadata.isForeign = isForeign(column, db);
    columnData.push(columnMetadata);
  });
  return columnData;
};

const generate = db => {
  const metaData = [];
  Object.keys(db).forEach(rootField => {
    if (db[rootField].length === 0) {
      return;
    }
    const tableMetadata = {};
    tableMetadata.name = rootField;
    tableMetadata.columns = getColumnData(db[rootField], db);
    tableMetadata.dependencies = [];
    tableMetadata.columns.forEach(column => {
      if (column.isForeign) {
        tableMetadata.dependencies.push(
          column.name.substring(0, column.name.indexOf('__id'))
        );
      }
    });
    metaData.push(tableMetadata);
  });
  return metaData;
};

module.exports = generate;
community/tools/firebase2graphql/src/import/import.js (new file, 46 lines)
@@ -0,0 +1,46 @@
const {spinnerStart, spinnerStop, log} = require('../log');
const generate = require('./generateTables');
const {refineJson} = require('./utils');
const {generateSql, runSql} = require('./sql');
const {trackTables} = require('./track');
const {getInsertOrder, insertData} = require('./insert');
const {createRelationships} = require('./relationships');
const {createTables} = require('./check');
const normalize = require('./normalize');
const generateGenericJson = require('../firebase/generateGenericJson');
const makeSuggestions = require('./suggest');

const importData = async (jsonDb, url, headers, overwrite, level = 1, shouldNormalize) => {
  spinnerStart('Processing Firebase JSON');
  const db = level === 1 ? refineJson(generateGenericJson(jsonDb)) : jsonDb;
  const tables = generate(db);
  const sql = generateSql(tables);
  spinnerStop('Done!');
  spinnerStart('Checking database');
  createTables(tables, url, headers, overwrite, runSql, sql).then(() => {
    spinnerStop('Done!');
    spinnerStart('Tracking tables');
    trackTables(tables, url, headers).then(() => {
      spinnerStop('Done!');
      spinnerStart('Creating relationships');
      createRelationships(tables, url, headers).then(() => {
        spinnerStop('Done!');
        const insertOrder = getInsertOrder(tables);
        insertData(insertOrder, db, tables, url, headers, success => {
          if (level <= 10 && shouldNormalize) {
            normalize(tables, db, url, headers, level, importData);
          } else if (success) {
            log('');
            log(`Success! Try out the GraphQL API at ${url}/console`, 'green');

            if (!shouldNormalize) {
              // suggest.js expects the imported data and the endpoint URL
              makeSuggestions(db, url);
            }
          }
        });
      });
    });
  });
};

module.exports = importData;
community/tools/firebase2graphql/src/import/insert.js (new file, 152 lines)
@@ -0,0 +1,152 @@
const {query} = require('graphqurl');
const fetch = require('node-fetch');
const moment = require('moment');
const throwError = require('../error');
const {log, spinnerStart, spinnerStop} = require('../log');

// Order tables so that foreign-key parents are inserted before their dependents
const getInsertOrder = tables => {
  let order = [];
  const tablesHash = {};
  tables.forEach(table => {
    tablesHash[table.name] = table;
  });
  const pushedHash = {};
  const setOrder = table => {
    if (table.dependencies.length === 0) {
      order.push(table.name);
      pushedHash[table.name] = true;
    } else {
      table.dependencies.forEach(parentTable => {
        if (!pushedHash[parentTable] && parentTable !== table.name) {
          setOrder(tablesHash[parentTable]);
        }
      });
      order.push(table.name);
      pushedHash[table.name] = true;
    }
  };

  tables.forEach(table => {
    if (!pushedHash[table.name]) {
      setOrder(table);
    }
  });
  return order;
};

// Serialize timestamps and embedded objects into formats Postgres accepts
const transformData = (data, tables) => {
  const newData = {};
  tables.forEach(table => {
    const tableData = data[table.name];
    newData[table.name] = [];
    tableData.forEach(row => {
      const newRow = {...row};
      table.columns.forEach(column => {
        if (column.type === 'timestamptz' && row[column.name]) {
          newRow[column.name] = moment(row[column.name]).format();
        }
        if (column.type === 'json' && row[column.name]) {
          newRow[column.name] = JSON.stringify(row[column.name]);
        }
      });
      newData[table.name].push(newRow);
    });
  });
  return newData;
};

// Truncate already-inserted data (cascading) so a failed import leaves a clean slate
const deleteDataTill = async (tableName, insertOrder, url, headers) => {
  spinnerStart('Restoring database to a safe state');
  const truncate = async tn => {
    const resp = await fetch(
      url,
      {
        method: 'POST',
        headers,
        body: JSON.stringify({
          type: 'run_sql',
          args: {
            sql: `truncate table public."${tn}" cascade;`,
            cascade: true,
          },
        }),
      }
    );
    if (tn === tableName) {
      spinnerStop('Done');
      return resp;
    }
  };
  if (insertOrder.length === 0) {
    return;
  }
  return truncate(insertOrder[0]);
};

// Insert rows table by table, 100 rows per mutation
const insertData = async (insertOrder, sampleData, tables, url, headers, callback) => {
  const transformedData = transformData(sampleData, tables);
  let numOfTables = insertOrder.length;
  const insertToTable = j => {
    if (j >= numOfTables) {
      callback(true);
      return true;
    }
    const tableName = insertOrder[j];
    const numOfRows = transformedData[tableName].length;
    let insertedRows = 0;
    const insertHundredRows = i => {
      let mutationString = '';
      let objectString = '';
      const variables = {};
      const numOfelementsToInsert = Math.min(numOfRows - insertedRows, 100);
      mutationString += `insert_${tableName} ( objects: $objects ) { affected_rows } \n`;
      objectString += `$objects: [${tableName}_insert_input!]!,\n`;
      variables.objects = [...transformedData[tableName].slice(i, numOfelementsToInsert + i)];
      const mutation = `mutation ( ${objectString} ) { ${mutationString} }`;
      spinnerStart(`Inserting ${i} to ${i + numOfelementsToInsert} rows of ${numOfRows} in table ${tableName}`);
      return query(
        {
          query: mutation,
          endpoint: `${url}/v1alpha1/graphql`,
          variables,
          headers,
        }
      ).then(response => {
        if (response.data) {
          spinnerStop('Done!');
          insertedRows += numOfelementsToInsert;
          if (insertedRows >= numOfRows) {
            return insertToTable(j + 1);
          }
          return insertHundredRows(i + 100);
        }
        deleteDataTill(tableName, insertOrder, url, headers).then(() => {
          throwError(
            JSON.stringify(response, null, 2),
            () => {
              log('Message: Schema has been imported. But the data could not be inserted due to the following error.', 'yellow');
              callback(false);
            }
          );
        });
      }).catch(e => {
        deleteDataTill(tableName, insertOrder, url, headers).then(() => {
          throwError(
            JSON.stringify(e, null, 2),
            () => {
              log('Message: Schema has been imported. But the data could not be imported due to the following error.', 'yellow');
              callback(false);
            }
          );
        });
      });
    };
    insertHundredRows(0);
  };
  return insertToTable(0);
};

module.exports = {
  getInsertOrder,
  insertData,
};
community/tools/firebase2graphql/src/import/normalize.js (new file, 263 lines)
@@ -0,0 +1,263 @@
const fetch = require('node-fetch');
const throwError = require('../error');
const {log, spinnerStart, spinnerStop} = require('../log');

// Tables generated from flat value lists carry a __value column; skip them
const shouldIgnoreTable = table => {
  return (table.columns.find(c => c.name === '__value'));
};

// Two tables are duplicate candidates if one's non-id columns are a subset of the other's
const getDupeCandidates = tables => {
  const dupes = [];
  for (var i = tables.length - 1; i >= 0; i--) {
    const table = tables[i];
    if (shouldIgnoreTable(table)) {
      continue;
    }
    for (var j = tables.length - 1; j >= 0; j--) {
      if (table.name !== tables[j].name) {
        const dupeSuspect = tables[j];
        if (shouldIgnoreTable(dupeSuspect)) {
          continue;
        }
        let isDupe = true;
        for (var k = dupeSuspect.columns.length - 1; k >= 0; k--) {
          const columnName = dupeSuspect.columns[k].name;
          if (columnName.indexOf('_id') < 0) {
            if (!table.columns.find(col => col.name === columnName)) {
              isDupe = false;
            }
          }
        }
        if (isDupe) {
          dupes.push({
            table1: table.name,
            table2: dupeSuspect.name,
            columnList: dupeSuspect.columns.filter(dupeCol => dupeCol.name.indexOf('_id') < 0).map(dupeCol => dupeCol.name),
          });
        }
      }
    }
  }
  return dupes;
};

// Confirm candidates by counting how many rows actually overlap in Postgres
const categorizeDupeCandidates = async (dupes, url, headers) => {
  const bulkQueryArgs = [];
  dupes.forEach(dupe => {
    const {table1, table2, columnList} = dupe;
    const table1Sql = `select count(public."${table1}".*) from public."${table1}";`;
    const overlapSql = `select count(public."${table2}".*) from public."${table1}", public."${table2}"`;
    let whereSql = '';
    columnList.forEach((column, i) => {
      whereSql += ` public."${table1}"."${column}" = public."${table2}"."${column}"`;
      whereSql += i === columnList.length - 1 ? '' : ' and ';
    });
    const sql = `${overlapSql} where ${whereSql};`;
    bulkQueryArgs.push({
      type: 'run_sql',
      args: {
        sql: table1Sql,
      },
    });
    bulkQueryArgs.push({
      type: 'run_sql',
      args: {
        sql,
      },
    });
  });
  const response = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      headers,
      body: JSON.stringify({
        type: 'bulk',
        args: bulkQueryArgs,
      }),
    }
  );
  const respObj = await response.json();
  if (response.status !== 200) {
    throwError('Message: Could not normalize your data');
  }
  const newDupes = {
    confirmed: [],
    unconfirmed: [],
  };
  dupes.forEach((dupe, i) => {
    // Each dupe contributed two queries: count at index 2i, overlap at 2i + 1
    const overlapResult = respObj[(i * 2) + 1].result[1][0];
    const table1Count = respObj[i * 2].result[1][0];
    if (!overlapResult || !table1Count) {
      throwError('Message: Could not normalize your data');
    }
    if (table1Count > 0 && overlapResult > 0) {
      if (table1Count === overlapResult) {
        newDupes.confirmed.push(dupe);
      } else if (overlapResult <= Number(table1Count) / 4) {
        newDupes.unconfirmed.push(dupe);
      } else {
        newDupes.confirmed.push(dupe);
      }
    }
  });
  return newDupes;
};

const patchDupeDependentTables = (table, dupe, tables, data, pkeyMap) => {
  const patchedData = {};
  tables.forEach(otherTable => {
    if (otherTable.name !== table && otherTable.name !== dupe) {
      if (otherTable.columns.find(column => column.name === `${dupe}__idself`)) {
        const newData = data[otherTable.name].map(row => {
          const newRow = {
            ...row,
          };
          newRow[`${table}__id`] = pkeyMap[row[`${dupe}__idself`]];
          delete newRow[`${dupe}__idself`];
          return newRow;
        });
        patchedData[otherTable.name] = newData;
      }
    }
  });
  return patchedData;
};

const handleConfirmedDupes = (confirmedDupes, tables, data) => {
  /*
    1. Go through the dupes
    2. Check which one of table1, table2 has _id (table) and _idself (dupe)
    3. Spread all fields of dupe in table
    4. Change column names and dependencies of all tables that have dupe as a dependency
  */
  let newData = {
    ...data,
  };
  const handle = (dupes, index) => {
    if (dupes.length === 0 || index > dupes.length - 1) {
      return;
    }
    const tableData = [];
    let table1, table2;
    const columnList = dupes[index].columnList;
    if (!newData[dupes[index].table1][0]._idself &&
      !newData[dupes[index].table2][0]._idself &&
      newData[dupes[index].table1][0]._id &&
      newData[dupes[index].table2][0]._id
    ) {
      if (dupes[index].table1.length > dupes[index].table2.length) {
        table2 = dupes[index].table1;
        table1 = dupes[index].table2;
      } else {
        table1 = dupes[index].table1;
        table2 = dupes[index].table2;
      }
    } else if (!newData[dupes[index].table1][0]._idself && newData[dupes[index].table1][0]._id) {
      table1 = dupes[index].table1;
      table2 = dupes[index].table2;
    } else if (!newData[dupes[index].table2][0]._idself && newData[dupes[index].table2][0]._id) {
      table2 = dupes[index].table1;
      table1 = dupes[index].table2;
    } else {
      handle(dupes, index + 1);
      return;
    }
    const table = tables.find(t => t.name === table1);
    const dupe = tables.find(t => t.name === table2);
    const pkeyMap = {};
    newData[table.name].forEach(tableRow => {
      const dLength = data[dupe.name].length;
      for (let j = 0; j < dLength; j++) {
        const dupeRow = newData[dupe.name][j];
        if (columnList.every(colName => dupeRow[colName] === tableRow[colName])) {
          const item = {};
          for (var key in dupeRow) {
            if (key.indexOf('_idself') === 0) {
              pkeyMap[dupeRow._idself] = tableRow._id;
            } else {
              item[key.replace(dupe.name + '_', table.name + '_')] = dupeRow[key];
            }
          }
          tableData.push({
            ...item,
            ...tableRow,
          });
          break;
        }
      }
    });
    newData[table.name] = tableData;
    delete newData[dupe.name];
    newData = {
      ...newData,
      ...patchDupeDependentTables(table.name, dupe.name, tables, newData, pkeyMap),
    };
    handle(
      dupes.filter(d => d.table1 !== table1 && d.table2 !== table1 && d.table1 !== table2 && d.table2 !== table2),
      0
    );
  };
  handle(confirmedDupes, 0);
  return newData;
};

const dropTables = async (tableList, url, headers) => {
  spinnerStop('Done!');
  spinnerStart('Deleting unnecessary tables');
  if (tableList.length === 0) {
    spinnerStop('Done');
    return true;
  }
  let sql = '';
  tableList.forEach(t => {
    sql += `drop table if exists public."${t}" cascade;`;
  });
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      headers,
      body: JSON.stringify({
        type: 'run_sql',
        args: {
          sql,
          cascade: true,
        },
      }),
    }
  );
  if (resp.status !== 200) {
    log('Message: Could not delete unnecessary tables. Your database might have some unnecessary tables.', 'yellow');
  }
  spinnerStop('Done');
  return true;
};

const normalize = async (tables, data, url, headers, level, importData) => {
  spinnerStart('Normalizing your data');
  const dupeCandidates = getDupeCandidates(tables);
  const maybeDupes = await categorizeDupeCandidates(dupeCandidates, url, headers);
  let newData;
  if (level === 10) {
    newData = handleConfirmedDupes(
      [...maybeDupes.confirmed, ...maybeDupes.unconfirmed],
      tables,
      data
    );
  } else {
    newData = handleConfirmedDupes(maybeDupes.confirmed, tables, data);
  }
  const tablesToDrop = tables.filter(t => newData[t.name] === undefined).map(tbl => tbl.name);
  const dropResp = await dropTables(tablesToDrop, url, headers);
  if (maybeDupes.unconfirmed.length === 0 && maybeDupes.confirmed.length === 0 && dropResp) {
    await importData(newData, url, headers, true, 11, true);
  } else {
    await importData(newData, url, headers, true, level + 1, true);
  }
};

module.exports = normalize;
community/tools/firebase2graphql/src/import/relationships.js (new file, 124 lines)
@@ -0,0 +1,124 @@
const fetch = require('node-fetch');
const throwError = require('../error');

// Build the `using` clause for an array relationship from a parent table to a child table
const getArrayRelType = (table, child) => {
  const columnMapping = {};
  let numOfMappings = 0;
  table.columns.forEach(col => {
    if (col.name.indexOf('_id') === 0) {
      numOfMappings++;
      columnMapping[col.name] = `${table.name}_${col.name}`;
    }
  });
  if (numOfMappings === 1) {
    return {
      foreign_key_constraint_on: {
        table: child.name,
        column: columnMapping[Object.keys(columnMapping)[0]],
      },
    };
  }
  return {
    manual_configuration: {
      remote_table: child.name,
      column_mapping: columnMapping,
    },
  };
};

// Build the `using` clause for an object relationship from a table to its dependency
const getObjRelType = (table, dep) => {
  const columnMapping = {};
  let numOfMappings = 0;
  table.columns.forEach(col => {
    if (col.name.indexOf(`${dep.name}__id`) === 0) {
      numOfMappings++;
      columnMapping[col.name] = col.name.substring(col.name.indexOf('_id'), col.name.length);
    }
  });
  if (numOfMappings === 1) {
    return {
      foreign_key_constraint_on: Object.keys(columnMapping)[0],
    };
  }
  return {
    manual_configuration: {
      remote_table: dep.name,
      column_mapping: columnMapping,
    },
  };
};

const generateRelationships = tables => {
  const objectRelationships = [];
  const arrayRelationships = [];
  tables.forEach(table => {
    if (table.dependencies.length > 0) {
      table.dependencies.forEach(dep => {
        const objUsing = getObjRelType(table, tables.find(t => t.name === dep));
        const arrUsing = getArrayRelType(tables.find(t => t.name === dep), table);
        const newObjRel = {
          type: 'create_object_relationship',
          args: {
            table: table.name,
            name: dep,
            using: objUsing,
          },
        };
        if (!objectRelationships.find(or => {
          return (
            or.args.table === newObjRel.args.table &&
            or.args.name === newObjRel.args.name
          );
        })) {
          objectRelationships.push(newObjRel);
        }
        const newArrRel = {
          type: 'create_array_relationship',
          args: {
            table: dep,
            name: table.name,
            using: arrUsing,
          },
        };
        if (!arrayRelationships.find(ar => {
          return (
            ar.args.table === newArrRel.args.table &&
            ar.args.name === newArrRel.args.name
          );
        })) {
          arrayRelationships.push(newArrRel);
        }
      });
    }
  });
  return {
    objectRelationships,
    arrayRelationships,
  };
};

const createRelationships = async (tables, url, headers) => {
  const relationships = generateRelationships(tables);
  const bulkQuery = {
    type: 'bulk',
    args: [],
  };
  relationships.objectRelationships.forEach(or => bulkQuery.args.push(or));
  relationships.arrayRelationships.forEach(ar => bulkQuery.args.push(ar));
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      body: JSON.stringify(bulkQuery),
      headers,
    }
  );
  if (resp.status !== 200) {
    const error = await resp.json();
    throwError(JSON.stringify(error, null, 2));
  }
};

module.exports = {
  createRelationships,
};
community/tools/firebase2graphql/src/import/sql.js (new file, 96 lines)
@@ -0,0 +1,96 @@
const fetch = require('node-fetch');
const throwError = require('../error');

const runSql = async (sqlArray, url, headers) => {
  let sqlString = '';
  sqlArray.forEach(sql => {
    sqlString += sql;
  });
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      body: JSON.stringify({
        type: 'run_sql',
        args: {
          sql: sqlString,
          cascade: true,
        },
      }),
      headers,
    }
  );
  if (resp.status !== 200) {
    const error = await resp.json();
    throwError(JSON.stringify(error, null, 2));
  }
};

// Every column whose name starts with "_id" becomes part of the composite primary key
const generateCreateTableSql = metadata => {
  const sqlArray = [];
  metadata.forEach(table => {
    sqlArray.push(`drop table if exists public."${table.name}" cascade;`);
    let columnSql = '(';
    const pkeyArr = [];
    table.columns.forEach((column, i) => {
      if (column.name.indexOf('_id') === 0) {
        pkeyArr.push(column.name);
        columnSql += `"${column.name}" ${column.type} not null,`;
      } else {
        columnSql += `"${column.name}" ${column.type},`;
      }

      if (table.columns.length === i + 1) {
        columnSql += 'primary key (';
        pkeyArr.forEach((key, j) => {
          columnSql += `"${key}"`;
          columnSql += j === pkeyArr.length - 1 ? ')' : ', ';
        });
      }
    });
    const createTableSql = `create table public."${table.name}" ${columnSql});`;
    sqlArray.push(createTableSql);
  });
  return sqlArray;
};

const foreignKeySql = table => {
  const sqlArray = [];
  table.dependencies.forEach((dep, i) => {
    let colNames = '';
    let fks = '';
    table.columns.forEach(col => {
      if (col.name.indexOf(`${dep}__id`) === 0) {
        colNames += `"${col.name}", `;
        fks += `"${col.name.substring(col.name.indexOf('_id'), col.name.length)}", `;
      }
    });
    fks = fks.substring(0, fks.length - 2);
    colNames = colNames.substring(0, colNames.length - 2);
    sqlArray.push(`alter table "${table.name}" add constraint "fk_${table.name}_${dep}_${i}" foreign key (${colNames}) references "${dep}"(${fks});`);
  });
  return sqlArray;
};

const generateConstraintsSql = metadata => {
  let sqlArray = [];
  metadata.forEach(table => {
    sqlArray = [
      ...sqlArray,
      ...foreignKeySql(table),
    ];
  });
  return sqlArray;
};

const generateSql = metadata => {
  const createTableSql = generateCreateTableSql(metadata);
  const constraintsSql = generateConstraintsSql(metadata);
  return [...createTableSql, ...constraintsSql];
};

module.exports = {
  generateSql,
  runSql,
};
community/tools/firebase2graphql/src/import/suggest.js (new file, 79 lines)
@@ -0,0 +1,79 @@
const {log} = require('../log');
const colors = require('colors/safe');

const isSubset = (array1, array2) => {
  return array2.every(item => array1.includes(item));
};

const getTableColumns = obj => {
  const columns = {};
  for (var key in obj) {
    if (key.indexOf('_id') === -1) {
      columns[key] = [];
    }
  }
  return columns;
};

// Map every table to its non-id columns and the values they hold
const getColumnsMap = db => {
  const columnMap = {};
  for (var tableName in db) {
    columnMap[tableName] = getTableColumns(db[tableName][0]);
    db[tableName].forEach(row => {
      for (var key in columnMap[tableName]) {
        columnMap[tableName][key].push(row[key]);
      }
    });
  }
  return columnMap;
};

const getDuplicates = db => {
  const tableColumnMap = getColumnsMap(db);
  const maybeDuplicates = {};
  for (var t1 in tableColumnMap) {
    if (!maybeDuplicates[t1]) {
      maybeDuplicates[t1] = [];
    }
    for (var t2 in tableColumnMap) {
      if (!maybeDuplicates[t2]) {
        maybeDuplicates[t2] = [];
      }
      if (t1 !== t2) {
        for (var key in tableColumnMap[t1]) {
          if (tableColumnMap[t2][key]) {
            if (isSubset(tableColumnMap[t1][key], tableColumnMap[t2][key])) {
              maybeDuplicates[t1].push(t2);
              break;
            }
          }
        }
      }
    }
  }
  return maybeDuplicates;
};

const suggest = (db, url) => {
  const maybeDuplicates = getDuplicates(db);
  const newDuplicates = {
    ...maybeDuplicates,
  };

  let count = 1;
  const dupes = [];
  for (var tableName in newDuplicates) {
    maybeDuplicates[tableName].forEach(dup => {
      dupes.push(`${count++}. ${colors.yellow(tableName)} could be same as ${colors.yellow(dup)}`);
    });
  }
  if (dupes.length > 0) {
    log('');
    log('Warning:', 'yellow');
    log('While importing your data, the following duplicate tables might have been created:', 'yellow');
    dupes.forEach(dupe => log(dupe));
    log(`You can either re-run the command with the flag "--normalize", or normalize your database yourself at ${url}/console/data/schema/public`, 'yellow');
  }
};

module.exports = suggest;
community/tools/firebase2graphql/src/import/track.js (new file, 35 lines)
@@ -0,0 +1,35 @@
const fetch = require('node-fetch');
const throwError = require('../error');

const trackTables = async (tables, url, headers) => {
  const bulkQueryArgs = [];
  tables.forEach(table => {
    bulkQueryArgs.push({
      type: 'add_existing_table_or_view',
      args: {
        name: table.name,
        schema: 'public',
      },
    });
  });
  const bulkQuery = {
    type: 'bulk',
    args: bulkQueryArgs,
  };
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      body: JSON.stringify(bulkQuery),
      headers,
    }
  );
  if (resp.status !== 200) {
    const error = await resp.json();
    throwError(JSON.stringify(error, null, 2));
  }
};

module.exports = {
  trackTables,
};
community/tools/firebase2graphql/src/import/utils.js (new file, 19 lines)
@@ -0,0 +1,19 @@
// Replace characters that are not valid in Postgres identifiers with underscores
const refineJson = db => {
  const newDb = {};
  for (var tableName in db) {
    const newTableName = tableName.replace(/[^a-zA-Z0-9]/g, '_');
    newDb[newTableName] = [];
    db[tableName].forEach(row => {
      const newRow = {};
      for (var colName in row) {
        newRow[colName.replace(/[^a-zA-Z0-9]/g, '_')] = row[colName];
      }
      newDb[newTableName].push(newRow);
    });
  }
  return newDb;
};

module.exports = {
  refineJson,
};
community/tools/firebase2graphql/src/log.js (new file, 25 lines)
@@ -0,0 +1,25 @@
const colors = require('colors/safe');
const {cli} = require('cli-ux');

const log = (message, color) => {
  if (color) {
    console.log(colors[color](message));
  } else {
    console.log(message);
  }
};

const spinnerStart = message => {
  cli.action.start(message);
};

// Call sites pass a message ('Done!', 'Done', ...); fall back to 'Done!' if none is given
const spinnerStop = message => {
  cli.action.stop(colors.green(message || 'Done!'));
};

module.exports = {
  log,
  spinnerStop,
  spinnerStart,
};
community/tools/firebase2graphql/test/db.json (new file, 48879 lines; diff suppressed because it is too large)
community/tools/firebase2graphql/test/test.sh (new executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/bash
if [ -z "$TEST_HGE_URL" ]; then
  echo "ERROR: Please run the test command with the environment variable TEST_HGE_URL"
else
  ../bin/run $TEST_HGE_URL --access-key=$TEST_X_HASURA_ACCESS_KEY --db=./db.json --overwrite && node verify.js
fi
community/tools/firebase2graphql/test/verify.js (new file, 66 lines)
@@ -0,0 +1,66 @@
const {query} = require('graphqurl');
const fetch = require('node-fetch');
const colors = require('colors/safe');

const complexQuery = `
query {
  f2g_test_Album (
    order_by:_id_asc
  ){
    _id
    f2g_test_Album_artist {
      Name
      ArtistId
    }
    f2g_test_Album_tracks (
      order_by: Name_asc
    ) {
      Name
      Composer
    }
  }
}
`;

// Query the imported test data, check a couple of known rows, then drop the test tables
const verifyDataImport = () => {
  query({
    query: complexQuery,
    endpoint: `${process.env.TEST_HGE_URL}/v1alpha1/graphql`,
    headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
  }).then(response => {
    if (
      response.data.f2g_test_Album[0].f2g_test_Album_artist.ArtistId === 1 &&
      response.data.f2g_test_Album[0].f2g_test_Album_tracks[0].Name === 'Breaking The Rules'
    ) {
      let sqlString = '';
      ['Album', 'Album_artist', 'Album_tracks'].forEach(t => {
        sqlString += `drop table public."f2g_test_${t}" cascade;`;
      });
      fetch(
        `${process.env.TEST_HGE_URL}/v1/query`,
        {
          method: 'POST',
          headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
          body: JSON.stringify({
            type: 'run_sql',
            args: {
              sql: sqlString,
              cascade: true,
            },
          }),
        }
      ).then(() => {
        console.log(colors.green('✔︎ Test passed'));
        process.exit();
      }).catch(() => {
        process.exit();
      });
    } else {
      console.log(colors.red('✖ Test failed. Unexpected response.'));
      console.log(response.data);
      process.exit();
    }
  });
};

verifyDataImport();