improve normalization for firebase2graphql (#621)

This commit is contained in:
Rishichandra Wawhal 2018-10-04 09:09:42 +05:30 committed by Shahidh K Muhammed
parent 509c8b89ed
commit 30273d354d
19 changed files with 96454 additions and 236 deletions

View File

@ -26,49 +26,112 @@ A CLI tool to help you try realtime GraphQL on your firebase data. It takes data
```json
{
"posts" : {
"-LMbLFOAW2q6GO1bD-5g" : {
"author" : "Rishichandra Wawhal",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "My first post content\nAnd body\nANd structure",
"starCount" : 0,
"title" : "My first post",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
},
"-LMbLIv6VKHYul7p_PZ-" : {
"author" : "Rishichandra Wawhal",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "AKsdjak\naklsdjaskldjklas\nasdklfjaklsdfjklsda\nasdklfjasklf",
"starCount" : 0,
"title" : "Whatta proaaa",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
}
},
"user-posts" : {
"4UPmbcaqZKT2NdAAqBahXj4tHYN2" : {
"-LMbLFOAW2q6GO1bD-5g" : {
"author" : "Rishichandra Wawhal",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "My first post content\nAnd body\nANd structure",
"starCount" : 0,
"title" : "My first post",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
"Articles": {
"A1": {
"Title": "Title1",
"Body": "Body1",
"IsUnpublished": false,
"Author": {
"Name": "AName1",
"Age": 11
},
"-LMbLIv6VKHYul7p_PZ-" : {
"author" : "Rishichandra Wawhal",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "AKsdjak\naklsdjaskldjklas\nasdklfjaklsdfjklsda\nasdklfjasklf",
"starCount" : 0,
"title" : "Whatta proaaa",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
"Comments": {
"C1": {
"Body": "Comment1",
"Author": {
"Name": "AName2",
"Sex": "M"
},
"Date": "22-09-2018"
},
"C2": {
"Body": "Comment2",
"Author": {
"Name": "AName1",
"Sex": "F"
},
"Date": "21-09-2018"
}
}
},
"A2": {
"Title": "Title2",
"Body": "Body2",
"IsUnpublished": true,
"Author": {
"Name": "AName2",
"Age": 22
},
"Comments": {
"C3": {
"Body": "Comment1",
"Author": {
"Name": "AName1",
"Sex": "F"
},
"Date": "23-09-2018"
},
"C4": {
"Body": "Comment2",
"Author": {
"Name": "AName2",
"Sex": "M"
},
"Date": "24-09-2018"
}
}
}
},
"users" : {
"4UPmbcaqZKT2NdAAqBahXj4tHYN2" : {
"email" : "rishichandrawawhal@gmail.com",
"profile_picture" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"username" : "Rishichandra Wawhal"
"Authors": {
"AT1": {
"Name": "AName1",
"Age": 11,
"Sex": "F",
"Articles": {
"A1": {
"Title": "Title1"
}
}
},
"AT2": {
"Name": "AName2",
"Age": 22,
"Sex": "M",
"Articles": {
"A2": {
"Title": "Title2"
}
}
}
},
"Comments": {
"C1": {
"Body": "Comment1",
"Author": {
"Name": "AName2"
},
"Date": "22-09-2018"
},
"C2": {
"Body": "Comment2",
"Author": {
"Name": "AName1"
},
"Date": "21-09-2018"
},
"C3": {
"Body": "Comment1",
"Author": {
"Name": "AName1"
},
"Date": "23-09-2018"
},
"C4": {
"Body": "Comment2",
"Author": {
"Name": "AName2"
},
"Date": "24-09-2018"
}
}
}
@ -77,21 +140,35 @@ A CLI tool to help you try realtime GraphQL on your firebase data. It takes data
4. Use the CLI to import the data:
```
npx firebase2graphql https://<app-name>.herokuapp.com --db=./path/to/db.json
npx firebase2graphql https://<app-name>.herokuapp.com --db=./path/to/db.json --normalize
```
5. That's it. You can now go to your GraphQL Engine URL `https://<app-name>.herokuapp.com` and make awesome GraphQL Queries like:
```graphql
query {
posts {
title
body
author
}
users {
email
username
Authors (order_by: Name_asc){
Name
Age
Sex
Articles (
order_by: Title_asc
where: {
IsUnpublished: {
_eq: false
}
}
){
Title
Body
Comments (order_by: Date_desc){
Body
Authors {
Name
}
Date
}
}
}
}
```
@ -200,15 +277,34 @@ mutation {
## Things to know about implementation
### Working
We flatten the JSON database into tables and create children tables when data nesting is detected.
In this way, you get almost the exact API over GraphQL that you had on Firebase.
If you use the flag `--normalize`, the CLI finds out if the children tables are duplicates of the original tables and tries to normalize the data by removing duplicates and creating respective relationships.
### Normalization
The CLI provides a flag called `--normalize` if you want to normalize your denormalized database.
A lot of guess-work is done by the CLI while normalizing the database. Here are some things you need to know:
1. Root level tables are never deleted. So if there are some relationships that you wish to create manually, you can do so.
2. Children tables are deleted if they are detected to be duplicates of some other root or child table.
3. In case of some children tables, when the data lacks a unique identifier, an extra unique field is added. In most cases, this field gets deleted while merging a duplicate table with the original table.
### Duplicates
By default, the CLI gives you the exact API that you originally had in Firebase (of course, over GraphQL). But in that case, some duplicate tables might be created and you might not be able to leverage the complete power of GraphQL and Postgres.
By default, the CLI gives you almost the exact API that you originally had in Firebase (of course, over GraphQL). But in that case, some duplicate tables might be created and you might not be able to leverage the complete power of GraphQL and Postgres.
In such cases, you have three choices:
1. Use the API as such if you prefer the exact API.
2. Go to the UI Console and delete the duplicates and normalize the database as you feel fit.
3. (Experimental) Use the `--normalize` flag. In this case, the CLI will detect duplicates and make appropriate relationships between root nodes. (This feature is experimental and needs more test cases to get stable. Contributions are welcome)
3. Use the `--normalize` flag and rerun the migration. In this case, the CLI will detect duplicates and make appropriate relationships between root nodes. (This feature is experimental and needs more test cases to attain stability. Contributions are welcome)
### Overwrite

View File

@ -1,7 +1,7 @@
{
"name": "firebase2graphql",
"description": "A CLI tool to get GraphQL over Firebase data dump",
"version": "0.0.1-alpha6",
"version": "0.0.1-alpha7",
"author": "Hasura",
"bin": {
"firebase2graphql": "./bin/run",

View File

@ -1,102 +1,22 @@
const uuid = require('uuid/v4');
const {
getParentPrimaryKeyMap,
getLastPrimaryKey,
getPrimaryKeyName,
isRandomList,
isList,
isObjectList,
} = require('./utils');
const throwError = require('../error');
const getPrimaryKeys = obj => {
const pkeyMap = {};
for (var pkey in obj) {
if (pkey.indexOf('_id') === 0) {
pkeyMap[pkey] = obj[pkey];
}
}
return pkeyMap;
};
const getLastId = (obj, index = 0, selfGenerated = '') => {
const id = index === 0 ? `_id${selfGenerated}` : `_id${selfGenerated}_${index}`;
const nextIndex = index === 0 ? 2 : index + 1;
if (!obj[`_id_${nextIndex}`]) {
return id;
}
getLastId(obj, nextIndex, selfGenerated);
};
const getIdNumber = (obj, index = 0, selfGenerated = '') => {
const id = index === 0 ? `_id${selfGenerated}` : `_id${selfGenerated}_${index}`;
const nextIndex = index === 0 ? 2 : index + 1;
if (obj[id] === undefined) {
return id;
}
return getIdNumber(obj, nextIndex, selfGenerated);
};
const isRandomList = obj => {
if (!obj) {
return false;
}
for (var objKey in obj) {
if (obj[objKey] !== null && typeof obj[objKey] === 'object') {
return false;
}
}
return true;
};
const isList = obj => {
if (Object.keys(obj).length === 0) {
return false;
}
for (var objKey in obj) {
if (obj[objKey] === null) {
return false;
}
if (obj[objKey].constructor.name !== 'Boolean' || !obj[objKey]) {
return false;
}
}
return true;
};
const isObjectList = obj => {
if (obj === null || obj === undefined) {
return false;
}
const listChildStructure = {};
for (var key in obj) {
if (obj[key] === null) {
return false;
}
if (typeof obj[key] !== 'object') {
return false;
}
if (Object.keys(obj[key]).length === 0) {
return false;
}
for (var childKey in obj[key]) {
if (!listChildStructure[childKey]) {
if (obj[key][childKey] !== null && obj[key][childKey] !== undefined) {
listChildStructure[childKey] = typeof obj[key][childKey];
}
} else if (obj[key][childKey] !== null && obj[key][childKey] !== undefined) {
if (typeof obj[key][childKey] !== listChildStructure[childKey]) {
return false;
}
}
}
}
return true;
};
const handleTable = (obj, tableName, tableDetectedCallback) => {
const handleTableCandidate = (obj, tableName, tableDetectedCallback, isRootLevel) => {
const rowArray = [];
const flatten = (object, row, parent) => {
const flattenObject = (object, row, parent) => {
if (isObjectList(object)) {
const dummyRow = {...row};
for (var objListKey in object) {
row[getIdNumber(dummyRow)] = objListKey;
const value = object[objListKey];
const newRow = {...flatten(value, row)};
row[getPrimaryKeyName(dummyRow)] = objListKey;
const newRow = {...flattenObject(object[objListKey], row)};
if (newRow && Object.keys(newRow).length > 0) {
rowArray.push(newRow);
}
@ -104,8 +24,8 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
} else if (isList(object)) {
for (var listKey in object) {
const dummyRow = {...row};
dummyRow[getIdNumber(dummyRow, null, 'self')] = uuid();
dummyRow.value = listKey;
dummyRow[getPrimaryKeyName(dummyRow, null, 'self')] = uuid();
dummyRow.__value = listKey;
if (Object.keys(dummyRow).length > 0) {
rowArray.push(dummyRow);
}
@ -116,7 +36,7 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
if (value === null || value.constructor.name !== 'Object') {
row[objectKey] = value;
} else if (value.constructor.name === 'Object') {
const pkeyMap = getPrimaryKeys(row);
const pkeyMap = getParentPrimaryKeyMap(row);
if (isList(value)) {
tableDetectedCallback(
null,
@ -134,7 +54,7 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
tableName: parent || tableName,
name: objectKey,
pkeys: pkeyMap,
data: handleTable(value, `${tableName}_${objectKey}`, tableDetectedCallback),
data: handleTableCandidate(value, `${parent || tableName}_${objectKey}`, tableDetectedCallback, false),
}
);
} else if (Object.keys(value).length !== 0) {
@ -144,7 +64,7 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
{
tableName,
name: objectKey,
data: flatten(value, {_idself: newUUID}, `${tableName}_${objectKey}`),
data: flattenObject(value, {_idself: newUUID}, `${tableName}_${objectKey}`),
}
);
}
@ -154,6 +74,15 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
}
};
if (!isObjectList(obj)) {
if (isList(obj)) {
for (var listKey in obj) {
rowArray.push({
__value: listKey,
_id: uuid(),
});
}
return rowArray;
}
if (isRandomList(obj)) {
for (var objKey in obj) {
rowArray.push({
@ -167,7 +96,12 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
throwError('Message: invalid JSON provided for node ' + tableName);
}
for (var id in obj) {
const flatRow = flatten(obj[id], {_id: id});
const randomUUID = uuid();
const initialRow = {_id: id};
if (!isRootLevel) {
initialRow._idself = randomUUID;
}
const flatRow = flattenObject(obj[id], initialRow);
if (flatRow && Object.keys(flatRow).length > 0) {
rowArray.push(flatRow);
}
@ -175,7 +109,7 @@ const handleTable = (obj, tableName, tableDetectedCallback) => {
return rowArray;
};
const handleJSONDoc = db => {
const handleFirebaseJson = db => {
const tablesMap = {};
const generateNewTable = (objectRelMetadata, arrayRelMetadata) => {
if (arrayRelMetadata) {
@ -195,7 +129,7 @@ const handleJSONDoc = db => {
newItem[`${parentTableName}_${pkey}`] = pkeys[pkey];
}
if (newItem._idself === undefined) {
newItem[getLastId(newItem, 0, 'self')] = uuid();
newItem[getLastPrimaryKey(newItem, 0, 'self')] = uuid();
}
return newItem;
}),
@ -208,37 +142,25 @@ const handleJSONDoc = db => {
if (!tablesMap[newTableName]) {
tablesMap[newTableName] = [];
}
// let existingRow = null;
// if (!tablesMap[newTableName].find(row => { // eslint-disable-line array-callback-return
// for (var column in row) {
// if (column.indexOf('_id') !== 0) {
// if (row[column] !== newItem[column]) {
// return false;
// }
// }
// }
// objectRelMetadata.callback(row._idself);
// return true;
// })) {
// tablesMap[newTableName].push(newItem);
// if (objectRelMetadata.callback) {
// objectRelMetadata.callback();
// }
// }
tablesMap[newTableName].push(newItem);
}
};
const topLevelTables = [];
for (var tableName in db) {
tablesMap[tableName] = handleTable(
topLevelTables.push({
_id: uuid(),
__tableName: tableName.replace(/[^a-zA-Z0-9]/g, '_'),
});
tablesMap[tableName] = handleTableCandidate(
db[tableName],
tableName,
generateNewTable
generateNewTable,
true
);
}
tablesMap.__rootTables = topLevelTables;
return tablesMap;
};
module.exports = handleJSONDoc;
module.exports = handleFirebaseJson;

View File

@ -0,0 +1,94 @@
/**
 * Collects every primary-key style column (any key starting with `_id`)
 * from a row, mapping the column name to its value.
 *
 * @param {Object} obj - Row object to scan.
 * @returns {Object} Map of `_id*` column names to their values.
 */
const getParentPrimaryKeyMap = obj => {
  const pkeyMap = {};
  Object.keys(obj)
    .filter(key => key.indexOf('_id') === 0)
    .forEach(key => {
      pkeyMap[key] = obj[key];
    });
  return pkeyMap;
};
/**
 * Walks the `_id`-style primary-key columns of a row and returns the name of
 * the last one present in the sequence `_id`, `_id_2`, `_id_3`, … (or
 * `_idself`, `_idself_2`, … when `selfGenerated` is `'self'`).
 *
 * @param {Object} obj - Row object whose keys are inspected.
 * @param {number} [index=0] - Current position in the key sequence.
 * @param {string} [selfGenerated=''] - Suffix inserted after `_id` (e.g. 'self').
 * @returns {string} Name of the last primary-key column present.
 */
const getLastPrimaryKey = (obj, index = 0, selfGenerated = '') => {
  const id = index === 0 ? `_id${selfGenerated}` : `_id${selfGenerated}_${index}`;
  const nextIndex = index === 0 ? 2 : index + 1;
  // Fixed two defects: the existence check must use the same `selfGenerated`
  // suffix that the key names are built with (it previously checked the
  // suffix-less `_id_<n>`), and the recursive result must be returned — the
  // original discarded it, so any input requiring recursion yielded
  // `undefined` and callers created a literal "undefined" column.
  if (!obj[`_id${selfGenerated}_${nextIndex}`]) {
    return id;
  }
  return getLastPrimaryKey(obj, nextIndex, selfGenerated);
};
/**
 * Returns the first unused primary-key column name for a row, trying `_id`,
 * then `_id_2`, `_id_3`, … (with an optional `selfGenerated` suffix, e.g.
 * `_idself`, `_idself_2`, …) until it finds a key the row does not have.
 *
 * @param {Object} obj - Row object whose existing keys are consulted.
 * @param {number} [index=0] - Sequence position to start probing from.
 * @param {string} [selfGenerated=''] - Suffix inserted after `_id`.
 * @returns {string} First free primary-key column name.
 */
const getPrimaryKeyName = (obj, index = 0, selfGenerated = '') => {
  // Iterative form of the original tail recursion; probes the same sequence.
  let currentIndex = index;
  for (;;) {
    const candidate = currentIndex === 0 ?
      `_id${selfGenerated}` :
      `_id${selfGenerated}_${currentIndex}`;
    if (obj[candidate] === undefined) {
      return candidate;
    }
    currentIndex = currentIndex === 0 ? 2 : currentIndex + 1;
  }
};
/**
 * Checks whether a node is a flat list of scalars: a truthy value whose
 * entries are all primitives (or null). Any nested object disqualifies it.
 *
 * @param {*} obj - Candidate node from the Firebase JSON tree.
 * @returns {boolean} true when every entry is null or non-object.
 */
const isRandomList = obj => {
  if (!obj) {
    return false;
  }
  return Object.keys(obj).every(
    key => obj[key] === null || typeof obj[key] !== 'object'
  );
};
/**
 * Checks whether a node is a Firebase-style membership list: a non-empty
 * object whose every value is boolean `true` (Firebase encodes set-like
 * lists as `{ key: true, ... }`).
 *
 * @param {Object} obj - Candidate node from the Firebase JSON tree.
 * @returns {boolean} true when non-empty and all values are `true` booleans.
 */
const isList = obj => {
  const keys = Object.keys(obj);
  if (keys.length === 0) {
    return false;
  }
  return keys.every(key => {
    const value = obj[key];
    return value !== null &&
      value.constructor.name === 'Boolean' &&
      Boolean(value);
  });
};
/**
 * Checks whether a node is a list of homogeneous objects: every value must be
 * a non-empty object, and each child field must keep a consistent type across
 * all children (null/undefined child values are ignored for type inference).
 *
 * @param {*} obj - Candidate node from the Firebase JSON tree.
 * @returns {boolean} true when the node looks like `{ id: {row}, ... }`.
 */
const isObjectList = obj => {
  if (obj === null || obj === undefined) {
    return false;
  }
  // Accumulates the `typeof` seen for each child field name so that later
  // children can be validated against the same structure.
  const childTypes = {};
  for (const key of Object.keys(obj)) {
    const child = obj[key];
    if (child === null || typeof child !== 'object' || Object.keys(child).length === 0) {
      return false;
    }
    for (const childKey of Object.keys(child)) {
      const value = child[childKey];
      if (value === null || value === undefined) {
        continue;
      }
      if (!childTypes[childKey]) {
        childTypes[childKey] = typeof value;
      } else if (typeof value !== childTypes[childKey]) {
        return false;
      }
    }
  }
  return true;
};
module.exports = {
getParentPrimaryKeyMap,
getLastPrimaryKey,
getPrimaryKeyName,
isRandomList,
isList,
isObjectList,
};

View File

@ -1,11 +1,11 @@
const fetch = require('node-fetch');
const {cli} = require('cli-ux');
const throwError = require('../error');
const {spinnerStart, spinnerStop, spinnerStopColorless} = require('../log');
const createTables = async (tables, url, headers, overwrite, runSql, sql) => {
if (overwrite) {
cli.action.stop('Skipped!');
cli.action.start('Creating tables');
spinnerStopColorless('Skipped!');
spinnerStart('Creating tables');
await runSql(sql, url, headers);
} else {
try {
@ -41,8 +41,8 @@ const createTables = async (tables, url, headers, overwrite, runSql, sql) => {
}
});
if (!found) {
cli.action.stop('Done!');
cli.action.start('Creating tables');
spinnerStop('Done!');
spinnerStart('Creating tables');
await runSql(sql, url, headers);
}
}

View File

@ -1,7 +1,7 @@
const {spinnerStart, spinnerStop, log} = require('../log');
const generate = require('./generateTables');
const {refineJson} = require('./utils');
const {generateSql, runSql} = require('./sql');
const {generateSql, runSql, dropUtilityTables} = require('./sql');
const {trackTables} = require('./track');
const {getInsertOrder, insertData} = require('./insert');
const {createRelationships} = require('./relationships');
@ -26,13 +26,17 @@ const importData = async (jsonDb, url, headers, overwrite, level = 1, shouldNorm
createRelationships(tables, url, headers).then(() => {
spinnerStop('Done!');
const insertOrder = getInsertOrder(tables);
insertData(insertOrder, db, tables, url, headers, success => {
insertData(insertOrder, db, tables, url, headers, async success => {
if (level <= 10 && shouldNormalize) {
normalize(tables, db, url, headers, level, importData);
} else if (success) {
spinnerStart('Dropping utility tables');
const resp = await dropUtilityTables(url, headers);
if (resp) {
spinnerStop('Done!');
}
log('');
log(`Success! Try out the GraphQL API at ${url}/console`, 'green');
if (!shouldNormalize) {
makeSuggestions();
}

View File

@ -2,7 +2,8 @@ const {query} = require('graphqurl');
const fetch = require('node-fetch');
const moment = require('moment');
const throwError = require('../error');
const {log, spinnerStart, spinnerStop} = require('../log');
const {log, spinnerStart, spinnerStop, spinnerStopColorless} = require('../log');
const colors = require('colors/safe');
const getInsertOrder = tables => {
let order = [];
@ -56,8 +57,9 @@ const transformData = (data, tables) => {
};
const deleteDataTill = async (tableName, insertOrder, url, headers) => {
spinnerStopColorless(colors.red('Error'));
spinnerStart('Restoring database to a safe state');
const truncate = async tn => {
const truncate = async order => {
const resp = await fetch(
url,
{
@ -66,21 +68,22 @@ const deleteDataTill = async (tableName, insertOrder, url, headers) => {
body: JSON.stringify({
type: 'run_sql',
args: {
sql: `truncate table public."${tn}" cascade;`,
sql: `truncate table public."${insertOrder[order]}" cascade;`,
cascade: true,
},
}),
}
);
if (tn === tableName) {
if (insertOrder[order] === tableName) {
spinnerStop('Done');
return resp;
} else {
await truncate(order + 1, Boolean(resp));
}
};
if (insertOrder.length === 0) {
return;
}
return truncate(insertOrder[0]);
return truncate(0);
};
const insertData = async (insertOrder, sampleData, tables, url, headers, callback) => {

View File

@ -108,13 +108,30 @@ const patchDupeDependentTables = (table, dupe, tables, data, pkeyMap) => {
const patchedData = {};
tables.forEach(otherTable => {
if (otherTable.name !== table && otherTable.name !== dupe) {
if (otherTable.columns.find(column => column.name === `${dupe}__idself`)) {
if (otherTable.columns.find(column => {
return column.name.indexOf(`${dupe}__id`) === 0 ||
column.name.indexOf(`${table}__idself`) === 0;
})) {
const newData = data[otherTable.name].map(row => {
const newRow = {
...row,
};
newRow[`${table}__id`] = pkeyMap[row[`${dupe}__idself`]];
delete newRow[`${dupe}__idself`];
for (var c in row) {
if (c.indexOf(`${table}__idself`) === 0) {
delete newRow[c];
continue;
}
if (c.indexOf(`${dupe}__idself`) === 0) {
newRow[`${table}__id`] = pkeyMap[row[c]];
delete newRow[c];
continue;
}
if (c.indexOf(`${dupe}__id`) === 0) {
delete newRow[c];
continue;
}
}
return newRow;
});
patchedData[otherTable.name] = newData;
@ -124,6 +141,60 @@ const patchDupeDependentTables = (table, dupe, tables, data, pkeyMap) => {
return patchedData;
};
/**
 * Builds a map from the self-generated primary key (`_idself`) of each row in
 * the duplicate table to the `_id` of the matching row in the original table.
 * Two rows match when they agree on every column in `columnList`.
 *
 * @param {string} table - Name of the original (kept) table.
 * @param {string} dupe - Name of the duplicate (to-be-merged) table.
 * @param {Array<string>} columnList - Columns compared to decide row equality.
 * @param {Object} data - Map of table name to array of row objects.
 * @returns {Object} Map of dupe `_idself` values to original `_id` values.
 */
const makePkeyMap = (table, dupe, columnList, data) => {
  const map = {};
  for (const dupeRow of data[dupe]) {
    for (const tableRow of data[table]) {
      const rowsMatch = columnList.every(
        column => dupeRow[column] === tableRow[column]
      );
      if (rowsMatch) {
        map[dupeRow._idself] = tableRow._id;
      }
    }
  }
  return map;
};
/**
 * Decides which of two duplicate tables should be kept (`table1`) and which
 * should be merged away (`table2`). Root-level tables are never deleted, so a
 * root-level table wins over a child table; when neither is root-level the
 * first argument wins; when both are root-level neither may be deleted and
 * `{ table1: null, table2: null }` is returned.
 *
 * @param {string} table - First candidate table name.
 * @param {string} dupe - Second candidate table name.
 * @param {Array<Object>} topLevelTables - Rows with a `__tableName` field
 *   listing the root-level tables.
 * @returns {{table1: (string|null), table2: (string|null)}} Keep/merge pair.
 */
const getTablePriority = (table, dupe, topLevelTables) => {
  const rootNames = topLevelTables.map(row => row.__tableName);
  const isTableTopLevel = rootNames.includes(table);
  const isDupeTopLevel = rootNames.includes(dupe);
  if (isTableTopLevel && isDupeTopLevel) {
    // Both are root-level: refuse to merge either.
    return {
      table1: null,
      table2: null,
    };
  }
  if (isDupeTopLevel) {
    return {
      table1: dupe,
      table2: table,
    };
  }
  return {
    table1: table,
    table2: dupe,
  };
};
const handleConfirmedDupes = (confirmedDupes, tables, data) => {
/*
1. Go through the dupes
@ -134,51 +205,36 @@ const handleConfirmedDupes = (confirmedDupes, tables, data) => {
let newData = {
...data,
};
let filteredTables = [...tables];
const handle = (dupes, index) => {
if (dupes.length === 0 || index > dupes.length - 1) {
return;
}
const tableData = [];
let table1, table2;
const {table1, table2} = getTablePriority(dupes[index].table1, dupes[index].table2, data.__rootTables);
const columnList = dupes[index].columnList;
if (!newData[dupes[index].table1][0]._idself &&
!newData[dupes[index].table2][0]._idself &&
newData[dupes[index].table1][0]._id &&
newData[dupes[index].table1][0]._id
) {
if (dupes[index].table1.length > dupes[index].table2.length) {
table2 = dupes[index].table1;
table1 = dupes[index].table2;
} else {
table1 = dupes[index].table1;
table2 = dupes[index].table2;
}
} else if (!newData[dupes[index].table1][0]._idself && newData[dupes[index].table1][0]._id) {
table1 = dupes[index].table1;
table2 = dupes[index].table2;
} else if (!newData[dupes[index].table2][0]._idself && newData[dupes[index].table2][0]._id) {
table2 = dupes[index].table1;
table1 = dupes[index].table2;
} else {
if (!table1) {
handle(dupes, index + 1);
return;
}
const table = tables.find(t => t.name === table1);
const dupe = tables.find(t => t.name === table2);
const pkeyMap = {};
newData[table.name].forEach(tableRow => {
const table = filteredTables.find(t => t.name === table1);
const dupe = filteredTables.find(t => t.name === table2);
newData[table.name].forEach(r => {
const tableRow = {};
for (var c in r) {
if (c.indexOf('_idself') !== 0) {
tableRow[c] = r[c];
}
}
const dLength = data[dupe.name].length;
let found = false;
for (let j = 0; j < dLength; j++) {
const dupeRow = newData[dupe.name][j];
if (columnList.every(colName => dupeRow[colName] === tableRow[colName])) {
found = true;
const item = {};
for (var key in dupeRow) {
if (key.indexOf('_idself') === 0) {
if (!pkeyMap[dupeRow]) {
pkeyMap.dupeRow = {};
}
pkeyMap[dupeRow._idself] = tableRow._id;
} else {
if (key.indexOf('_idself') !== 0) {
item[key.replace(dupe.name + '_', table.name + '_')] = dupeRow[key];
}
}
@ -189,15 +245,37 @@ const handleConfirmedDupes = (confirmedDupes, tables, data) => {
break;
}
}
if (!found) {
tableData.push(tableRow);
}
});
newData[table.name] = tableData;
delete newData[dupe.name];
filteredTables = filteredTables.filter(ft => ft.name !== dupe.name);
newData = {
...newData,
...patchDupeDependentTables(table.name, dupe.name, tables, newData, pkeyMap),
...patchDupeDependentTables(table.name, dupe.name, filteredTables, newData, makePkeyMap(table1, table2, columnList, newData)),
};
delete newData[dupe.name];
const filteredDupes = [];
for (var i = dupes.length - 1; i >= 0; i--) {
const d = dupes[i];
if ((d.table1 !== table1 && d.table2 !== table2) && (d.table2 !== table1 && d.table1 !== table2)) {
if (d.table1 === table2) {
filteredDupes.push({
table1,
table2: d.table2,
});
}
if (d.table2 === table2) {
filteredDupes.push({
table1,
table2: d.table1,
});
}
}
}
handle(
dupes.filter(d => d.table1 !== table1 && d.table2 !== table1 && d.table1 !== table2 && d.table2 !== table2),
filteredDupes,
0
);
};

View File

@ -90,7 +90,31 @@ const generateSql = metadata => {
return sqlArray;
};
/**
 * Drops the bookkeeping tables the importer creates (currently just
 * `__rootTables`) via Hasura's `run_sql` API.
 *
 * @param {string} url - Base URL of the GraphQL Engine instance.
 * @param {Object} headers - HTTP headers (e.g. access key) for the request.
 * @returns {Promise<boolean>} true when the HTTP request returned a 2xx status.
 */
const dropUtilityTables = async (url, headers) => {
  const tablesToDrop = ['__rootTables'];
  let sql = '';
  tablesToDrop.forEach(table => {
    sql += `drop table if exists "${table}" cascade;`;
  });
  const resp = await fetch(
    `${url}/v1/query`,
    {
      method: 'POST',
      headers,
      body: JSON.stringify({
        type: 'run_sql',
        args: {
          sql,
          cascade: true,
        },
      }),
    }
  );
  // Fixed: `Boolean(resp)` was always true because fetch resolves with a
  // Response object even for HTTP error statuses; report the actual outcome.
  return resp.ok;
};
module.exports = {
generateSql,
runSql,
dropUtilityTables,
};

View File

@ -1,25 +1,40 @@
const colors = require('colors/safe');
const {cli} = require('cli-ux');
const shouldLog = process.env.F2G_LOG;
const log = (message, color) => {
if (color) {
console.log(colors[color](message));
} else {
console.log(message);
if (shouldLog !== '0') {
if (color) {
console.log(colors[color](message));
} else {
console.log(message);
}
}
};
const spinnerStart = message => {
cli.action.start(message);
if (shouldLog !== '0') {
cli.action.start(message);
}
};
const spinnerStop = () => {
cli.action.stop(colors.green('Done!'));
if (shouldLog !== '0') {
cli.action.stop(colors.green('Done!'));
}
};
const spinnerStopColorless = message => {
if (shouldLog !== '0') {
cli.action.stop(message);
}
};
module.exports = {
log,
spinnerStop,
spinnerStart,
spinnerStopColorless,
};

View File

@ -0,0 +1,47 @@
{
"f2g-test-posts" : {
"-LMbLFOAW2q6GO1bD-5g" : {
"author" : "Eena",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "My first post content\nAnd body\nANd structure",
"starCount" : 0,
"title" : "My first post",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
},
"-LMbLIv6VKHYul7p_PZ-" : {
"author" : "Eena",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "AKsdjak\naklsdjaskldjklas\nasdklfjaklsdfjklsda\nasdklfjasklf",
"starCount" : 0,
"title" : "Whatta proaaa",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
}
},
"f2g-test-user-posts" : {
"4UPmbcaqZKT2NdAAqBahXj4tHYN2" : {
"-LMbLFOAW2q6GO1bD-5g" : {
"author" : "Eena",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "My first post content\nAnd body\nANd structure",
"starCount" : 0,
"title" : "My first post",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
},
"-LMbLIv6VKHYul7p_PZ-" : {
"author" : "Eena",
"authorPic" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"body" : "AKsdjak\naklsdjaskldjklas\nasdklfjaklsdfjklsda\nasdklfjasklf",
"starCount" : 0,
"title" : "Whatta proaaa",
"uid" : "4UPmbcaqZKT2NdAAqBahXj4tHYN2"
}
}
},
"f2g-test-users" : {
"4UPmbcaqZKT2NdAAqBahXj4tHYN2" : {
"email" : "rishichandrawawhal@gmail.com",
"profile_picture" : "https://lh4.googleusercontent.com/-vPOIBOxCUpo/AAAAAAAAAAI/AAAAAAAAAFo/SKk9hpOB7v4/photo.jpg",
"username" : "Eena"
}
}
}

View File

@ -14278,7 +14278,7 @@
}
}
},
"Track": {
"f2g_test_Track": {
"1": {
"Composer": "Angus Young, Malcolm Young, Brian Johnson",
"album": {

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,136 @@
module.exports = {
scores: {
Rishi: 24,
Rikin: 26,
Tanmai: 27,
},
author: {
someone: {
one: {
name: 'Rishi',
age: 24,
articles: {
first: {
title: 'Rishis article',
body: "Rishi's article's body",
comments: {
'Comment 1': true,
'Comment 2': true,
},
},
second: {
title: 'Rishis another article',
body: "Rishi's another article's body",
comments: {
'Comment 3': true,
},
},
},
friends: {
Rikin: true,
},
},
two: {
name: 'Rikin',
age: 30,
articles: {
third: {
title: "Rikin's article",
body: "Rikin's article's body",
comments: {
'Comment 4': true,
'Comment 5': true,
},
},
fourth: {
title: 'Rikins another article',
body: "Rikin's another article's body",
comments: {
'Comment 6': true,
'Comment df': true,
},
},
},
friends: {
Rishi: true,
Tanmai: true,
},
},
three: {
name: 'Tanmai',
age: 30,
articles: {
fifth: {
title: "Tanmai's article",
body: "Tanmai's article's body",
comments: {
'Comment asdjf': true,
'Comment dsiafjijf': true,
},
},
sixth: {
title: "Tanmai's another article",
body: "Tanmai's another article's body",
comments: {
'Coafsdfment asdjf': true,
'Commenasdft dsiafjijf': true,
},
},
},
friends: {
Rikin: true,
},
},
},
},
articles: {
first: {
title: 'Rishis article',
body: "Rishi's article's body",
author: {
name: 'Rishi',
age: 24,
},
},
second: {
title: 'Rishis another article',
body: "Rishi's another article's body",
author: {
name: 'Rishi',
age: 24,
},
},
third: {
title: "Rikin's article",
body: "Rikin's article's body",
author: {
name: 'Rikin',
age: 30,
},
},
fourth: {
title: 'Rikins another article',
body: "Rikin's another article's body",
author: {
name: 'Rikin',
age: 30,
},
},
fifth: {
title: "Tanmai's article",
body: "Tanmai's article's body",
author: {
name: 'Tanmai',
age: 30,
},
},
sixth: {
title: "Tanmai's another article",
body: "Tanmai's another article's body",
author: {
name: 'Tanmai',
age: 30,
},
},
},
};

View File

@ -0,0 +1,110 @@
{
"Articles": {
"A1": {
"Title": "Title1",
"Body": "Body1",
"IsUnpublished": false,
"Author": {
"Name": "AName1",
"Age": 11
},
"Comments": {
"C1": {
"Body": "Comment1",
"Author": {
"Name": "AName2",
"Sex": "M"
},
"Date": "22-09-2018"
},
"C2": {
"Body": "Comment2",
"Author": {
"Name": "AName1",
"Sex": "F"
},
"Date": "21-09-2018"
}
}
},
"A2": {
"Title": "Title2",
"Body": "Body2",
"IsUnpublished": true,
"Author": {
"Name": "AName2",
"Age": 22
},
"Comments": {
"C3": {
"Body": "Comment1",
"Author": {
"Name": "AName1",
"Sex": "F"
},
"Date": "23-09-2018"
},
"C4": {
"Body": "Comment2",
"Author": {
"Name": "AName2",
"Sex": "M"
},
"Date": "24-09-2018"
}
}
}
},
"Authors": {
"AT1": {
"Name": "AName1",
"Age": 11,
"Sex": "F",
"Articles": {
"A1": {
"Title": "Title1"
}
}
},
"AT2": {
"Name": "AName2",
"Age": 22,
"Sex": "M",
"Articles": {
"A2": {
"Title": "Title2"
}
}
}
},
"Comments": {
"C1": {
"Body": "Comment1",
"Author": {
"Name": "AName2"
},
"Date": "22-09-2018"
},
"C2": {
"Body": "Comment2",
"Author": {
"Name": "AName1"
},
"Date": "21-09-2018"
},
"C3": {
"Body": "Comment1",
"Author": {
"Name": "AName1"
},
"Date": "23-09-2018"
},
"C4": {
"Body": "Comment2",
"Author": {
"Name": "AName2"
},
"Date": "24-09-2018"
}
}
}

View File

@ -2,5 +2,7 @@
if [ -z "$TEST_HGE_URL" ] && [ -z "$TEST_X_HASURA_ACCESS_KEY" ]; then
echo "ERROR: Please run the test command with the environment variable TEST_HGE_URL"
else
../bin/run $TEST_HGE_URL --access-key=$TEST_X_HASURA_ACCESS_KEY --db=./db.json --overwrite && node verify.js
F2G_LOG=0 ../bin/run $TEST_HGE_URL --access-key=$TEST_X_HASURA_ACCESS_KEY --db=./data-sets/chinook.json --overwrite --normalize && node verifyChinook.js
F2G_LOG=0 ../bin/run $TEST_HGE_URL --access-key=$TEST_X_HASURA_ACCESS_KEY --db=./data-sets/blog.json --overwrite --normalize && node verifyBlog.js
F2G_LOG=0 ../bin/run $TEST_HGE_URL --access-key=$TEST_X_HASURA_ACCESS_KEY --db=./data-sets/chinook_nested.json --overwrite --normalize && node verifyChinookNested.js
fi

View File

@ -0,0 +1,62 @@
const {query} = require('graphqurl');
const fetch = require('node-fetch');
const colors = require('colors/safe');
const complexQuery = `
query {
f2g_test_posts (order_by: title_asc) {
title
}
f2g_test_users (order_by: username_asc) {
username
}
f2g_test_user_posts (order_by:title_asc){
author
title
}
}
`;
/**
 * Verifies the blog.json import: runs the verification query against the
 * GraphQL endpoint, checks a few known rows, drops the test tables on
 * success, and logs pass/fail before exiting.
 */
const verifyDataImport = () => {
  query({
    query: complexQuery,
    endpoint: `${process.env.TEST_HGE_URL}/v1alpha1/graphql`,
    headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
  }).then(response => {
    if (
      response.data.f2g_test_posts[0].title === 'My first post' &&
      response.data.f2g_test_users[0].username === 'Eena' &&
      response.data.f2g_test_user_posts[1].title === 'Whatta proaaa'
    ) {
      // Expected rows found: clean up the imported test tables.
      let sqlString = '';
      ['f2g_test_users', 'f2g_test_posts', 'f2g_test_user_posts'].forEach(t => {
        sqlString += `drop table public."${t}" cascade;`;
      });
      fetch(
        `${process.env.TEST_HGE_URL}/v1/query`,
        {
          method: 'POST',
          headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
          body: JSON.stringify({
            type: 'run_sql',
            args: {
              sql: sqlString,
              cascade: true,
            },
          }),
        }
      ).then(() => {
        console.log(colors.green('✔︎ data-sets/blog.json: Test passed'));
        process.exit();
      }).catch(() => {
        process.exit();
      });
    } else {
      console.log(colors.red('✖ data-sets/blog.json: Test failed. Unexpected response.'));
      console.log(response.data);
      process.exit();
    }
  }).catch(e => {
    // Fixed: the query promise had no rejection handler, so a failed request
    // produced an unhandled rejection and the script gave no failure output.
    console.log(colors.red('✖ data-sets/blog.json: Test failed. Unexpected response.'));
    console.log(JSON.stringify(e, null, 2));
    process.exit();
  });
};
verifyDataImport();

View File

@ -12,7 +12,7 @@ query {
Name
ArtistId
}
f2g_test_Album_tracks (
f2g_test_Track (
order_by: Name_asc
) {
Name
@ -30,10 +30,10 @@ const verifyDataImport = () => {
}).then(response => {
if (
response.data.f2g_test_Album[0].f2g_test_Album_artist.ArtistId === 1 &&
response.data.f2g_test_Album[0].f2g_test_Album_tracks[0].Name === 'Breaking The Rules'
response.data.f2g_test_Album[0].f2g_test_Track[0].Name === 'Breaking The Rules'
) {
let sqlString = '';
['Album', 'Album_artist', 'Album_tracks'].forEach(t => {
['Album', 'Album_artist', 'Track'].forEach(t => {
sqlString += `drop table public."f2g_test_${t}" cascade;`;
});
fetch(
@ -50,16 +50,20 @@ const verifyDataImport = () => {
}),
}
).then(() => {
console.log(colors.green('✔︎ Test passed'));
console.log(colors.green('✔︎ data-sets/chinook.json: Test passed'));
process.exit();
}).catch(() => {
process.exit();
});
} else {
console.log(colors.red('✖ Test failed. Unexpected response.'));
console.log(colors.red('✖ data-sets/chinook.json: Test failed. Unexpected response.'));
console.log(response.data);
process.exit();
}
}).catch(e => {
console.log(colors.red('✖ data-sets/chinook.json: Test failed. Unexpected response.'));
console.log(JSON.stringify(e, null, 2));
process.exit();
});
};

View File

@ -0,0 +1,76 @@
const {query} = require('graphqurl');
const fetch = require('node-fetch');
const colors = require('colors/safe');
const complexQuery = `
query {
f2gt_Album (order_by:_id_asc){
_id
f2gt_Track (order_by: _id_asc) {
_id
Name
}
f2gt_Artist {
Name
f2gt_Album (order_by: _id_desc){
_id
Title
f2gt_Track (order_by: Name_asc){
Name
Composer
}
}
}
}
}
`;
/**
 * Verifies the chinook_nested.json import: runs the nested-relationship
 * verification query, checks known rows, drops the test tables on success,
 * and logs pass/fail before exiting.
 */
const verifyDataImport = () => {
  query({
    query: complexQuery,
    endpoint: `${process.env.TEST_HGE_URL}/v1alpha1/graphql`,
    headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
  }).then(response => {
    if (
      response.data.f2gt_Album[0]._id === '1' &&
      response.data.f2gt_Album[0].f2gt_Track[1]._id === '10' &&
      response.data.f2gt_Album[0].f2gt_Artist.Name === 'AC/DC' &&
      response.data.f2gt_Album[0].f2gt_Artist.f2gt_Album[0].Title === 'Let There Be Rock' &&
      response.data.f2gt_Album[0].f2gt_Artist.f2gt_Album[0].f2gt_Track[0].Name === 'Bad Boy Boogie'
    ) {
      let sqlString = '';
      // Fixed: the table created and queried above is `f2gt_Track`, not
      // `f2gt_Tracks`, so dropping 'Tracks' left the table behind.
      ['Album', 'Artist', 'Track'].forEach(t => {
        sqlString += `drop table public."f2gt_${t}" cascade;`;
      });
      fetch(
        `${process.env.TEST_HGE_URL}/v1/query`,
        {
          method: 'POST',
          headers: {'x-hasura-access-key': process.env.TEST_X_HASURA_ACCESS_KEY},
          body: JSON.stringify({
            type: 'run_sql',
            args: {
              sql: sqlString,
              cascade: true,
            },
          }),
        }
      ).then(() => {
        // Fixed: messages previously named data-sets/chinook.json, which is
        // the other verifier's data set.
        console.log(colors.green('✔︎ data-sets/chinook_nested.json: Test passed'));
        process.exit();
      }).catch(() => {
        process.exit();
      });
    } else {
      console.log(colors.red('✖ data-sets/chinook_nested.json: Test failed. Unexpected response.'));
      process.exit();
    }
  }).catch(e => {
    console.log(colors.red('✖ data-sets/chinook_nested.json: Test failed. Unexpected response.'));
    console.log(JSON.stringify(e, null, 2));
    process.exit();
  });
};
verifyDataImport();