const _ = require('lodash');
const errors = require('@tryghost/errors');
const db = require('../../data/db');
const logging = require('@tryghost/logging');

const CHUNK_SIZE = 100;
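
// Fallback for a failed bulk insert: retry the chunk record by record so that
// a single invalid row does not discard the rest of the chunk. Failures are
// collected on the shared `result` object rather than thrown.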
async function insertChunkSequential(table, chunk, result) {
    for (const record of chunk) {
        try {
            await db.knex(table).insert(record);
            result.successful += 1;
        } catch (err) {
            err.errorDetails = record;
            result.errors.push(err);
            result.unsuccessfulRecords.push(record);
            result.unsuccessful += 1;
        }
    }
}
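
// Inserts a whole chunk in one multi-row statement and falls back to the
// sequential, per-record path if that statement fails.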
async function insertChunk(table, chunk, result) {
    try {
        await db.knex(table).insert(chunk);
        result.successful += chunk.length;
    } catch (err) {
        await insertChunkSequential(table, chunk, result);
    }
}
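
/**
 * Bulk-inserts `data` into `table` in chunks of CHUNK_SIZE rows.
 *
 * @param {string} table
 * @param {Object[]} data
 * @returns {Promise<{successful: number, unsuccessful: number, unsuccessfulRecords: Object[], errors: Error[]}>}
 */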
async function insert(table, data) {
    const result = {
        successful: 0,
        unsuccessful: 0,
        unsuccessfulRecords: [],
        errors: []
    };

    for (const chunk of _.chunk(data, CHUNK_SIZE)) {
        await insertChunk(table, chunk, result);
    }

    return result;
}
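
// Fallback for a failed bulk delete: remove the ids one by one, logging and
// collecting a DataImportError for every id that still cannot be deleted.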
async function delChunkSequential(table, chunk, result) {
    for (const id of chunk) {
        try {
            await db.knex(table).where('id', id).del();
            result.successful += 1;
        } catch (err) {
            const importError = new errors.DataImportError({
                message: `Failed to remove entry from ${table}`,
                context: `Entry id: ${id}`,
                err: err
            });
            logging.error(importError);

            result.errors.push(importError);
            result.unsuccessfulIds.push(id);
            result.unsuccessful += 1;
        }
    }
}
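
// Deletes a whole chunk of ids with a single `whereIn` statement and falls
// back to the sequential, per-id path if that statement fails.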
async function delChunk(table, chunk, result) {
    try {
        await db.knex(table).whereIn('id', chunk).del();
        result.successful += chunk.length;
    } catch (err) {
        await delChunkSequential(table, chunk, result);
    }
}
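
/**
 * Bulk-deletes the rows matching `ids` from `table` in chunks of CHUNK_SIZE.
 *
 * @param {string} table
 * @param {string[]} ids
 * @returns {Promise<{successful: number, unsuccessful: number, unsuccessfulIds: string[], errors: Error[]}>}
 */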
async function del(table, ids) {
    const result = {
        successful: 0,
        unsuccessful: 0,
        unsuccessfulIds: [],
        errors: []
    };

    for (const chunk of _.chunk(ids, CHUNK_SIZE)) {
        await delChunk(table, chunk, result);
    }

    return result;
}

/**
 * Registers static `bulkAdd` and `bulkDestroy` helpers on `Bookshelf.Model`,
 * so every model can insert or delete records in bulk against its own table.
 *
 * @param {import('bookshelf')} Bookshelf
 */
module.exports = function (Bookshelf) {
    Bookshelf.Model = Bookshelf.Model.extend({}, {
        bulkAdd: function bulkAdd(data, tableName) {
            tableName = tableName || this.prototype.tableName;

            return insert(tableName, data);
        },

        bulkDestroy: function bulkDestroy(data, tableName) {
            tableName = tableName || this.prototype.tableName;

            return del(tableName, data);
        }
    });
};
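
// Usage sketch (hypothetical model name; any model on a Bookshelf instance with
// this plugin registered inherits the static methods):
//
//   const added = await SomeModel.bulkAdd(records);            // -> {successful, unsuccessful, unsuccessfulRecords, errors}
//   const removed = await SomeModel.bulkDestroy(idsToRemove);  // -> {successful, unsuccessful, unsuccessfulIds, errors}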