mirror of
https://github.com/TryGhost/Ghost.git
synced 2024-12-22 02:11:44 +03:00
4ff467794f
refs: https://github.com/TryGhost/DevOps/issues/11 This is a pretty huge commit, but the relevant points are: * Each importer no longer needs to be passed a set of data, it just gets the data it needs * Each importer specifies its dependencies, so that the order of import can be determined at runtime using a topological sort * The main data generator function can just tell each importer to import the data it has This makes working on the data generator much easier. Some other benefits are: * Batched importing, massively speeding up the whole process * `--tables` to set the exact tables you want to import, and specify the quantity of each
28 lines
894 B
JavaScript
28 lines
894 B
JavaScript
const {faker} = require('@faker-js/faker');
|
|
const TableImporter = require('./TableImporter');
|
|
|
|
class MembersNewslettersImporter extends TableImporter {
    static table = 'members_newsletters';

    // Each generated row is derived from an existing members_subscribe_events
    // row, so that importer must run first (topological sort uses this list).
    static dependencies = ['members_subscribe_events'];

    constructor(knex, transaction) {
        super(MembersNewslettersImporter.table, knex, transaction);
    }

    /**
     * Imports members_newsletters rows, one per members_subscribe_events row
     * by default.
     *
     * @param {number} [quantity] - Total number of rows to import; when given,
     *   it is spread evenly across the source events (quantity / eventCount
     *   per event). When omitted (or 0), defaults to one row per event.
     */
    async import(quantity) {
        const membersSubscribeEvents = await this.transaction.select('member_id', 'newsletter_id').from('members_subscribe_events');

        // Guard against an empty source table: the ratio below would be
        // Infinity (or NaN) and there is nothing to import anyway.
        if (membersSubscribeEvents.length === 0) {
            return;
        }

        await this.importForEach(membersSubscribeEvents, quantity ? quantity / membersSubscribeEvents.length : 1);
    }

    /**
     * Builds a single members_newsletters row from the current source model
     * (presumably set by importForEach to one of the rows selected in
     * import() — TODO confirm against TableImporter).
     *
     * @returns {{id: string, member_id: string, newsletter_id: string}}
     */
    generate() {
        return {
            id: faker.database.mongodbObjectId(),
            member_id: this.model.member_id,
            newsletter_id: this.model.newsletter_id
        };
    }
}

module.exports = MembersNewslettersImporter;
|