mirror of
https://github.com/TryGhost/Ghost.git
synced 2024-12-22 02:11:44 +03:00
4ff467794f
refs: https://github.com/TryGhost/DevOps/issues/11 This is a pretty huge commit, but the relevant points are: * Each importer no longer needs to be passed a set of data, it just gets the data it needs * Each importer specifies its dependencies, so that the order of import can be determined at runtime using a topological sort * The main data generator function can just tell each importer to import the data it has This makes working on the data generator much easier. Some other benefits are: * Batched importing, massively speeding up the whole process * `--tables` to set the exact tables you want to import, and specify the quantity of each
33 lines
1010 B
JavaScript
33 lines
1010 B
JavaScript
const {faker} = require('@faker-js/faker');
|
|
const TableImporter = require('./TableImporter');
|
|
|
|
/**
 * Importer for the `posts_authors` join table, linking each post to a
 * randomly selected author drawn from the `users` table.
 *
 * Declares `posts` and `users` as dependencies so the runtime topological
 * sort imports those tables first.
 */
class PostsAuthorsImporter extends TableImporter {
    static table = 'posts_authors';

    static dependencies = ['posts', 'users'];

    constructor(knex, transaction) {
        super(PostsAuthorsImporter.table, knex, transaction);
        // Monotonically increasing counter; every generated row receives the
        // next value as its sort_order.
        this.sortOrder = 0;
    }

    /**
     * Loads post and user ids from the current transaction, then generates
     * rows for each post.
     *
     * @param {number} [quantity] - total number of rows to generate, spread
     *     evenly across posts; when omitted (or 0), one row per post.
     */
    async import(quantity) {
        const posts = await this.transaction.select('id').from('posts');
        this.users = await this.transaction.select('id').from('users');

        const amountPerPost = quantity ? quantity / posts.length : 1;
        await this.importForEach(posts, amountPerPost);
    }

    /**
     * Builds one posts_authors row for the post currently held in
     * `this.model` (set by the base class during importForEach).
     *
     * @returns {Object} row ready for insertion
     */
    generate() {
        return {
            id: faker.database.mongodbObjectId(),
            post_id: this.model.id,
            author_id: this.users[faker.datatype.number(this.users.length - 1)].id,
            sort_order: this.sortOrder++
        };
    }
}
|
|
|
|
module.exports = PostsAuthorsImporter;
|