mirror of
https://github.com/TryGhost/Ghost.git
synced 2024-12-19 00:11:49 +03:00
4ff467794f
refs: https://github.com/TryGhost/DevOps/issues/11 This is a pretty huge commit, but the relevant points are: * Each importer no longer needs to be passed a set of data, it just gets the data it needs * Each importer specifies its dependencies, so that the order of import can be determined at runtime using a topological sort * The main data generator function can just tell each importer to import the data it has This makes working on the data generator much easier. Some other benefits are: * Batched importing, massively speeding up the whole process * `--tables` to set the exact tables you want to import, and specify the quantity of each
43 lines
1.3 KiB
JavaScript
43 lines
1.3 KiB
JavaScript
const {faker} = require('@faker-js/faker');
|
|
const TableImporter = require('./TableImporter');
|
|
|
|
/**
 * Populates the `roles_users` join table, linking each imported user to a role.
 *
 * There is no dedicated roles importer because roles are statically defined
 * in the database; this importer only attaches users to those existing rows.
 */
class RolesUsersImporter extends TableImporter {
    static table = 'roles_users';

    // Only `users` must be imported first — roles already exist in the schema.
    static dependencies = ['users'];

    constructor(knex, transaction) {
        super(RolesUsersImporter.table, knex, transaction);
    }

    /**
     * Ignore overridden quantity for 1:1 relationship — exactly one
     * roles_users row is generated per user.
     */
    async import() {
        // Skip user id 1 (the owner), which keeps its seeded role.
        const users = await this.transaction
            .select('id')
            .from('users')
            .whereNot('id', 1);
        this.roles = await this.transaction.select('id', 'name').from('roles');

        await this.importForEach(users, 1);
    }

    /**
     * Produce one roles_users row for the current model (set by importForEach),
     * assigning a randomly chosen non-owner role.
     * @returns {{id: string, role_id: string, user_id: string}|undefined}
     */
    generate() {
        const assignableRoles = ['Editor', 'Contributor', 'Author'];
        const pickedIndex = faker.datatype.number({
            min: 0,
            max: assignableRoles.length - 1
        });
        const chosenName = assignableRoles[pickedIndex];
        const matchingRole = this.roles.find(({name}) => name === chosenName);
        if (!matchingRole) {
            // No roles defined in database, don't bother creating user role
            return;
        }
        return {
            id: faker.database.mongodbObjectId(),
            role_id: matchingRole.id,
            user_id: this.model.id
        };
    }
}

module.exports = RolesUsersImporter;
|