mirror of
https://github.com/TryGhost/Ghost.git
synced 2024-12-02 08:13:34 +03:00
4ff467794f
refs: https://github.com/TryGhost/DevOps/issues/11 This is a pretty huge commit, but the relevant points are: * Each importer no longer needs to be passed a set of data, it just gets the data it needs * Each importer specifies its dependencies, so that the order of import can be determined at runtime using a topological sort * The main data generator function can just tell each importer to import the data it has This makes working on the data generator much easier. Some other benefits are: * Batched importing, massively speeding up the whole process * `--tables` to set the exact tables you want to import, and specify the quantity of each
32 lines | 1.0 KiB | JavaScript
const TableImporter = require('./TableImporter');
|
|
const {faker} = require('@faker-js/faker');
|
|
const {slugify} = require('@tryghost/string');
|
|
const security = require('@tryghost/security');
|
|
const dateToDatabaseString = require('../utils/database-date');
|
|
|
|
/**
 * Generates fake rows for the `users` table.
 *
 * Declares no upstream dependencies, so the topological sort in the data
 * generator can schedule it before any importer that references users.
 */
class UsersImporter extends TableImporter {
    static table = 'users';
    static dependencies = [];

    // Number of rows produced when the caller does not override the quantity.
    defaultQuantity = 8;

    /**
     * @param {import('knex').Knex} knex - database connection
     * @param {import('knex').Knex.Transaction} transaction - transaction the rows are inserted under
     */
    constructor(knex, transaction) {
        super(UsersImporter.table, knex, transaction);
    }

    /**
     * Build one fake `users` row.
     *
     * NOTE: the faker calls run in a fixed sequence (name parts, id, password
     * colour, email, avatar, created_at) so seeded runs stay reproducible.
     *
     * @returns {Promise<Object>} column/value map ready for insertion
     */
    async generate() {
        const fullName = `${faker.name.firstName()} ${faker.name.lastName()}`;
        const id = faker.database.mongodbObjectId();
        // A random colour word doubles as a throwaway plaintext password.
        const hashedPassword = await security.password.hash(faker.color.human());
        const email = faker.internet.email(fullName);
        const avatarUrl = faker.internet.avatar();
        // Accounts are dated anywhere from Jan 2016 up to "now".
        const createdAt = dateToDatabaseString(faker.date.between(new Date(2016, 0), new Date()));

        return {
            id,
            name: fullName,
            slug: slugify(fullName),
            password: hashedPassword,
            email,
            profile_image: avatarUrl,
            created_at: createdAt,
            created_by: 'unused'
        };
    }
}
|
|
|
|
// Expose the importer so the data generator can discover and run it.
module.exports = UsersImporter;
|