mirror of
https://github.com/TryGhost/Ghost.git
synced 2024-12-19 00:11:49 +03:00
4ff467794f
refs: https://github.com/TryGhost/DevOps/issues/11 This is a pretty huge commit, but the relevant points are: * Each importer no longer needs to be passed a set of data, it just gets the data it needs * Each importer specifies its dependencies, so that the order of import can be determined at runtime using a topological sort * The main data generator function can just tell each importer to import the data it has This makes working on the data generator much easier. Some other benefits are: * Batched importing, massively speeding up the whole process * `--tables` to set the exact tables you want to import, and specify the quantity of each
42 lines
1.2 KiB
JavaScript
// Dependencies for the labels importer: the shared table-importer base
// class, the faker data generator, Ghost's slug helper, and blog-level
// date utilities.
const TableImporter = require('./TableImporter');
const {faker} = require('@faker-js/faker');
const {slugify} = require('@tryghost/string');
const {blogStartDate} = require('../utils/blog-info');
const dateToDatabaseString = require('../utils/database-date');
/**
 * Generates fake rows for the `labels` table.
 *
 * Labels depend on no other generated tables (`dependencies` is empty), so
 * this importer can run first in the topological ordering used by the data
 * generator.
 */
class LabelsImporter extends TableImporter {
    static table = 'labels';
    static dependencies = [];

    // Number of labels generated when no explicit quantity is requested.
    defaultQuantity = 10;

    /**
     * @param {import('knex').Knex} knex - Database connection (presumably a
     *   knex instance — TODO confirm against TableImporter).
     * @param {Object} transaction - Transaction the rows are inserted in.
     */
    constructor(knex, transaction) {
        super(LabelsImporter.table, knex, transaction);
        // Tracks every name handed out so generated labels are unique.
        this.generatedNames = new Set();
    }

    /**
     * Produces a unique, capitalised label name such as "Red Technician".
     *
     * @returns {string} A name not returned by any previous call on this
     *   instance.
     */
    generateName() {
        let name;
        do {
            name = `${faker.color.human()} ${faker.name.jobType()}`;
            name = `${name[0].toUpperCase()}${name.slice(1)}`;
        } while (this.generatedNames.has(name));
        // NOTE(review): the colour × job-type space is finite, so asking for
        // more labels than there are combinations would loop forever; the
        // default quantity (10) is far below that limit.
        this.generatedNames.add(name);
        return name;
    }

    /**
     * Generates the column values for a single `labels` row.
     *
     * @returns {Object} Row object keyed by column name.
     */
    generate() {
        const name = this.generateName();
        return {
            id: faker.database.mongodbObjectId(),
            name: name,
            // slugify already returns a string; the original wrapped it in a
            // redundant template literal.
            slug: slugify(name),
            created_at: dateToDatabaseString(blogStartDate),
            created_by: '1',
            updated_at: dateToDatabaseString(blogStartDate),
            updated_by: '1'
        };
    }
}

module.exports = LabelsImporter;