Wired up Docker setup script and increased data generation performance (#19420)

ref PROD-233

- Stored whether Docker is used in the config files
- When running `yarn setup`, any existing Docker container will be
reset. Run with `-y` to skip the confirmation step.
- `yarn setup` will always init the database and generate fake data.
- Increased the amount of default generated data to 100,000 members and
500 posts.
- Made many performance improvements in the data generator so the default
data set now generates in roughly 170 seconds.
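
For reference, the relevant workspace scripts after this change (see the package.json diff below):

yarn setup -y      # install deps, init the database and reset the Docker MySQL container without prompting (regenerates the fake data)
yarn docker:reset  # docker-compose down -v && up -d --wait on the MySQL service
yarn reset:data    # generate-data --clear-database --quantities members:100000,posts:500 --seed 123
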
Simon Backx, 2024-01-05 14:42:30 +01:00 (committed by GitHub)
parent 1263cf148e
commit d2cb23c3fa
53 changed files with 467 additions and 186 deletions

View File

@ -4,6 +4,8 @@ services:
mysql:
image: mysql:8.0.35
container_name: ghost-mysql
# We'll need to look into how we can further fine-tune the memory usage/performance here
command: --innodb-buffer-pool-size=1G --innodb-log-buffer-size=500M --innodb-change-buffer-max-size=50 --innodb-flush-log-at-trx-commit=0 --innodb-flush-method=O_DIRECT
ports:
- "3306:3306"
environment:
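
If you want to check that these InnoDB flags actually reach the server, they can be read back over the generator's own connection. A minimal sketch, assuming `knex` is Ghost's already-configured instance and we are inside an async function:

// Assumption: `knex` is the configured instance exported by ghost/core's connection module
const [rows] = await knex.raw("SHOW VARIABLES LIKE 'innodb_buffer_pool_size'");
console.log(rows[0]); // expect { Variable_name: 'innodb_buffer_pool_size', Value: '1073741824' } for the 1G flag above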

View File

@ -3,6 +3,7 @@ const fs = require('fs').promises;
const path = require('path');
const chalk = require('chalk');
const inquirer = require('inquirer');
/**
* Run a command and stream output to the console
@ -36,11 +37,16 @@ async function runAndStream(command, args, options) {
}
const coreFolder = path.join(__dirname, '../../ghost/core');
const rootFolder = path.join(__dirname, '../..');
const config = require('../../ghost/core/core/shared/config/loader').loadNconf({
customConfigPath: coreFolder
});
const dbClient = config.get('database:client');
const isUsingDocker = config.get('database:docker');
// Only reset data if we are using Docker
let resetData = false;
if (!dbClient.includes('mysql')) {
let mysqlSetup = false;
@ -54,6 +60,7 @@ async function runAndStream(command, args, options) {
}
if (mysqlSetup) {
resetData = true;
console.log(chalk.blue(`Adding MySQL credentials to config.local.json`));
const currentConfigPath = path.join(coreFolder, 'config.local.json');
@ -66,6 +73,7 @@ async function runAndStream(command, args, options) {
currentConfig.database = {
client: 'mysql',
docker: true,
connection: {
host: '127.0.0.1',
user: 'root',
@ -81,17 +89,39 @@ async function runAndStream(command, args, options) {
console.log(chalk.yellow(`Please add the following to config.local.json:\n`), JSON.stringify(currentConfig, null, 4));
process.exit(1);
}
console.log(chalk.blue(`Running knex-migrator init`));
await runAndStream('yarn', ['knex-migrator', 'init'], {cwd: coreFolder});
//console.log(chalk.blue(`Running data generator`));
//await runAndStream('node', ['index.js', 'generate-data'], {cwd: coreFolder});
}
} else {
console.log(chalk.green(`MySQL already configured, skipping setup`));
if (isUsingDocker) {
const yesAll = process.argv.includes('-y');
const noAll = process.argv.includes('-n');
const {confirmed} =
yesAll ? {confirmed: true}
: (
noAll ? {confirmed: false}
: await inquirer.prompt({name: 'confirmed', type:'confirm', message: 'MySQL is running via Docker, do you want to reset the Docker container? This will delete all existing data.', default: false})
);
console.log(chalk.blue(`Running knex-migrator init`));
await runAndStream('yarn', ['knex-migrator', 'init'], {cwd: coreFolder});
if (confirmed) {
console.log(chalk.yellow(`Resetting Docker container`));
try {
await runAndStream('yarn', ['docker:reset'], {cwd: path.join(__dirname, '../../')});
resetData = true;
} catch (err) {
console.error(chalk.red('Failed to run MySQL Docker container'), err);
console.error(chalk.red('Hint: is Docker installed and running?'));
}
}
} else {
console.log(chalk.green(`MySQL already configured locally. Stop your local database and delete your "database" configuration in config.local.json to switch to Docker.`));
}
}
console.log(chalk.blue(`Running knex-migrator init`));
await runAndStream('yarn', ['knex-migrator', 'init'], {cwd: coreFolder});
if (resetData) {
console.log(chalk.blue(`Resetting all data`));
await runAndStream('yarn', ['reset:data'], {cwd: rootFolder});
}
})();

View File

@ -32,6 +32,8 @@ module.exports = class DataGeneratorCommand extends Command {
}
async handle(argv = {}) {
// If we can't stream, throw an error while creating the connection
process.env.REQUIRE_INFILE_STREAM = '1';
const knex = require('../server/data/db/connection');
const tables = (argv.tables ? argv.tables.split(',') : []).map(table => ({

View File

@ -1,9 +1,11 @@
const _ = require('lodash');
const knex = require('knex');
const os = require('os');
const fs = require('fs');
const logging = require('@tryghost/logging');
const config = require('../../../shared/config');
const errors = require('@tryghost/errors');
let knexInstance;
// @TODO:
@ -45,6 +47,14 @@ function configure(dbConfig) {
dbConfig.connection.timezone = 'Z';
dbConfig.connection.charset = 'utf8mb4';
dbConfig.connection.decimalNumbers = true;
if (process.env.REQUIRE_INFILE_STREAM) {
if (process.env.NODE_ENV === 'development' || process.env.ALLOW_INFILE_STREAM) {
dbConfig.connection.infileStreamFactory = path => fs.createReadStream(path);
} else {
throw new errors.InternalServerError({message: 'MySQL infile streaming is required to run the current process, but is not allowed. Run the script in development mode or set ALLOW_INFILE_STREAM=1.'});
}
}
}
return dbConfig;
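
For context, `infileStreamFactory` is the mysql2 connection option that supplies the readable stream used when a `LOAD DATA LOCAL INFILE` statement runs; the CSV-based batch insert further down relies on it. A minimal sketch, assuming a plain mysql2-style connection config:

const fs = require('fs');

const connection = {
    host: '127.0.0.1',
    user: 'root',
    // mysql2 calls this factory with the path named in the LOAD DATA LOCAL INFILE statement
    // and streams the returned file to the server instead of reading the path itself
    infileStreamFactory: filePath => fs.createReadStream(filePath)
};

// A statement like LOAD DATA LOCAL INFILE '/tmp/members.csv' INTO TABLE `members` ...
// is then served from the stream returned above.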

View File

@ -8,7 +8,7 @@ const {faker: americanFaker} = require('@faker-js/faker/locale/en_US');
const crypto = require('crypto');
const {Buffer} = require('node:buffer');
const DatabaseInfo = require('@tryghost/database-info');
const errors = require('@tryghost/errors');
const importers = require('./importers').reduce((acc, val) => {
acc[val.table] = val;
return acc;
@ -152,42 +152,46 @@ class DataGenerator {
}
async importData() {
const start = Date.now();
// Add default tables if none are specified
if (this.tableList.length === 0) {
this.tableList = Object.keys(importers).map(name => ({name}));
} else if (this.withDefault) {
// Add default tables to the end of the list
const defaultTables = Object.keys(importers).map(name => ({name}));
for (const table of defaultTables) {
if (!this.tableList.find(t => t.name === table.name)) {
this.tableList.push(table);
}
}
}
// Error if we have an unknown table
for (const table of this.tableList) {
if (importers[table.name] === undefined) {
throw new errors.IncorrectUsageError({message: `Unknown table: ${table.name}`});
}
}
this.sortTableList();
if (this.printDependencies) {
this.logger.info('Table dependencies:');
for (const table of this.tableList) {
this.logger.info(`\t${table.name}: ${table.dependencies.join(', ')}`);
}
process.exit(0);
}
await this.knex.transaction(async (transaction) => {
// Performance improvements
if (!DatabaseInfo.isSQLite(this.knex)) {
await this.knex.raw('SET FOREIGN_KEY_CHECKS=0;').transacting(transaction);
await this.knex.raw('SET unique_checks=0;').transacting(transaction);
}
// Add default tables if none are specified
if (this.tableList.length === 0) {
this.tableList = Object.keys(importers).map(name => ({name}));
} else if (this.withDefault) {
// Add default tables to the end of the list
const defaultTables = Object.keys(importers).map(name => ({name}));
for (const table of defaultTables) {
if (!this.tableList.find(t => t.name === table.name)) {
this.tableList.push(table);
}
}
}
// Error if we have an unknown table
for (const table of this.tableList) {
if (importers[table.name] === undefined) {
// eslint-disable-next-line
throw new Error(`Unknown table: ${table.name}`);
}
}
this.sortTableList();
if (this.printDependencies) {
this.logger.info('Table dependencies:');
for (const table of this.tableList) {
this.logger.info(`\t${table.name}: ${table.dependencies.join(', ')}`);
}
process.exit(0);
await transaction.raw('ALTER INSTANCE DISABLE INNODB REDO_LOG;');
await transaction.raw('SET FOREIGN_KEY_CHECKS=0;');
await transaction.raw('SET unique_checks=0;');
await transaction.raw('SET autocommit=0;');
await transaction.raw('SET GLOBAL local_infile=1;');
}
if (this.willClearData) {
@ -208,7 +212,7 @@ class DataGenerator {
const cryptoRandomBytes = crypto.randomBytes;
if (this.seed) {
// The probability distributions library uses crypto.randomBytes, which we can't seed, so we need to override it
// The probability distributions library uses crypto.randomBytes, which we can't seed, so we need to override it
crypto.randomBytes = (size) => {
const buffer = Buffer.alloc(size);
for (let i = 0; i < size; i++) {
@ -221,7 +225,7 @@ class DataGenerator {
try {
for (const table of this.tableList) {
if (this.seed) {
// We reset the seed for every table, so the chosen tables don't affect the data and changes in one importer don't affect the others
// We reset the seed for every table, so the chosen tables don't affect the data and changes in one importer don't affect the others
faker.seed(this.seed);
americanFaker.seed(this.seed);
}
@ -238,7 +242,7 @@ class DataGenerator {
}
} finally {
if (this.seed) {
// Revert crypto.randomBytes to the original function
// Revert crypto.randomBytes to the original function
crypto.randomBytes = cryptoRandomBytes;
}
}
@ -250,13 +254,9 @@ class DataGenerator {
});
await tableImporter.finalise();
}
// Performance improvements
if (!DatabaseInfo.isSQLite(this.knex)) {
await this.knex.raw('SET FOREIGN_KEY_CHECKS=1;').transacting(transaction);
await this.knex.raw('SET unique_checks=1;').transacting(transaction);
}
}, {isolationLevel: 'read committed'});
this.logger.info(`Completed data import in ${((Date.now() - start) / 1000).toFixed(1)}s`);
}
}
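
Worth noting: `ALTER INSTANCE DISABLE INNODB REDO_LOG` (MySQL 8.0.21+) is instance-wide and only appropriate for throwaway bulk loads like this dev data set; the diff above does not show it being switched back on. A sketch of undoing the tweaks afterwards, assuming a connection with the INNODB_REDO_LOG_ENABLE privilege:

// Hypothetical cleanup once the import has finished
await transaction.raw('ALTER INSTANCE ENABLE INNODB REDO_LOG;');
await transaction.raw('SET FOREIGN_KEY_CHECKS=1;');
await transaction.raw('SET unique_checks=1;');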

View File

@ -17,7 +17,7 @@ class BenefitsImporter extends TableImporter {
const sixMonthsLater = new Date(blogStartDate);
sixMonthsLater.setMonth(sixMonthsLater.getMonth() + 6);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
name: name,
slug: `${slugify(name)}-${faker.random.numeric(3)}`,
created_at: faker.date.between(blogStartDate, sixMonthsLater)

View File

@ -25,7 +25,7 @@ class EmailBatchesImporter extends TableImporter {
latestUpdatedDate.setHours(latestUpdatedDate.getHours() + 1);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
email_id: this.model.id,
provider_id: `${new Date().toISOString().split('.')[0].replace(/[^0-9]/g, '')}.${faker.datatype.hexadecimal({length: 16, prefix: '', case: 'lower'})}@m.example.com`,
status: 'submitted', // TODO: introduce failures

View File

@ -53,7 +53,7 @@ class EmailRecipientFailuresImporter extends TableImporter {
const error = faker.helpers.arrayElement(errors);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
email_id: this.model.email_id,
member_id: this.model.member_id,
email_recipient_id: this.model.id,

View File

@ -2,6 +2,7 @@ const TableImporter = require('./TableImporter');
const {faker} = require('@faker-js/faker');
const generateEvents = require('../utils/event-generator');
const dateToDatabaseString = require('../utils/database-date');
const debug = require('@tryghost/debug')('EmailRecipientsImporter');
const emailStatus = {
delivered: Symbol(),
@ -10,6 +11,25 @@ const emailStatus = {
none: Symbol()
};
function findFirstHigherIndex(arr, target) {
let start = 0;
let end = arr.length - 1;
let result = -1;
while (start <= end) {
let mid = Math.floor((start + end) / 2);
if (arr[mid] >= target) {
result = mid;
end = mid - 1; // Continue searching in the left half
} else {
start = mid + 1; // Continue searching in the right half
}
}
return result; // Return -1 if no element is higher than target
}
class EmailRecipientsImporter extends TableImporter {
static table = 'email_recipients';
static dependencies = ['emails', 'email_batches', 'members', 'members_subscribe_events'];
@ -19,6 +39,7 @@ class EmailRecipientsImporter extends TableImporter {
}
async import(quantity) {
const now = Date.now();
const emails = await this.transaction
.select(
'id',
@ -28,38 +49,92 @@ class EmailRecipientsImporter extends TableImporter {
'opened_count',
'failed_count')
.from('emails');
this.emails = emails;
this.emailBatches = await this.transaction.select('id', 'email_id', 'updated_at').from('email_batches');
this.members = await this.transaction.select('id', 'uuid', 'email', 'name').from('members');
this.membersSubscribeEvents = await this.transaction.select('id', 'newsletter_id', 'created_at', 'member_id').from('members_subscribe_events');
this.emails = new Map();
for (const email of emails) {
this.emails.set(email.id, email);
}
this.emailBatches = await this.transaction.select('id', 'email_id', 'updated_at').from('email_batches').orderBy('email_id');
const members = await this.transaction.select('id', 'uuid', 'email', 'name').from('members');
this.membersSubscribeEvents = await this.transaction.select('id', 'newsletter_id', 'created_at', 'member_id').from('members_subscribe_events').orderBy('created_at', 'asc'); // Order required for better performance in setReferencedModel
// Create a map for fast lookups
this.members = new Map();
for (const member of members) {
this.members.set(member.id, member);
}
// Save indexes of each batch for performance (remarkably faster than doing findIndex on each generate call)
let lastEmailId = null;
let lastIndex = 0;
for (const batch of this.emailBatches) {
if (batch.email_id !== lastEmailId) {
lastIndex = 0;
lastEmailId = batch.email_id;
}
batch.index = lastIndex;
lastIndex += 1;
}
// Now reorder by email id
debug (`Prepared data for ${this.name} in ${Date.now() - now}ms`);
// We use the same event curve for all emails to speed up the generation
// Spread over 14 days
this.eventStartTimeUsed = new Date();
const endTime = new Date(this.eventStartTimeUsed.getTime() + 1000 * 60 * 60 * 24 * 14);
this.eventCurve = generateEvents({
shape: 'ease-out',
trend: 'negative',
total: 1000,
startTime: this.eventStartTimeUsed,
endTime
});
this.membersSubscribeEventsByNewsletterId = new Map();
this.membersSubscribeEventsCreatedAtsByNewsletterId = new Map();
for (const memberSubscribeEvent of this.membersSubscribeEvents) {
if (!this.membersSubscribeEventsByNewsletterId.has(memberSubscribeEvent.newsletter_id)) {
this.membersSubscribeEventsByNewsletterId.set(memberSubscribeEvent.newsletter_id, []);
}
this.membersSubscribeEventsByNewsletterId.get(memberSubscribeEvent.newsletter_id).push(memberSubscribeEvent);
if (!this.membersSubscribeEventsCreatedAtsByNewsletterId.has(memberSubscribeEvent.newsletter_id)) {
this.membersSubscribeEventsCreatedAtsByNewsletterId.set(memberSubscribeEvent.newsletter_id, []);
}
if (!(memberSubscribeEvent.created_at instanceof Date)) {
// SQLite fix
memberSubscribeEvent.created_at = new Date(memberSubscribeEvent.created_at);
}
this.membersSubscribeEventsCreatedAtsByNewsletterId.get(memberSubscribeEvent.newsletter_id).push(memberSubscribeEvent.created_at.getTime());
}
await this.importForEach(this.emailBatches, quantity ? quantity / emails.length : 1000);
}
setReferencedModel(model) {
this.batch = model;
this.model = this.emails.find(email => email.id === this.batch.email_id);
this.batchIndex = this.emailBatches.filter(b => b.email_id === this.model.id).findIndex(batch => batch.id === this.batch.id);
this.model = this.emails.get(this.batch.email_id);
this.batchIndex = this.batch.index;
// Shallow clone members list so we can shuffle and modify it
const earliestOpenTime = new Date(this.batch.updated_at);
const latestOpenTime = new Date(this.batch.updated_at);
latestOpenTime.setDate(latestOpenTime.getDate() + 14);
const currentTime = new Date();
this.membersList = this.membersSubscribeEvents
.filter(entry => entry.newsletter_id === this.model.newsletter_id)
.filter(entry => new Date(entry.created_at) < earliestOpenTime)
.map(memberSubscribeEvent => memberSubscribeEvent.member_id)
.slice(this.batchIndex * 1000, (this.batchIndex + 1) * 1000);
// Get all members that were subscribed to this newsletter BEFORE the batch was sent
// We use binary search to speed it up
const lastIndex = findFirstHigherIndex(this.membersSubscribeEventsCreatedAtsByNewsletterId.get(this.model.newsletter_id), earliestOpenTime);
this.events = this.membersList.length > 0 ? generateEvents({
shape: 'ease-out',
trend: 'negative',
total: this.membersList.length,
startTime: earliestOpenTime,
endTime: currentTime < latestOpenTime ? currentTime : latestOpenTime
}) : [];
this.membersList = this.membersSubscribeEventsByNewsletterId.get(this.model.newsletter_id).slice(0, Math.max(0, lastIndex - 1))
.slice(this.batchIndex * 1000, (this.batchIndex + 1) * 1000)
.map(memberSubscribeEvent => memberSubscribeEvent.member_id);
this.events = faker.helpers.shuffle(this.eventCurve.slice(0, this.membersList.length));
this.eventIndex = 0;
this.emailMeta = {
// delivered and not opened
@ -79,17 +154,20 @@ class EmailRecipientsImporter extends TableImporter {
}
generate() {
const timestamp = this.events.shift();
let timestamp = this.events.pop();
if (!timestamp) {
return;
}
const memberIdIndex = faker.datatype.number({
min: 0,
max: this.membersList.length - 1
});
const [memberId] = this.membersList.splice(memberIdIndex, 1);
const member = this.members.find(m => m.id === memberId);
// The events are generated for a different time, so we need to move them to the batch time
timestamp = new Date(timestamp.getTime() - this.eventStartTimeUsed.getTime() + new Date(this.batch.updated_at).getTime());
if (timestamp > new Date()) {
timestamp = new Date();
}
const memberId = this.membersList[this.events.length];
const member = this.members.get(memberId);
let status = emailStatus.none;
if (this.emailMeta.failedCount > 0) {
@ -105,17 +183,18 @@ class EmailRecipientsImporter extends TableImporter {
let deliveredTime;
if (status === emailStatus.opened) {
const startDate = new Date(this.batch.updated_at).valueOf();
const endDate = timestamp.valueOf();
deliveredTime = new Date(startDate + (Math.random() * (endDate - startDate)));
const startDate = this.batch.updated_at;
const endDate = timestamp;
deliveredTime = faker.date.between(startDate, endDate);
}
return {
id: faker.database.mongodbObjectId(),
// Using sorted ids is much faster (roughly 35% in my testing) for huge imports
id: this.fastFakeObjectId(),
email_id: this.model.id,
batch_id: this.batch.id,
member_id: member.id,
processed_at: this.batch.updated_at,
processed_at: dateToDatabaseString(this.batch.updated_at),
delivered_at: status === emailStatus.opened ? dateToDatabaseString(deliveredTime) : status === emailStatus.delivered ? dateToDatabaseString(timestamp) : null,
opened_at: status === emailStatus.opened ? dateToDatabaseString(timestamp) : null,
failed_at: status === emailStatus.failed ? dateToDatabaseString(timestamp) : null,
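
A quick illustration of the `findFirstHigherIndex` helper introduced above: given the pre-sorted `created_at` timestamps (milliseconds), it returns the index of the first value greater than or equal to the target, or -1 when none qualifies (values below are made up):

const createdAts = [100, 200, 300, 400]; // sorted ascending, as guaranteed by orderBy('created_at', 'asc')
findFirstHigherIndex(createdAts, 250);   // -> 2 (first element >= 250)
findFirstHigherIndex(createdAts, 100);   // -> 0
findFirstHigherIndex(createdAts, 500);   // -> -1 (nothing is >= the target)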

View File

@ -13,15 +13,16 @@ class EmailsImporter extends TableImporter {
}
async import(quantity) {
const posts = await this.transaction.select('id', 'title', 'published_at').from('posts').where('type', 'post');
const posts = await this.transaction.select('id', 'title', 'published_at').from('posts').where('type', 'post').where('status', 'published').orderBy('published_at', 'desc');
this.newsletters = await this.transaction.select('id').from('newsletters').orderBy('sort_order');
this.membersSubscribeEvents = await this.transaction.select('id', 'newsletter_id', 'created_at').from('members_subscribe_events');
await this.importForEach(posts, quantity ? quantity / posts.length : 1);
// Only generate emails for the last 25% of posts, and only for 50% of those
await this.importForEach(posts.slice(0, Math.ceil(posts.length / 4)), quantity ? quantity / posts.length : 0.5);
}
generate() {
const id = faker.database.mongodbObjectId();
const id = this.fastFakeObjectId();
let newsletter;
if (this.newsletters.length === 0) {

View File

@ -27,7 +27,7 @@ class LabelsImporter extends TableImporter {
generate() {
const name = this.generateName();
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
name: name,
slug: `${slugify(name)}`,
created_at: dateToDatabaseString(blogStartDate),

View File

@ -12,11 +12,25 @@ class MembersClickEventsImporter extends TableImporter {
}
async import(quantity) {
const emailRecipients = await this.transaction.select('id', 'opened_at', 'email_id', 'member_id').from('email_recipients');
this.redirects = await this.transaction.select('id', 'post_id').from('redirects');
this.emails = await this.transaction.select('id', 'post_id').from('emails');
const emailRecipients = await this.transaction.select('id', 'opened_at', 'email_id', 'member_id').from('email_recipients').whereNotNull('opened_at');
const redirects = await this.transaction.select('id', 'post_id').from('redirects');
const emails = await this.transaction.select('id', 'post_id').from('emails');
this.quantity = quantity ? quantity / emailRecipients.length : 2;
// Create maps for faster lookups (this does make a difference for large data generation)
this.emails = new Map();
for (const email of emails) {
this.emails.set(email.id, email);
}
this.redirects = new Map();
for (const redirect of redirects) {
if (!this.redirects.has(redirect.post_id)) {
this.redirects.set(redirect.post_id, []);
}
this.redirects.get(redirect.post_id).push(redirect);
}
await this.importForEach(emailRecipients, this.quantity);
}
@ -26,28 +40,27 @@ class MembersClickEventsImporter extends TableImporter {
min: 0,
max: this.quantity
}) : 0;
const email = this.emails.find(e => e.id === this.model.email_id);
this.redirectList = this.redirects.filter(redirect => redirect.post_id === email.post_id);
const email = this.emails.get(model.email_id);
this.redirectList = this.redirects.get(email.post_id) ?? [];
}
generate() {
if (this.amount <= 0 || this.redirectList.length === 0) {
if (this.amount <= 0 || this.redirectList.length === 0 || !this.model.opened_at) {
return;
}
this.amount -= 1;
const openedAt = new Date(this.model.opened_at);
const laterOn = new Date(this.model.opened_at);
laterOn.setMinutes(laterOn.getMinutes() + 15);
const clickTime = new Date(openedAt.valueOf() + (Math.random() * (laterOn.valueOf() - openedAt.valueOf())));
const laterOn = new Date(openedAt.getTime() + 1000 * 60 * 15);
const clickTime = faker.date.between(openedAt.getTime(), laterOn.getTime()); //added getTime here because it threw random errors
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: this.model.member_id,
redirect_id: this.redirectList[faker.datatype.number({
redirect_id: this.redirectList[this.redirectList.length === 1 ? 0 : (faker.datatype.number({
min: 0,
max: this.redirectList.length - 1
})].id,
}))].id,
created_at: dateToDatabaseString(clickTime)
};
}

View File

@ -1,6 +1,7 @@
const TableImporter = require('./TableImporter');
const {faker} = require('@faker-js/faker');
const {luck} = require('../utils/random');
const dateToDatabaseString = require('../utils/database-date');
class MembersCreatedEventsImporter extends TableImporter {
static table = 'members_created_events';
@ -32,8 +33,18 @@ class MembersCreatedEventsImporter extends TableImporter {
generate() {
const source = this.generateSource();
let attribution = {};
let referrer = {};
// We need to add all properties here already, otherwise the CSV import won't know all the columns
let attribution = {
attribution_id: null,
attribution_type: null,
attribution_url: null
};
let referrer = {
referrer_source: null,
referrer_url: null,
referrer_medium: null
};
if (source === 'member' && luck(30)) {
const post = this.posts.find(p => p.visibility === 'public' && new Date(p.published_at) < new Date(this.model.created_at));
@ -79,8 +90,8 @@ class MembersCreatedEventsImporter extends TableImporter {
}
return {
id: faker.database.mongodbObjectId(),
created_at: this.model.created_at,
id: this.fastFakeObjectId(),
created_at: dateToDatabaseString(this.model.created_at),
member_id: this.model.id,
source,
...attribution,

View File

@ -28,11 +28,11 @@ class MembersFeedbackImporter extends TableImporter {
const openedAt = new Date(this.model.opened_at);
const laterOn = new Date(this.model.opened_at);
laterOn.setMinutes(laterOn.getMinutes() + 60);
const feedbackTime = new Date(openedAt.valueOf() + (Math.random() * (laterOn.valueOf() - openedAt.valueOf())));
const feedbackTime = faker.date.between(openedAt, laterOn);
const postId = this.emails.find(email => email.id === this.model.email_id).post_id;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
score: luck(70) ? 1 : 0,
member_id: this.model.member_id,
post_id: postId,

View File

@ -5,6 +5,7 @@ const {blogStartDate: startTime} = require('../utils/blog-info');
const generateEvents = require('../utils/event-generator');
const {luck} = require('../utils/random');
const dateToDatabaseString = require('../utils/database-date');
const debug = require('@tryghost/debug')('MembersImporter');
class MembersImporter extends TableImporter {
static table = 'members';
@ -19,6 +20,8 @@ class MembersImporter extends TableImporter {
}
async import(quantity = this.defaultQuantity) {
const generateNow = Date.now();
this.timestamps = generateEvents({
shape: 'ease-in',
trend: 'positive',
@ -26,6 +29,7 @@ class MembersImporter extends TableImporter {
startTime,
endTime: new Date()
}).sort();
debug(`${this.name} generated ${this.timestamps.length} timestamps in ${Date.now() - generateNow}ms`);
await super.import(quantity);
}
@ -62,10 +66,10 @@ class MembersImporter extends TableImporter {
}
generate() {
const id = faker.database.mongodbObjectId();
const id = this.fastFakeObjectId();
// Use name from American locale to reflect an English-speaking audience
const name = `${americanFaker.name.firstName()} ${americanFaker.name.lastName()}`;
const timestamp = this.timestamps.shift();
const timestamp = this.timestamps.pop();
return {
id,

View File

@ -25,7 +25,7 @@ class MembersLabelsImporter extends TableImporter {
}
// TODO: Ensure we don't generate the same member label twice
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: this.model.id,
label_id: this.labels[faker.datatype.number({
min: 0,

View File

@ -1,5 +1,4 @@
const TableImporter = require('./TableImporter');
const {faker} = require('@faker-js/faker');
const {luck} = require('../utils/random');
const generateEvents = require('../utils/event-generator');
const dateToDatabaseString = require('../utils/database-date');
@ -31,20 +30,20 @@ class MembersLoginEventsImporter extends TableImporter {
trend: 'negative',
// Steady readers login more, readers who lose interest read less overall.
// ceil because members will all have logged in at least once
total: shape === 'flat' ? Math.ceil(daysBetween / 3) : Math.ceil(daysBetween / 7),
total: Math.min(5, shape === 'flat' ? Math.ceil(daysBetween / 3) : Math.ceil(daysBetween / 7)),
startTime: new Date(model.created_at),
endTime: endDate
});
}
generate() {
const timestamp = this.timestamps.shift();
const timestamp = this.timestamps.pop();
if (!timestamp) {
// Out of events for this user
return null;
}
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
created_at: dateToDatabaseString(timestamp),
member_id: this.model.id
};
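
The shift()-to-pop() change here (and in several other importers) is a small but repeated win: shift() removes from the front and reindexes the whole array on every call, while pop() is O(1). Where the original order still matters, the source list is reversed up front, as the ProductsImporter below does with its tier names; a tiny illustration:

const names = ['Free', 'Bronze', 'Silver', 'Gold'].reverse();
names.pop(); // 'Free', the same order shift() would have produced, without reindexing the array on every call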

View File

@ -1,4 +1,3 @@
const {faker} = require('@faker-js/faker');
const TableImporter = require('./TableImporter');
class MembersNewslettersImporter extends TableImporter {
@ -17,7 +16,7 @@ class MembersNewslettersImporter extends TableImporter {
generate() {
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: this.model.member_id,
newsletter_id: this.model.newsletter_id
};

View File

@ -1,5 +1,4 @@
const TableImporter = require('./TableImporter');
const {faker} = require('@faker-js/faker');
class MembersPaidSubscriptionEventsImporter extends TableImporter {
static table = 'members_paid_subscription_events';
@ -11,8 +10,12 @@ class MembersPaidSubscriptionEventsImporter extends TableImporter {
async import() {
const subscriptions = await this.transaction.select('id', 'customer_id', 'plan_currency', 'plan_amount', 'created_at', 'plan_id', 'status', 'cancel_at_period_end', 'current_period_end').from('members_stripe_customers_subscriptions');
this.membersStripeCustomers = await this.transaction.select('id', 'member_id', 'customer_id').from('members_stripe_customers');
const membersStripeCustomers = await this.transaction.select('id', 'member_id', 'customer_id').from('members_stripe_customers');
this.membersStripeCustomers = new Map();
for (const customer of membersStripeCustomers) {
this.membersStripeCustomers.set(customer.customer_id, customer);
}
await this.importForEach(subscriptions, 2);
}
@ -58,7 +61,7 @@ class MembersPaidSubscriptionEventsImporter extends TableImporter {
return;
}
const memberCustomer = this.membersStripeCustomers.find(c => c.customer_id === this.model.customer_id);
const memberCustomer = this.membersStripeCustomers.get(this.model.customer_id);
const isMonthly = this.model.plan_interval === 'month';
// Note that we need to recalculate the MRR, because it will be zero for inactive subscriptions
@ -66,7 +69,7 @@ class MembersPaidSubscriptionEventsImporter extends TableImporter {
// todo: implement + MRR and -MRR in case of inactive subscriptions
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
// TODO: Support expired / updated / cancelled events too
type: this.count === 1 ? 'created' : this.getStatus(this.model),
member_id: memberCustomer.member_id,

View File

@ -30,7 +30,7 @@ class MembersProductsImporter extends TableImporter {
generate() {
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: this.model.id,
product_id: this.getProduct().id,
sort_order: 0,

View File

@ -18,15 +18,15 @@ class MembersStatusEventsImporter extends TableImporter {
setReferencedModel(model) {
this.events = [{
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: model.id,
from_status: null,
to_status: 'free',
created_at: model.created_at
created_at: dateToDatabaseString(model.created_at)
}];
if (model.status !== 'free') {
this.events.push({
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: model.id,
from_status: 'free',
to_status: model.status,
@ -36,7 +36,7 @@ class MembersStatusEventsImporter extends TableImporter {
}
generate() {
const event = this.events.shift();
const event = this.events.pop();
if (!event) {
return null;
}

View File

@ -29,7 +29,7 @@ class MembersStripeCustomersImporter extends TableImporter {
}
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: this.model.id,
customer_id: `cus_${faker.random.alphaNumeric(14, {
casing: 'mixed'

View File

@ -13,13 +13,23 @@ class MembersStripeCustomersSubscriptionsImporter extends TableImporter {
}
async import() {
this.membersProducts = await this.transaction.select('member_id', 'product_id').from('members_products');
const membersProducts = await this.transaction.select('member_id', 'product_id').from('members_products');
this.members = await this.transaction.select('id', 'status', 'created_at').from('members');//.where('status', 'paid');
this.membersStripeCustomers = await this.transaction.select('id', 'member_id', 'customer_id').from('members_stripe_customers');
const membersStripeCustomers = await this.transaction.select('id', 'member_id', 'customer_id').from('members_stripe_customers');
this.products = await this.transaction.select('id', 'name').from('products').whereNot('type', 'free');
this.stripeProducts = await this.transaction.select('id', 'product_id', 'stripe_product_id').from('stripe_products');
this.stripePrices = await this.transaction.select('id', 'nickname', 'stripe_product_id', 'stripe_price_id', 'amount', 'interval', 'currency').from('stripe_prices');
this.membersStripeCustomers = new Map();
for (const customer of membersStripeCustomers) {
this.membersStripeCustomers.set(customer.member_id, customer);
}
this.membersProducts = new Map();
for (const product of membersProducts) {
this.membersProducts.set(product.member_id, product);
}
await this.importForEach(this.members, 2);
}
@ -33,7 +43,7 @@ class MembersStripeCustomersSubscriptionsImporter extends TableImporter {
this.count += 1;
const member = this.model;
const customer = this.membersStripeCustomers.find(c => this.model.id === c.member_id);
const customer = this.membersStripeCustomers.get(this.model.id);
if (!customer) {
// This is a requirement, so skip if we don't have a customer
@ -44,7 +54,7 @@ class MembersStripeCustomersSubscriptionsImporter extends TableImporter {
return;
}
const memberProduct = this.membersProducts.find(p => p.member_id === this.model.id);
const memberProduct = this.membersProducts.get(this.model.id);
let ghostProduct = memberProduct ? this.products.find(product => product.id === memberProduct.product_id) : null;
// Whether we should create a valid subscription or not
@ -201,7 +211,7 @@ class MembersStripeCustomersSubscriptionsImporter extends TableImporter {
const status = createValid ? faker.helpers.arrayElement(validStatusses) : faker.helpers.arrayElement(invalidStatusses);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
customer_id: customer.customer_id,
subscription_id: `sub_${faker.random.alphaNumeric(14)}`,
stripe_price_id: stripePrice.stripe_price_id,

View File

@ -41,10 +41,10 @@ class MembersSubscribeEventsImporter extends TableImporter {
const newsletterId = this.newsletters[count % this.newsletters.length].id;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
member_id: this.model.id,
newsletter_id: newsletterId,
subscribed: true,
subscribed: 1,
created_at: createdAt,
source: 'member'
};

View File

@ -4,7 +4,7 @@ const {luck} = require('../utils/random');
class MembersSubscriptionCreatedEventsImporter extends TableImporter {
static table = 'members_subscription_created_events';
static dependencies = ['members_stripe_customers_subscriptions', 'posts', 'mentions'];
static dependencies = ['members_stripe_customers', 'members_stripe_customers_subscriptions', 'posts', 'mentions'];
constructor(knex, transaction) {
super(MembersSubscriptionCreatedEventsImporter.table, knex, transaction);
@ -12,16 +12,29 @@ class MembersSubscriptionCreatedEventsImporter extends TableImporter {
async import(quantity) {
const membersStripeCustomersSubscriptions = await this.transaction.select('id', 'created_at', 'customer_id').from('members_stripe_customers_subscriptions');
this.membersStripeCustomers = await this.transaction.select('id', 'member_id', 'customer_id').from('members_stripe_customers');
this.posts = await this.transaction.select('id', 'published_at', 'visibility', 'type', 'slug').from('posts').orderBy('published_at', 'desc');
const membersStripeCustomers = await this.transaction.select('id', 'member_id', 'customer_id').from('members_stripe_customers');
this.posts = await this.transaction.select('id', 'published_at', 'visibility', 'type', 'slug').from('posts').whereNotNull('published_at').where('visibility', 'public').orderBy('published_at', 'desc');
this.incomingRecommendations = await this.transaction.select('id', 'source', 'created_at').from('mentions');
this.membersStripeCustomers = new Map();
for (const memberStripeCustomer of membersStripeCustomers) {
this.membersStripeCustomers.set(memberStripeCustomer.customer_id, memberStripeCustomer);
}
await this.importForEach(membersStripeCustomersSubscriptions, quantity ? quantity / membersStripeCustomersSubscriptions.length : 1);
}
generate() {
let attribution = {};
let referrer = {};
// We need to add all properties here already, otherwise the CSV import won't know all the columns
let attribution = {
attribution_id: null,
attribution_type: null,
attribution_url: null
};
let referrer = {
referrer_source: null,
referrer_url: null,
referrer_medium: null
};
if (luck(30)) {
const post = this.posts.find(p => p.visibility === 'public' && new Date(p.published_at) < new Date(this.model.created_at));
@ -55,10 +68,10 @@ class MembersSubscriptionCreatedEventsImporter extends TableImporter {
}
}
const memberCustomer = this.membersStripeCustomers.find(c => c.customer_id === this.model.customer_id);
const memberCustomer = this.membersStripeCustomers.get(this.model.customer_id);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
created_at: this.model.created_at,
member_id: memberCustomer.member_id,
subscription_id: this.model.id,

View File

@ -24,7 +24,7 @@ class NewslettersImporter extends TableImporter {
weekAfter.setDate(weekAfter.getDate() + 7);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
uuid: faker.datatype.uuid(),
name: name,
slug: `${slugify(name)}-${faker.random.numeric(3)}`,

View File

@ -22,7 +22,7 @@ class OffersImporter extends TableImporter {
}
generate() {
const name = this.names.shift();
const name = this.names.pop();
const product = this.products[faker.datatype.number({
min: 0,
@ -47,7 +47,7 @@ class OffersImporter extends TableImporter {
// created_at: {type: 'dateTime', nullable: false},
// updated_at: {type: 'dateTime', nullable: true}
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
active: true,
name,
code: slugify(name),

View File

@ -21,7 +21,7 @@ class PostsAuthorsImporter extends TableImporter {
const sortOrder = this.sortOrder;
this.sortOrder = this.sortOrder + 1;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
post_id: this.model.id,
author_id: this.users[faker.datatype.number(this.users.length - 1)].id,
sort_order: sortOrder

View File

@ -51,7 +51,7 @@ class PostsImporter extends TableImporter {
const visibility = luck(85) ? 'paid' : luck(10) ? 'members' : 'public';
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
created_at: dateToDatabaseString(timestamp),
created_by: '1',
updated_at: dateToDatabaseString(timestamp),

View File

@ -1,4 +1,3 @@
const {faker} = require('@faker-js/faker');
const TableImporter = require('./TableImporter');
class PostsProductsImporter extends TableImporter {
@ -25,7 +24,7 @@ class PostsProductsImporter extends TableImporter {
const sortOrder = this.sortOrder;
this.sortOrder = this.sortOrder + 1;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
post_id: this.model.id,
product_id: this.products[sortOrder].id,
sort_order: this.sortOrder

View File

@ -35,7 +35,7 @@ class PostsTagsImporter extends TableImporter {
this.notIndex.push(tagIndex);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
post_id: this.model.id,
tag_id: this.tags[tagIndex].id,
sort_order: sortOrder

View File

@ -1,4 +1,3 @@
const {faker} = require('@faker-js/faker');
const TableImporter = require('./TableImporter');
class ProductsBenefitsImporter extends TableImporter {
@ -44,7 +43,7 @@ class ProductsBenefitsImporter extends TableImporter {
const sortOrder = this.sortOrder;
this.sortOrder = this.sortOrder + 1;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
product_id: this.model.id,
benefit_id: this.benefits[sortOrder].id,
sort_order: sortOrder

View File

@ -14,7 +14,7 @@ class ProductsImporter extends TableImporter {
async import(quantity = this.defaultQuantity) {
// TODO: Add random products if quantity != 4
this.names = ['Free', 'Bronze', 'Silver', 'Gold'];
this.names = ['Free', 'Bronze', 'Silver', 'Gold'].reverse();
this.count = 0;
await super.import(quantity);
@ -61,7 +61,7 @@ class ProductsImporter extends TableImporter {
}
generate() {
const name = this.names.shift();
const name = this.names.pop();
const count = this.count;
this.count = this.count + 1;
const sixMonthsLater = new Date(blogStartDate);
@ -78,7 +78,7 @@ class ProductsImporter extends TableImporter {
tierInfo.yearly_price = count * 5000;
}
return Object.assign({}, {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
name: name,
slug: `${slugify(name)}-${faker.random.numeric(3)}`,
visibility: 'public',

View File

@ -21,7 +21,7 @@ class RecommendationClickEventsImporter extends TableImporter {
// Not unique
const member = luck(30) ? null : faker.helpers.arrayElement(this.members);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
recommendation_id: this.model.id,
member_id: member?.id ?? null,
created_at: faker.date.past()

View File

@ -21,7 +21,7 @@ class RecommendationSubscribeEventsImporter extends TableImporter {
// Not unique
const member = luck(1) ? null : faker.helpers.arrayElement(this.members);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
recommendation_id: this.model.id,
member_id: member?.id ?? null,
created_at: faker.date.past()

View File

@ -15,7 +15,7 @@ class RecommendationsImporter extends TableImporter {
}
generate() {
const id = faker.database.mongodbObjectId();
const id = this.fastFakeObjectId();
return {
id,
url: faker.internet.url(),

View File

@ -36,7 +36,7 @@ class RedirectsImporter extends TableImporter {
}
this.amount -= 1;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
from: `/r/${faker.datatype.hexadecimal({length: 32, prefix: '', case: 'lower'})}`,
to: `${faker.internet.url()}/${faker.helpers.slugify(`${faker.word.adjective()} ${faker.word.noun()}`).toLowerCase()}`,
post_id: this.model.id,

View File

@ -32,7 +32,7 @@ class RolesUsersImporter extends TableImporter {
return;
}
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
role_id: actualRole.id,
user_id: this.model.id
};

View File

@ -54,7 +54,7 @@ class StripePricesImporter extends TableImporter {
}
return Object.assign({}, {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
stripe_price_id: faker.datatype.hexadecimal({
length: 64,
prefix: ''

View File

@ -20,7 +20,7 @@ class StripeProductsImporter extends TableImporter {
generate() {
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
product_id: this.model.id,
stripe_product_id: faker.datatype.hexadecimal({
length: 64,

View File

@ -58,7 +58,7 @@ class SubscriptionsImporter extends TableImporter {
}
}
return Object.assign({}, {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
type: status,
status: 'active',
member_id: this.model.member_id,

View File

@ -1,4 +1,14 @@
const debug = require('@tryghost/debug')('TableImporter');
const dateToDatabaseString = require('../utils/database-date');
const path = require('path');
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
const fs = require('fs');
const {luck} = require('../utils/random');
const os = require('os');
const errors = require('@tryghost/errors');
const ObjectID = require('bson-objectid').default;
let idIndex = 0;
class TableImporter {
/**
@ -23,6 +33,12 @@ class TableImporter {
this.transaction = transaction;
}
fastFakeObjectId() {
// faker.database.mongodbObjectId() is too slow (slow to generate, and MySQL inserts ascending PRIMARY keys faster)
idIndex += 1;
return ObjectID.createFromTime(idIndex).toHexString();
}
async #generateData(amount = this.defaultQuantity) {
let data = [];
@ -42,10 +58,7 @@ class TableImporter {
debug(`${this.name} generated ${data.length} records in ${Date.now() - generateNow}ms`);
if (data.length > 0) {
debug (`Importing ${data.length} records into ${this.name}`);
const now = Date.now();
await this.knex.batchInsert(this.name, data).transacting(this.transaction);
debug(`${this.name} imported ${data.length} records in ${Date.now() - now}ms`);
await this.batchInsert(data);
}
}
@ -58,12 +71,16 @@ class TableImporter {
debug (`Generating data for ${models.length} models x ${amount} for ${this.name}`);
const now = Date.now();
let settingReferenceModel = 0;
for (const model of models) {
let s = Date.now();
this.setReferencedModel(model);
settingReferenceModel += Date.now() - s;
let currentAmount = (typeof amount === 'function') ? amount() : amount;
if (!Number.isInteger(currentAmount)) {
currentAmount = Math.floor(currentAmount) + ((Math.random() < currentAmount % 1) ? 1 : 0);
currentAmount = Math.floor(currentAmount) + luck((currentAmount % 1) * 100);
}
const generatedData = await this.#generateData(currentAmount);
@ -72,15 +89,74 @@ class TableImporter {
}
}
debug(`${this.name} generated ${data.length} records in ${Date.now() - now}ms`);
debug(`${this.name} generated ${data.length} records in ${Date.now() - now}ms (${settingReferenceModel}ms setting reference model)`);
if (data.length > 0) {
const now2 = Date.now();
await this.knex.batchInsert(this.name, data).transacting(this.transaction);
debug(`${this.name} imported ${data.length} records in ${Date.now() - now2}ms`);
await this.batchInsert(data);
}
}
async batchInsert(data) {
// Write to CSV file
const rootFolder = os.tmpdir();
const filePath = path.join(rootFolder, `${this.name}.csv`);
let now = Date.now();
if (data.length > 1000) {
try {
await fs.promises.unlink(filePath);
} catch (e) {
// Ignore: file doesn't exist
}
const csvWriter = createCsvWriter({
path: filePath,
header: Object.keys(data[0]).map((key) => {
return {id: key, title: key};
})
});
// Loop over the data in chunks of 50,000 items
const batchSize = 50000;
// Otherwise we get an out-of-range error because csvWriter tries to create a string that is too long
for (let i = 0; i < data.length; i += batchSize) {
const slicedData = data.slice(i, i + batchSize);
// Map data to what MySQL expects in the CSV for values like booleans, null and dates
for (let j = 0; j < slicedData.length; j++) {
const obj = slicedData[j];
for (const [key, value] of Object.entries(obj)) {
if (typeof value === 'boolean') {
obj[key] = value ? 1 : 0;
} else if (value instanceof Date) {
obj[key] = dateToDatabaseString(value);
} else if (value === null) {
obj[key] = '\\N';
}
}
}
await csvWriter.writeRecords(slicedData);
}
debug(`${this.name} saved CSV import file in ${Date.now() - now}ms`);
now = Date.now();
// Import from CSV file
const [result] = await this.transaction.raw(`LOAD DATA LOCAL INFILE '${filePath}' INTO TABLE \`${this.name}\` FIELDS TERMINATED BY ',' ENCLOSED BY '"' IGNORE 1 LINES (${Object.keys(data[0]).map(d => '`' + d + '`').join(',')});`);
if (result.affectedRows !== data.length) {
throw new errors.InternalServerError({
message: `CSV import failed: expected ${data.length} imported rows, got ${result.affectedRows}`
});
}
} else {
await this.knex.batchInsert(this.name, data).transacting(this.transaction);
}
debug(`${this.name} imported ${data.length} records in ${Date.now() - now}ms`);
}
/**
* Finalise the imported data, e.g. adding summary records based on a table's dependents
*/
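
For a sense of why `fastFakeObjectId` helps: `bson-objectid`'s `createFromTime` takes a seconds value, writes it into the four leading timestamp bytes and zero-fills the rest, so an incrementing `idIndex` yields strictly ascending hex ids, which InnoDB inserts far faster than random primary keys. A quick illustration, assuming the documented `bson-objectid` behaviour:

const ObjectID = require('bson-objectid').default;

ObjectID.createFromTime(1).toHexString(); // '000000010000000000000000'
ObjectID.createFromTime(2).toHexString(); // '000000020000000000000000', ascending, unlike faker's random object ids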

View File

@ -28,7 +28,7 @@ class TagsImporter extends TableImporter {
const twoYearsAgo = new Date();
twoYearsAgo.setFullYear(twoYearsAgo.getFullYear() - 2);
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
name: name,
slug: `${slugify(name)}-${faker.random.numeric(3)}`,
description: faker.lorem.sentence(),

View File

@ -16,7 +16,7 @@ class UsersImporter extends TableImporter {
async generate() {
const name = `${faker.name.firstName()} ${faker.name.lastName()}`;
return {
id: faker.database.mongodbObjectId(),
id: this.fastFakeObjectId(),
name: name,
slug: slugify(name),
password: await security.password.hash(faker.color.human()),

View File

@ -14,7 +14,7 @@ class WebMentionsImporter extends TableImporter {
}
generate() {
const id = faker.database.mongodbObjectId();
const id = this.fastFakeObjectId();
const author = `${faker.name.fullName()}`;

View File

@ -1,5 +1,3 @@
const {faker} = require('@faker-js/faker');
class JsonImporter {
constructor(knex, transaction) {
this.knex = knex;
@ -25,7 +23,7 @@ class JsonImporter {
}) {
for (const obj of data) {
if (!('id' in obj)) {
obj.id = faker.database.mongodbObjectId();
obj.id = this.fastFakeObjectId();
}
}
if (rows.findIndex(row => row === 'id') === -1) {

View File

@ -1,3 +1,7 @@
module.exports = function dateToDatabaseString(date) {
if (typeof date === 'string') {
// SQLite fix when reusing other dates from the db
return date;
}
return date.toISOString().replace('Z','').replace('T', ' ');
};
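
In other words, the helper turns a JS Date into the literal 'YYYY-MM-DD HH:MM:SS.sss' string that MySQL (and the CSV import above) expects, and passes strings through untouched:

dateToDatabaseString(new Date('2024-01-05T13:42:30.000Z')); // -> '2024-01-05 13:42:30.000'
dateToDatabaseString('2024-01-05 13:42:30.000');            // -> returned as-is (SQLite already hands back strings)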

View File

@ -29,6 +29,7 @@ const generateEvents = ({
beta = 1;
break;
}
const data = probabilityDistributions.rbeta(total, alpha, beta, 0);
const startTimeValue = startTime.valueOf();
const timeDifference = endTime.valueOf() - startTimeValue;

View File

@ -29,6 +29,7 @@
"@faker-js/faker": "7.6.0",
"@tryghost/root-utils": "0.3.24",
"@tryghost/string": "0.2.10",
"csv-writer": "^1.6.0",
"probability-distributions": "0.9.1"
}
}

View File

@ -80,7 +80,7 @@ export class IncomingRecommendationService {
// More importantly, we might have missed some deletes which we can detect.
// So we do a slow revalidation of all incoming recommendations
// This also prevents doing multiple external fetches when doing quick reboots of Ghost after each other (requires Ghost to be up for at least 15 seconds)
if (!process.env.NODE_ENV?.startsWith('test')) {
if (!process.env.NODE_ENV?.startsWith('test') && process.env.NODE_ENV !== 'development') {
setTimeout(() => {
logging.info('Updating incoming recommendations on boot');
this.#updateIncomingRecommendations().catch((err) => {

View File

@ -48,7 +48,7 @@ describe('IncomingRecommendationService', function () {
// Sandbox time
const saved = process.env.NODE_ENV;
try {
process.env.NODE_ENV = 'development';
process.env.NODE_ENV = 'nottesting';
await service.init();
clock.tick(1000 * 60 * 60 * 24);
assert(refreshMentions.calledOnce);
@ -61,7 +61,7 @@ describe('IncomingRecommendationService', function () {
// Sandbox time
const saved = process.env.NODE_ENV;
try {
process.env.NODE_ENV = 'development';
process.env.NODE_ENV = 'nottesting';
refreshMentions.rejects(new Error('test'));
await service.init();

View File

@ -30,7 +30,7 @@
"fix": "yarn cache clean && rimraf -g '**/node_modules' && yarn",
"knex-migrator": "yarn workspace ghost run knex-migrator",
"setup": "yarn && git submodule update --init && NODE_ENV=development node .github/scripts/setup.js",
"reset:data": "cd ghost/core && node index.js generate-data --clear-database --quantities members:100,posts:1 --seed 123",
"reset:data": "cd ghost/core && node index.js generate-data --clear-database --quantities members:100000,posts:500 --seed 123",
"docker:reset": "docker-compose -f .github/scripts/docker-compose.yml down -v && docker-compose -f .github/scripts/docker-compose.yml up -d --wait",
"lint": "nx run-many -t lint",
"test": "nx run-many -t test",
@ -117,6 +117,7 @@
"nx": "16.8.1",
"rimraf": "5.0.5",
"ts-node": "10.9.2",
"typescript": "5.3.3"
"typescript": "5.3.3",
"inquirer": "8.2.4"
}
}

View File

@ -2668,16 +2668,16 @@
"@elastic/elasticsearch@8.10.0", "@elastic/elasticsearch@8.5.0":
version "8.5.0"
resolved "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.5.0.tgz#407aee0950a082ee76735a567f2571cf4301d4ea"
resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-8.5.0.tgz#407aee0950a082ee76735a567f2571cf4301d4ea"
integrity sha512-iOgr/3zQi84WmPhAplnK2W13R89VXD2oc6WhlQmH3bARQwmI+De23ZJKBEn7bvuG/AHMAqasPXX7uJIiJa2MqQ==
dependencies:
"@elastic/transport" "^8.2.0"
tslib "^2.4.0"
"@elastic/transport@^8.2.0":
version "8.3.4"
resolved "https://registry.npmjs.org/@elastic/transport/-/transport-8.3.4.tgz#43c852e848dc8502bbd7f23f2d61bd5665cded99"
integrity sha512-+0o8o74sbzu3BO7oOZiP9ycjzzdOt4QwmMEjFc1zfO7M0Fh7QX1xrpKqZbSd8vBwihXNlSq/EnMPfgD2uFEmFg==
version "8.4.0"
resolved "https://registry.yarnpkg.com/@elastic/transport/-/transport-8.4.0.tgz#e1ec05f7a2857162c161e2c97008f9b21301a673"
integrity sha512-Yb3fDa7yGD0ca3uMbL64M3vM1cE5h5uHmBcTjkdB4VpCasRNKSd09iDpwqX8zX1tbBtxcaKYLceKthWvPeIxTw==
dependencies:
debug "^4.3.4"
hpagent "^1.0.0"
@ -14166,6 +14166,11 @@ csstype@^3.0.2:
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9"
integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==
csv-writer@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/csv-writer/-/csv-writer-1.6.0.tgz#d0cea44b6b4d7d3baa2ecc6f3f7209233514bcf9"
integrity sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g==
custom-error-instance@2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/custom-error-instance/-/custom-error-instance-2.1.1.tgz#3cf6391487a6629a6247eb0ca0ce00081b7e361a"
@ -19953,6 +19958,27 @@ inline-source-map-comment@^1.0.5:
sum-up "^1.0.1"
xtend "^4.0.0"
inquirer@8.2.4:
version "8.2.4"
resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.4.tgz#ddbfe86ca2f67649a67daa6f1051c128f684f0b4"
integrity sha512-nn4F01dxU8VeKfq192IjLsxu0/OmMZ4Lg3xKAns148rCaXP6ntAoEkVYZThWjwON8AlzdZZi6oqnhNbxUG9hVg==
dependencies:
ansi-escapes "^4.2.1"
chalk "^4.1.1"
cli-cursor "^3.1.0"
cli-width "^3.0.0"
external-editor "^3.0.3"
figures "^3.0.0"
lodash "^4.17.21"
mute-stream "0.0.8"
ora "^5.4.1"
run-async "^2.4.0"
rxjs "^7.5.5"
string-width "^4.1.0"
strip-ansi "^6.0.0"
through "^2.3.6"
wrap-ansi "^7.0.0"
inquirer@8.2.6:
version "8.2.6"
resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.6.tgz#733b74888195d8d400a67ac332011b5fae5ea562"