Added central error handler to job manager

refs https://github.com/TryGhost/Ghost-Utils/issues/118

- Duplicating error handling across jobs is not the best developer experience. Also, having custom error handling logic in each job did not allow for the recommended worker script behavior: letting unhandled exceptions bubble up and be managed by the parent process (see the sketch below)
Naz 2020-12-07 16:43:57 +13:00
parent 8aa55feaf8
commit c1e3788570
5 changed files with 61 additions and 77 deletions
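The pattern this enables, sketched with a hypothetical `do-work` job (the `doWork` helper and job body are illustrative, not part of this commit): the worker script just does its work and reports completion, and any uncaught exception is left to bubble up out of the worker, where the job manager's new central errorHandler (see the jobs service diff below) logs it and reports it to Sentry.

    // sketch of a worker script under the centralised error handling model
    const {parentPort} = require('bthreads');

    const doWork = async () => {
        // ... actual job body goes here ...
    };

    (async () => {
        // no try/catch any more: a thrown error bubbles up to the parent
        // process and is handled by the job manager's errorHandler
        await doWork();

        if (parentPort) {
            parentPort.postMessage('done');
        } else {
            // give the logging pipes time to finish writing before exit
            setTimeout(() => {
                process.exit(0);
            }, 1000);
        }
    })();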

View File

@@ -1,6 +1,5 @@
 const logging = require('../../../../shared/logging');
 const {parentPort} = require('bthreads');
-const sentry = require('../../../../shared/sentry');
 const debug = require('ghost-ignition').debug('jobs:email-analytics:fetch-all');

 // one-off job to fetch all available events and re-process them idempotently
@@ -27,46 +26,36 @@ if (parentPort) {
 }

 (async () => {
-    try {
-        const models = require('../../../models');
-        const settingsService = require('../../settings');
+    const models = require('../../../models');
+    const settingsService = require('../../settings');

-        // must be initialized before emailAnalyticsService is required otherwise
-        // requires are in the wrong order and settingsCache will always be empty
-        await models.init();
-        await settingsService.init();
+    // must be initialized before emailAnalyticsService is required otherwise
+    // requires are in the wrong order and settingsCache will always be empty
+    await models.init();
+    await settingsService.init();

-        const emailAnalyticsService = require('../');
+    const emailAnalyticsService = require('../');

-        const fetchStartDate = new Date();
-        debug('Starting email analytics fetch of all available events');
-        const eventStats = await emailAnalyticsService.fetchAll();
-        const fetchEndDate = new Date();
-        debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);
+    const fetchStartDate = new Date();
+    debug('Starting email analytics fetch of all available events');
+    const eventStats = await emailAnalyticsService.fetchAll();
+    const fetchEndDate = new Date();
+    debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);

-        const aggregateStartDate = new Date();
-        debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
-        await emailAnalyticsService.aggregateStats(eventStats);
-        const aggregateEndDate = new Date();
-        debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);
+    const aggregateStartDate = new Date();
+    debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
+    await emailAnalyticsService.aggregateStats(eventStats);
+    const aggregateEndDate = new Date();
+    debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);

-        logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails and ${eventStats.memberIds.length} members in ${aggregateEndDate - fetchStartDate}ms`);
-
-        if (parentPort) {
-            parentPort.postMessage('done');
-        } else {
-            // give the logging pipes time finish writing before exit
-            setTimeout(() => {
-                process.exit(0);
-            }, 1000);
-        }
-    } catch (error) {
-        logging.error(error);
-        sentry.captureException(error);
+    logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails in ${aggregateEndDate - fetchStartDate}ms`);

+    if (parentPort) {
+        parentPort.postMessage('done');
+    } else {
         // give the logging pipes time finish writing before exit
         setTimeout(() => {
-            process.exit(1);
+            process.exit(0);
         }, 1000);
     }
 })();

View File

@@ -1,6 +1,5 @@
 const logging = require('../../../../shared/logging');
 const {parentPort} = require('bthreads');
-const sentry = require('../../../../shared/sentry');
 const debug = require('ghost-ignition').debug('jobs:email-analytics:fetch-latest');

 // recurring job to fetch analytics since the most recently seen event timestamp
@@ -28,46 +27,36 @@ if (parentPort) {
 }

 (async () => {
-    try {
-        const models = require('../../../models');
-        const settingsService = require('../../settings');
+    const models = require('../../../models');
+    const settingsService = require('../../settings');

-        // must be initialized before emailAnalyticsService is required otherwise
-        // requires are in the wrong order and settingsCache will always be empty
-        await models.init();
-        await settingsService.init();
+    // must be initialized before emailAnalyticsService is required otherwise
+    // requires are in the wrong order and settingsCache will always be empty
+    await models.init();
+    await settingsService.init();

-        const emailAnalyticsService = require('../');
+    const emailAnalyticsService = require('../');

-        const fetchStartDate = new Date();
-        debug('Starting email analytics fetch of latest events');
-        const eventStats = await emailAnalyticsService.fetchLatest();
-        const fetchEndDate = new Date();
-        debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);
+    const fetchStartDate = new Date();
+    debug('Starting email analytics fetch of latest events');
+    const eventStats = await emailAnalyticsService.fetchLatest();
+    const fetchEndDate = new Date();
+    debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);

-        const aggregateStartDate = new Date();
-        debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
-        await emailAnalyticsService.aggregateStats(eventStats);
-        const aggregateEndDate = new Date();
-        debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);
+    const aggregateStartDate = new Date();
+    debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
+    await emailAnalyticsService.aggregateStats(eventStats);
+    const aggregateEndDate = new Date();
+    debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);

-        logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails and ${eventStats.memberIds.length} members in ${aggregateEndDate - fetchStartDate}ms`);
-
-        if (parentPort) {
-            parentPort.postMessage('done');
-        } else {
-            // give the logging pipes time finish writing before exit
-            setTimeout(() => {
-                process.exit(0);
-            }, 1000);
-        }
-    } catch (error) {
-        logging.error(error);
-        sentry.captureException(error);
+    logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails in ${aggregateEndDate - fetchStartDate}ms`);

+    if (parentPort) {
+        parentPort.postMessage('done');
+    } else {
         // give the logging pipes time finish writing before exit
         setTimeout(() => {
-            process.exit(1);
+            process.exit(0);
         }, 1000);
     }
 })();

View File

@@ -5,7 +5,13 @@
 const JobManager = require('@tryghost/job-manager');
 const logging = require('../../../shared/logging');
+const sentry = require('../../../shared/sentry');

-const jobManager = new JobManager(logging);
+const errorHandler = (error, workerMeta) => {
+    logging.info(`Capturing error for worker during execution of job: ${workerMeta.name}`);
+    logging.error(error);
+    sentry.captureException(error);
+};
+
+const jobManager = new JobManager({logging, errorHandler});

 module.exports = jobManager;
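The dependency bumps below (@tryghost/job-manager 0.5.0 → 0.6.0, bree 4.0.0 → 4.1.0) are what make the options-object constructor above work. Presumably the manager forwards the handler to Bree's own errorHandler hook, which is called with the error plus metadata about the worker that raised it; a rough, unverified sketch of that wiring (everything here except the errorHandler signature is an assumption, not the library's actual source):

    const Bree = require('bree');

    // sketch of how @tryghost/job-manager might route worker errors to the injected handler
    function createBree({errorHandler}) {
        return new Bree({
            root: false, // jobs are registered programmatically rather than loaded from a directory
            errorHandler: (error, workerMetadata) => {
                // workerMetadata.name matches the job name logged by the handler above
                errorHandler(error, workerMetadata);
            }
        });
    }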

View File

@@ -49,7 +49,7 @@
     "@tryghost/errors": "0.2.5",
     "@tryghost/helpers": "1.1.35",
     "@tryghost/image-transform": "1.0.3",
-    "@tryghost/job-manager": "0.5.0",
+    "@tryghost/job-manager": "0.6.0",
     "@tryghost/kg-card-factory": "2.1.4",
     "@tryghost/kg-default-atoms": "2.0.2",
     "@tryghost/kg-default-cards": "3.0.1",
View File

@@ -381,13 +381,13 @@
   optionalDependencies:
     sharp "0.25.4"

-"@tryghost/job-manager@0.5.0":
-  version "0.5.0"
-  resolved "https://registry.yarnpkg.com/@tryghost/job-manager/-/job-manager-0.5.0.tgz#94ea766503d2473835d10e5e75cbf9cf09d9cccf"
-  integrity sha512-BhVV0PgCL/Upd6fdcNQMYXzM7KAXcBhTOqsLuhJxaolf9lgWWBZOO575IGST9ezFU8xwidOF2Yecgem1ddpBQQ==
+"@tryghost/job-manager@0.6.0":
+  version "0.6.0"
+  resolved "https://registry.yarnpkg.com/@tryghost/job-manager/-/job-manager-0.6.0.tgz#5023f4b37577ac6966bba3608896948f2d030004"
+  integrity sha512-kZh3sAexCkP4WghIskB6ZAJQap8KQWDtcLWRXTzCL1fO6BzOstJgw4gvRMDb2t8peUHhwf34fKbeIKecl5cv0g==
   dependencies:
     "@breejs/later" "4.0.2"
-    bree "4.0.0"
+    bree "4.1.0"
     cron-validate "1.4.1"
     fastq "1.9.0"
     p-wait-for "3.1.0"
@@ -1328,10 +1328,10 @@ braces@~3.0.2:
   dependencies:
     fill-range "^7.0.1"

-bree@4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/bree/-/bree-4.0.0.tgz#1ce6b74cbbfd49fb87c171001f37ddcec1cea996"
-  integrity sha512-z9vM8rc4KNEzn8j+XOcJDt65ah2G/zuXkEeR2ovzX9A3kaoOL/jo41YLwdTZQ542YBOHcHomzd5pc6CkHAADgQ==
+bree@4.1.0:
+  version "4.1.0"
+  resolved "https://registry.yarnpkg.com/bree/-/bree-4.1.0.tgz#c27c942d6d32fd2ac4809a2a80509298a31fca3e"
+  integrity sha512-4mQMvYGrTtVp6apa/t4bXMcug7q0Eb6cXgWbly7OdhECT9PFsjyaGrSGr2+2TWUD2TFVEeZEtkk1rl8oGcjvgQ==
   dependencies:
     "@babel/runtime" "^7.12.5"
     "@breejs/later" "^4.0.2"