diff --git a/core/server/services/email-analytics/jobs/fetch-all.js b/core/server/services/email-analytics/jobs/fetch-all.js
index 55e6959757..11305a9acb 100644
--- a/core/server/services/email-analytics/jobs/fetch-all.js
+++ b/core/server/services/email-analytics/jobs/fetch-all.js
@@ -1,6 +1,5 @@
 const logging = require('../../../../shared/logging');
 const {parentPort} = require('bthreads');
-const sentry = require('../../../../shared/sentry');
 const debug = require('ghost-ignition').debug('jobs:email-analytics:fetch-all');
 
 // one-off job to fetch all available events and re-process them idempotently
@@ -27,46 +26,36 @@ if (parentPort) {
 }
 
 (async () => {
-    try {
-        const models = require('../../../models');
-        const settingsService = require('../../settings');
+    const models = require('../../../models');
+    const settingsService = require('../../settings');
 
-        // must be initialized before emailAnalyticsService is required otherwise
-        // requires are in the wrong order and settingsCache will always be empty
-        await models.init();
-        await settingsService.init();
+    // must be initialized before emailAnalyticsService is required otherwise
+    // requires are in the wrong order and settingsCache will always be empty
+    await models.init();
+    await settingsService.init();
 
-        const emailAnalyticsService = require('../');
+    const emailAnalyticsService = require('../');
 
-        const fetchStartDate = new Date();
-        debug('Starting email analytics fetch of all available events');
-        const eventStats = await emailAnalyticsService.fetchAll();
-        const fetchEndDate = new Date();
-        debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);
+    const fetchStartDate = new Date();
+    debug('Starting email analytics fetch of all available events');
+    const eventStats = await emailAnalyticsService.fetchAll();
+    const fetchEndDate = new Date();
+    debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);
 
-        const aggregateStartDate = new Date();
-        debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
-        await emailAnalyticsService.aggregateStats(eventStats);
-        const aggregateEndDate = new Date();
-        debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);
+    const aggregateStartDate = new Date();
+    debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
+    await emailAnalyticsService.aggregateStats(eventStats);
+    const aggregateEndDate = new Date();
+    debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);
 
-        logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails and ${eventStats.memberIds.length} members in ${aggregateEndDate - fetchStartDate}ms`);
-
-        if (parentPort) {
-            parentPort.postMessage('done');
-        } else {
-            // give the logging pipes time finish writing before exit
-            setTimeout(() => {
-                process.exit(0);
-            }, 1000);
-        }
-    } catch (error) {
-        logging.error(error);
-        sentry.captureException(error);
+    logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails in ${aggregateEndDate - fetchStartDate}ms`);
 
+    if (parentPort) {
+        parentPort.postMessage('done');
+    } else {
         // give the logging pipes time finish writing before exit
         setTimeout(() => {
-            process.exit(1);
+            process.exit(0);
         }, 1000);
     }
 })();
diff --git a/core/server/services/email-analytics/jobs/fetch-latest.js b/core/server/services/email-analytics/jobs/fetch-latest.js
index 13642eef64..776c992cac 100644
--- a/core/server/services/email-analytics/jobs/fetch-latest.js
+++ b/core/server/services/email-analytics/jobs/fetch-latest.js
@@ -1,6 +1,5 @@
 const logging = require('../../../../shared/logging');
 const {parentPort} = require('bthreads');
-const sentry = require('../../../../shared/sentry');
 const debug = require('ghost-ignition').debug('jobs:email-analytics:fetch-latest');
 
 // recurring job to fetch analytics since the most recently seen event timestamp
@@ -28,46 +27,36 @@ if (parentPort) {
 }
 
 (async () => {
-    try {
-        const models = require('../../../models');
-        const settingsService = require('../../settings');
+    const models = require('../../../models');
+    const settingsService = require('../../settings');
 
-        // must be initialized before emailAnalyticsService is required otherwise
-        // requires are in the wrong order and settingsCache will always be empty
-        await models.init();
-        await settingsService.init();
+    // must be initialized before emailAnalyticsService is required otherwise
+    // requires are in the wrong order and settingsCache will always be empty
+    await models.init();
+    await settingsService.init();
 
-        const emailAnalyticsService = require('../');
+    const emailAnalyticsService = require('../');
 
-        const fetchStartDate = new Date();
-        debug('Starting email analytics fetch of latest events');
-        const eventStats = await emailAnalyticsService.fetchLatest();
-        const fetchEndDate = new Date();
-        debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);
+    const fetchStartDate = new Date();
+    debug('Starting email analytics fetch of latest events');
+    const eventStats = await emailAnalyticsService.fetchLatest();
+    const fetchEndDate = new Date();
+    debug(`Finished fetching ${eventStats.totalEvents} analytics events in ${fetchEndDate - fetchStartDate}ms`);
 
-        const aggregateStartDate = new Date();
-        debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
-        await emailAnalyticsService.aggregateStats(eventStats);
-        const aggregateEndDate = new Date();
-        debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);
+    const aggregateStartDate = new Date();
+    debug(`Starting email analytics aggregation for ${eventStats.emailIds.length} emails`);
+    await emailAnalyticsService.aggregateStats(eventStats);
+    const aggregateEndDate = new Date();
+    debug(`Finished aggregating email analytics in ${aggregateEndDate - aggregateStartDate}ms`);
 
-        logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails and ${eventStats.memberIds.length} members in ${aggregateEndDate - fetchStartDate}ms`);
-
-        if (parentPort) {
-            parentPort.postMessage('done');
-        } else {
-            // give the logging pipes time finish writing before exit
-            setTimeout(() => {
-                process.exit(0);
-            }, 1000);
-        }
-    } catch (error) {
-        logging.error(error);
-        sentry.captureException(error);
+    logging.info(`Fetched ${eventStats.totalEvents} events and aggregated stats for ${eventStats.emailIds.length} emails in ${aggregateEndDate - fetchStartDate}ms`);
 
+    if (parentPort) {
+        parentPort.postMessage('done');
+    } else {
         // give the logging pipes time finish writing before exit
         setTimeout(() => {
-            process.exit(1);
+            process.exit(0);
         }, 1000);
     }
 })();
diff --git a/core/server/services/jobs/job-service.js b/core/server/services/jobs/job-service.js
index 22a779fe12..562b759a40 100644
--- a/core/server/services/jobs/job-service.js
+++ b/core/server/services/jobs/job-service.js
@@ -5,7 +5,13 @@
 
 const JobManager = require('@tryghost/job-manager');
 const logging = require('../../../shared/logging');
+const sentry = require('../../../shared/sentry');
 
-const jobManager = new JobManager(logging);
+const errorHandler = (error, workerMeta) => {
+    logging.info(`Capturing error for worker during execution of job: ${workerMeta.name}`);
+    logging.error(error);
+    sentry.captureException(error);
+};
+const jobManager = new JobManager({logging, errorHandler});
 
 module.exports = jobManager;
diff --git a/package.json b/package.json
index 50803c3c26..24123a9e08 100644
--- a/package.json
+++ b/package.json
@@ -49,7 +49,7 @@
     "@tryghost/errors": "0.2.5",
     "@tryghost/helpers": "1.1.35",
     "@tryghost/image-transform": "1.0.3",
-    "@tryghost/job-manager": "0.5.0",
+    "@tryghost/job-manager": "0.6.0",
     "@tryghost/kg-card-factory": "2.1.4",
     "@tryghost/kg-default-atoms": "2.0.2",
     "@tryghost/kg-default-cards": "3.0.1",
diff --git a/yarn.lock b/yarn.lock
index ef8876bfd1..a5ce07178a 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -381,13 +381,13 @@
   optionalDependencies:
     sharp "0.25.4"
 
-"@tryghost/job-manager@0.5.0":
-  version "0.5.0"
-  resolved "https://registry.yarnpkg.com/@tryghost/job-manager/-/job-manager-0.5.0.tgz#94ea766503d2473835d10e5e75cbf9cf09d9cccf"
-  integrity sha512-BhVV0PgCL/Upd6fdcNQMYXzM7KAXcBhTOqsLuhJxaolf9lgWWBZOO575IGST9ezFU8xwidOF2Yecgem1ddpBQQ==
+"@tryghost/job-manager@0.6.0":
+  version "0.6.0"
+  resolved "https://registry.yarnpkg.com/@tryghost/job-manager/-/job-manager-0.6.0.tgz#5023f4b37577ac6966bba3608896948f2d030004"
+  integrity sha512-kZh3sAexCkP4WghIskB6ZAJQap8KQWDtcLWRXTzCL1fO6BzOstJgw4gvRMDb2t8peUHhwf34fKbeIKecl5cv0g==
   dependencies:
     "@breejs/later" "4.0.2"
-    bree "4.0.0"
+    bree "4.1.0"
     cron-validate "1.4.1"
     fastq "1.9.0"
     p-wait-for "3.1.0"
@@ -1328,10 +1328,10 @@ braces@~3.0.2:
   dependencies:
     fill-range "^7.0.1"
 
-bree@4.0.0:
-  version "4.0.0"
-  resolved "https://registry.yarnpkg.com/bree/-/bree-4.0.0.tgz#1ce6b74cbbfd49fb87c171001f37ddcec1cea996"
-  integrity sha512-z9vM8rc4KNEzn8j+XOcJDt65ah2G/zuXkEeR2ovzX9A3kaoOL/jo41YLwdTZQ542YBOHcHomzd5pc6CkHAADgQ==
+bree@4.1.0:
+  version "4.1.0"
+  resolved "https://registry.yarnpkg.com/bree/-/bree-4.1.0.tgz#c27c942d6d32fd2ac4809a2a80509298a31fca3e"
+  integrity sha512-4mQMvYGrTtVp6apa/t4bXMcug7q0Eb6cXgWbly7OdhECT9PFsjyaGrSGr2+2TWUD2TFVEeZEtkk1rl8oGcjvgQ==
   dependencies:
     "@babel/runtime" "^7.12.5"
     "@breejs/later" "^4.0.2"
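
Reviewer note (commentary, not part of the patch): the two analytics jobs above drop their local try/catch and sentry requires because @tryghost/job-manager 0.6.0 accepts an errorHandler alongside the logger, so worker failures are logged and reported to Sentry in one place, job-service.js, instead of inside every job script; the jobs themselves now only post 'done' on success. Below is a minimal standalone sketch of how that centralized handler behaves when the job manager reports a failed worker. fakeLogging and fakeSentry are stand-ins for Ghost's shared/logging and shared/sentry modules, and the final call only simulates the job manager invoking the handler.

// Sketch: same (error, workerMeta) shape as the handler added in job-service.js.
const fakeLogging = {
    info: message => console.log(`INFO ${message}`),
    error: err => console.error(`ERROR ${err.stack || err}`)
};
const fakeSentry = {
    captureException: err => console.log(`(sentry) captured: ${err.message}`)
};

const errorHandler = (error, workerMeta) => {
    // workerMeta.name identifies the job that failed
    fakeLogging.info(`Capturing error for worker during execution of job: ${workerMeta.name}`);
    fakeLogging.error(error);
    fakeSentry.captureException(error);
};

// Simulate the job manager reporting a worker thread that threw:
errorHandler(new Error('Mailgun request failed'), {name: 'email-analytics-fetch-latest'});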