const path = require('path');
const moment = require('moment');
const config = require('../../../../shared/config');
const models = require('../../../models');
const jobsService = require('../../jobs');
let hasScheduled = false;
|
|
|
|
|
|
|
|
module.exports = {
|
|
|
|
async scheduleRecurringJobs() {
|
|
|
|
if (
|
|
|
|
!hasScheduled &&
|
2020-12-02 16:22:12 +03:00
|
|
|
config.get('emailAnalytics') &&
|
2020-12-02 11:17:44 +03:00
|
|
|
config.get('backgroundJobs:emailAnalytics') &&
|
|
|
|
!process.env.NODE_ENV.match(/^testing/)
|
|
|
|
) {
|
2021-03-02 10:26:33 +03:00
|
|
|
// Don't register email analytics job if we have no emails,
|
2021-08-09 12:34:42 +03:00
|
|
|
// processor usage from many sites spinning up threads can be high.
|
2021-03-02 10:26:33 +03:00
|
|
|
// Mega service will re-run this scheduling task when an email is sent
|
2020-12-02 15:13:54 +03:00
|
|
|
const emailCount = await models.Email
|
|
|
|
.where('created_at', '>', moment.utc().subtract(30, 'days').toDate())
|
2020-12-02 17:53:10 +03:00
|
|
|
.where('status', '<>', 'failed')
|
2020-12-02 15:13:54 +03:00
|
|
|
.count();
|
2020-12-02 11:17:44 +03:00
|
|
|
|
|
|
|
if (emailCount > 0) {
|
|
|
|
// use a random seconds value to avoid spikes to external APIs on the minute
|
|
|
|
const s = Math.floor(Math.random() * 60); // 0-59
|
|
|
|
// run every 5 minutes, on 1,6,11..., 2,7,12..., 3,8,13..., etc
|
|
|
|
const m = Math.floor(Math.random() * 5); // 0-4
|
|
|
|
|
2021-01-06 07:47:57 +03:00
|
|
|
jobsService.addJob({
|
|
|
|
at: `${s} ${m}/5 * * * *`,
|
|
|
|
job: path.resolve(__dirname, 'fetch-latest.js'),
|
|
|
|
name: 'email-analytics-fetch-latest'
|
|
|
|
});
|
2020-12-02 11:17:44 +03:00
|
|
|
|
|
|
|
hasScheduled = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return hasScheduled;
|
|
|
|
}
|
|
|
|
};