Extracted email analytics library code to external packages (#12541)

closes https://github.com/TryGhost/Team/issues/493

- all functionality except that directly related to Ghost's database and business logic now lives in external packages
  - @tryghost/email-analytics-service
  - @tryghost/email-analytics-provider-mailgun
This commit is contained in:
Kevin Ansfield 2021-03-02 07:26:33 +00:00 committed by GitHub
parent 9a6bfd0e71
commit 11802ebee0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 195 additions and 513 deletions

View File

@ -1,109 +0,0 @@
const _ = require('lodash');
const EventProcessingResult = require('./lib/event-processing-result');
const EventProcessor = require('./lib/event-processor');
const StatsAggregator = require('./lib/stats-aggregator');
const defaultProviders = require('./providers');
const debug = require('ghost-ignition').debug('services:email-analytics');
// when fetching a batch we should keep a record of which emails have associated
// events so we only aggregate those that are affected
// Orchestrates email analytics: fetches events from the configured providers,
// feeds them through the event processor, and triggers stats aggregation.
class EmailAnalyticsService {
    /**
     * @param {Object} deps
     * @param {Object} deps.config - Ghost config accessor
     * @param {Object} deps.settings - settings cache
     * @param {Object} [deps.logging] - logger, defaults to console
     * @param {Object} deps.db - database wrapper exposing `knex`
     * @param {Object} [deps.providers] - analytics providers keyed by name
     * @param {Object} [deps.eventProcessor] - per-event processor
     * @param {Object} [deps.statsAggregator] - email/member stats aggregator
     */
    constructor({config, settings, logging, db, providers, eventProcessor, statsAggregator}) {
        this.config = config;
        this.settings = settings;
        this.logging = logging || console;
        this.db = db;
        this.providers = providers || defaultProviders.init({config, settings, logging});
        this.eventProcessor = eventProcessor || new EventProcessor({db, logging});
        this.statsAggregator = statsAggregator || new StatsAggregator({db, logging});
    }

    // Fetch every available event from every provider. Skipped entirely when
    // no emails have ever been sent.
    async fetchAll() {
        const combined = new EventProcessingResult();

        const [emailCount] = await this.db.knex('emails').count('id as count');
        if (emailCount && emailCount.count <= 0) {
            debug('fetchAll: skipping - no emails to track');
            return combined;
        }

        const fetchStartedAt = Date.now();
        debug('fetchAll: starting');

        for (const provider of Object.values(this.providers)) {
            const providerResult = await provider.fetchAll(this.processEventBatch.bind(this));
            combined.merge(providerResult);
        }

        debug(`fetchAll: finished (${Date.now() - fetchStartedAt}ms)`);

        return combined;
    }

    // Fetch only events newer than the last one we've already stored,
    // stopping once `maxEvents` have been collected across all providers.
    async fetchLatest({maxEvents = Infinity} = {}) {
        const combined = new EventProcessingResult();
        const sinceTimestamp = await this.getLastSeenEventTimestamp();

        const [emailCount] = await this.db.knex('emails').count('id as count');
        if (emailCount && emailCount.count <= 0) {
            debug('fetchLatest: skipping - no emails to track');
            return combined;
        }

        const fetchStartedAt = Date.now();
        debug('fetchLatest: starting');

        for (const provider of Object.values(this.providers)) {
            const providerResult = await provider.fetchLatest(sinceTimestamp, this.processEventBatch.bind(this), {maxEvents});
            combined.merge(providerResult);
            if (combined.totalEvents >= maxEvents) {
                break; // limit reached - don't hit any further providers
            }
        }

        debug(`fetchLatest: finished in ${Date.now() - fetchStartedAt}ms. Fetched ${combined.totalEvents} events`);

        return combined;
    }

    // Process one batch of events sequentially, folding every per-event
    // result into a single EventProcessingResult.
    async processEventBatch(events) {
        const batchResult = new EventProcessingResult();
        for (const event of events) {
            batchResult.merge(await this.eventProcessor.process(event));
        }
        return batchResult;
    }

    // Re-aggregate stats for each email and member id seen during processing.
    async aggregateStats({emailIds = [], memberIds = []}) {
        for (const emailId of emailIds) {
            await this.aggregateEmailStats(emailId);
        }
        for (const memberId of memberIds) {
            await this.aggregateMemberStats(memberId);
        }
    }

    aggregateEmailStats(emailId) {
        return this.statsAggregator.aggregateEmail(emailId);
    }

    aggregateMemberStats(memberId) {
        return this.statsAggregator.aggregateMember(memberId);
    }

    // Most recent delivered/opened/failed timestamp across all recipients -
    // the starting point for fetchLatest.
    async getLastSeenEventTimestamp() {
        const startedAt = Date.now();

        // three separate queries are much faster than using max/greatest
        // across columns with coalesce to handle nulls
        const knex = this.db.knex;
        const {maxDeliveredAt} = await knex('email_recipients').select(knex.raw('MAX(delivered_at) as maxDeliveredAt')).first() || {};
        const {maxOpenedAt} = await knex('email_recipients').select(knex.raw('MAX(opened_at) as maxOpenedAt')).first() || {};
        const {maxFailedAt} = await knex('email_recipients').select(knex.raw('MAX(failed_at) as maxFailedAt')).first() || {};

        const lastSeen = _.max([maxDeliveredAt, maxOpenedAt, maxFailedAt]);

        debug(`getLastSeenEventTimestamp: finished in ${Date.now() - startedAt}ms`);

        return lastSeen;
    }
}
module.exports = EmailAnalyticsService;

View File

@ -1,12 +1,74 @@
const _ = require('lodash');
const config = require('../../../shared/config');
const logging = require('../../../shared/logging');
const db = require('../../data/db');
const settings = require('../settings/cache');
const EmailAnalyticsService = require('./email-analytics');
const {EmailAnalyticsService} = require('@tryghost/email-analytics-service');
const EventProcessor = require('./lib/event-processor');
const MailgunProvider = require('@tryghost/email-analytics-provider-mailgun');
const debug = require('ghost-ignition').debug('services:email-analytics');
const MIN_EMAIL_COUNT_FOR_OPEN_RATE = 5;
module.exports = new EmailAnalyticsService({
config,
logging,
db,
settings
settings,
eventProcessor: new EventProcessor({db, logging}),
providers: [
new MailgunProvider({config, settings, logging})
],
queries: {
async shouldFetchStats() {
// don't fetch stats from Mailgun if we haven't sent any emails
const [emailCount] = await db.knex('emails').count('id as count');
return emailCount && emailCount.count > 0;
},
async getLastSeenEventTimestamp() {
const startDate = new Date();
// three separate queries is much faster than using max/greatest (with coalesce to handle nulls) across columns
const {maxDeliveredAt} = await db.knex('email_recipients').select(db.knex.raw('MAX(delivered_at) as maxDeliveredAt')).first() || {};
const {maxOpenedAt} = await db.knex('email_recipients').select(db.knex.raw('MAX(opened_at) as maxOpenedAt')).first() || {};
const {maxFailedAt} = await db.knex('email_recipients').select(db.knex.raw('MAX(failed_at) as maxFailedAt')).first() || {};
const lastSeenEventTimestamp = _.max([maxDeliveredAt, maxOpenedAt, maxFailedAt]);
debug(`getLastSeenEventTimestamp: finished in ${Date.now() - startDate}ms`);
return lastSeenEventTimestamp;
},
async aggregateEmailStats(emailId) {
await db.knex('emails').update({
delivered_count: db.knex.raw(`(SELECT COUNT(id) FROM email_recipients WHERE email_id = ? AND delivered_at IS NOT NULL)`, [emailId]),
opened_count: db.knex.raw(`(SELECT COUNT(id) FROM email_recipients WHERE email_id = ? AND opened_at IS NOT NULL)`, [emailId]),
failed_count: db.knex.raw(`(SELECT COUNT(id) FROM email_recipients WHERE email_id = ? AND failed_at IS NOT NULL)`, [emailId])
}).where('id', emailId);
},
async aggregateMemberStats(memberId) {
const {trackedEmailCount} = await db.knex('email_recipients')
.select(db.knex.raw('COUNT(email_recipients.id) as trackedEmailCount'))
.leftJoin('emails', 'email_recipients.email_id', 'emails.id')
.where('email_recipients.member_id', memberId)
.where('emails.track_opens', true)
.first() || {};
const updateQuery = {
email_count: db.knex.raw('(SELECT COUNT(id) FROM email_recipients WHERE member_id = ?)', [memberId]),
email_opened_count: db.knex.raw('(SELECT COUNT(id) FROM email_recipients WHERE member_id = ? AND opened_at IS NOT NULL)', [memberId])
};
if (trackedEmailCount >= MIN_EMAIL_COUNT_FOR_OPEN_RATE) {
updateQuery.email_open_rate = db.knex.raw(`
ROUND(((SELECT COUNT(id) FROM email_recipients WHERE member_id = ? AND opened_at IS NOT NULL) * 1.0 / ? * 100), 0)
`, [memberId, trackedEmailCount]);
}
await db.knex('members')
.update(updateQuery)
.where('id', memberId);
}
}
});

View File

@ -14,8 +14,9 @@ module.exports = {
config.get('backgroundJobs:emailAnalytics') &&
!process.env.NODE_ENV.match(/^testing/)
) {
// don't register email analytics job if we have no emails,
// processor usage from many sites spinning up threads can be high
// Don't register email analytics job if we have no emails,
// processor usage from many sites spinning up threads can be high.
// Mega service will re-run this scheduling task when an email is sent
const emailCount = await models.Email
.where('created_at', '>', moment.utc().subtract(30, 'days').toDate())
.where('status', '<>', 'failed')

View File

@ -1,45 +0,0 @@
const _ = require('lodash');
// Accumulator for counts of processed email events, plus the email/member ids
// touched while processing (ready for handing to the stats aggregator).
class EventProcessingResult {
    constructor(result = {}) {
        // per-event-type counters
        this.delivered = 0;
        this.opened = 0;
        this.failed = 0;
        this.unsubscribed = 0;
        this.complained = 0;
        this.unhandled = 0;
        this.unprocessable = 0;

        // ids seen whilst processing ready for passing to the stats aggregator
        this.emailIds = [];
        this.memberIds = [];

        this.merge(result);
    }

    // Sum of every counter - the total number of events accounted for.
    get totalEvents() {
        const counters = [
            this.delivered,
            this.opened,
            this.failed,
            this.unsubscribed,
            this.complained,
            this.unhandled,
            this.unprocessable
        ];
        return counters.reduce((sum, count) => sum + count, 0);
    }

    // Fold another (possibly partial) result into this one. Counters add,
    // id lists are unioned with falsy entries removed.
    merge(other = {}) {
        const counterKeys = ['delivered', 'opened', 'failed', 'unsubscribed', 'complained', 'unhandled', 'unprocessable'];
        for (const key of counterKeys) {
            this[key] += other[key] || 0;
        }

        this.emailIds = _.compact(_.union(this.emailIds, other.emailIds || []));
        this.memberIds = _.compact(_.union(this.memberIds, other.memberIds || []));
    }
}
module.exports = EventProcessingResult;

View File

@ -1,175 +1,17 @@
const {EventProcessor} = require('@tryghost/email-analytics-service');
const moment = require('moment');
class EmailAnalyticsEventProcessor {
constructor({db, logging}) {
class GhostEventProcessor extends EventProcessor {
constructor({db}) {
super(...arguments);
this.db = db;
this.logging = logging || console;
// avoid having to query email_batch by provider_id for every event
this.providerIdEmailIdMap = {};
}
async process(event) {
if (event.type === 'delivered') {
return this.handleDelivered(event);
}
if (event.type === 'opened') {
return this.handleOpened(event);
}
if (event.type === 'failed') {
return this.handleFailed(event);
}
if (event.type === 'unsubscribed') {
return this.handleUnsubscribed(event);
}
if (event.type === 'complained') {
return this.handleComplained(event);
}
return {
unhandled: 1
};
}
// Stamp delivered_at on the matching email_recipient row (COALESCE keeps the
// first recorded delivery time) and report counts/ids for stats aggregation.
async handleDelivered(event) {
const emailId = await this._getEmailId(event);
if (!emailId) {
// can't attribute the event to an email - count it as unprocessable
return {unprocessable: 1};
}
// this doesn't work - the Base model intercepts the attr and tries to convert "COALESCE(...)" to a date
// await this.models.EmailRecipient
// .where({email_id: emailId, member_email: event.recipientEmail})
// .save({delivered_at: this.db.knex.raw('COALESCE(delivered_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])}, {patch: true, {context: {internal: true}}});
const updateResult = await this.db.knex('email_recipients')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
.update({
delivered_at: this.db.knex.raw('COALESCE(delivered_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])
});
if (updateResult !== 0) {
// a row was actually updated - include ids so stats get re-aggregated
const memberId = await this._getMemberId(event);
return {
delivered: 1,
emailIds: [emailId],
memberIds: [memberId]
};
}
// no matching recipient row was updated - still count the delivery but
// don't trigger any aggregation
return {delivered: 1};
}
// Stamp opened_at on the matching email_recipient row (COALESCE keeps the
// first recorded open time) and report counts/ids for stats aggregation.
async handleOpened(event) {
const emailId = await this._getEmailId(event);
if (!emailId) {
// can't attribute the event to an email - count it as unprocessable
return {unprocessable: 1};
}
const updateResult = await this.db.knex('email_recipients')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
.update({
opened_at: this.db.knex.raw('COALESCE(opened_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])
});
if (updateResult !== 0) {
// a row was actually updated - include ids so stats get re-aggregated
const memberId = await this._getMemberId(event);
return {
opened: 1,
emailIds: [emailId],
memberIds: [memberId]
};
}
// no matching recipient row was updated - still count the open but don't
// trigger any aggregation
return {opened: 1};
}
async handleFailed(event) {
if (event.severity === 'permanent') {
const emailId = await this._getEmailId(event);
if (!emailId) {
return {unprocessable: 1};
}
await this.db.knex('email_recipients')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
.update({
failed_at: this.db.knex.raw('COALESCE(failed_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])
});
return {
failed: 1,
emailIds: [emailId]
};
}
if (event.severity === 'temporary') {
// we don't care about soft bounces at the moment
return {unhandled: 1};
}
}
// Unsubscribe the member that this event's recipient email resolves to,
// using a raw knex update with a subquery to find the member id.
async handleUnsubscribed(event) {
const emailId = await this._getEmailId(event);
if (!emailId) {
// can't attribute the event to an email - count it as unprocessable
return {unprocessable: 1};
}
// saving via bookshelf triggers label fetch/update which errors and slows down processing
await this.db.knex('members')
.where('id', '=', this.db.knex('email_recipients')
.select('member_id')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
)
.update({
subscribed: false,
updated_at: moment.utc().toDate()
});
return {
unsubscribed: 1
};
}
// Spam complaint: unsubscribe the member the same way handleUnsubscribed
// does, but count the event as a complaint.
async handleComplained(event) {
const emailId = await this._getEmailId(event);
if (!emailId) {
// can't attribute the event to an email - count it as unprocessable
return {unprocessable: 1};
}
// saving via bookshelf triggers label fetch/update which errors and slows down processing
await this.db.knex('members')
.where('id', '=', this.db.knex('email_recipients')
.select('member_id')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
)
.update({
subscribed: false,
updated_at: moment.utc().toDate()
});
return {
complained: 1
};
}
async _getEmailId(event) {
async getEmailId(event) {
if (event.emailId) {
return event.emailId;
}
@ -191,14 +33,16 @@ class EmailAnalyticsEventProcessor {
this.providerIdEmailIdMap[event.providerId] = emailId;
return emailId;
}
return undefined;
}
async _getMemberId(event) {
if (event.memberId) {
return event.memberId;
}
async getMemberId(event) {
const emailId = await this.getEmailId(event);
const emailId = await this._getEmailId(event);
if (!emailId) {
return false;
}
if (emailId && event.recipientEmail) {
const {memberId} = await this.db.knex('email_recipients')
@ -209,7 +53,98 @@ class EmailAnalyticsEventProcessor {
return memberId;
}
return undefined;
}
async handleDelivered(event) {
const emailId = await this.getEmailId(event);
if (!emailId) {
return false;
}
const updateResult = await this.db.knex('email_recipients')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
.update({
delivered_at: this.db.knex.raw('COALESCE(delivered_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])
});
return updateResult > 0;
}
// Stamp opened_at on the matching recipient row (COALESCE keeps the first
// recorded open time). Returns true when a row was actually updated.
async handleOpened(event) {
const emailId = await this.getEmailId(event);
if (!emailId) {
// event can't be attributed to an email
return false;
}
const updateResult = await this.db.knex('email_recipients')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
.update({
opened_at: this.db.knex.raw('COALESCE(opened_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])
});
return updateResult > 0;
}
// Soft bounces are intentionally ignored for now.
async handleTemporaryFailed(/*event*/) {
// noop - we don't do anything with temporary failures for now
}
// Hard bounce: stamp failed_at on the matching recipient row (COALESCE keeps
// the first recorded failure time). Returns true when a row was updated.
async handlePermanentFailed(event) {
const emailId = await this.getEmailId(event);
if (!emailId) {
// event can't be attributed to an email
return false;
}
const updateResult = await this.db.knex('email_recipients')
.where('email_id', '=', emailId)
.where('member_email', '=', event.recipientEmail)
.update({
failed_at: this.db.knex.raw('COALESCE(failed_at, ?)', [moment.utc(event.timestamp).format('YYYY-MM-DD HH:mm:ss')])
});
return updateResult > 0;
}
async handleUnsubscribed(event) {
const memberId = await this.getMemberId(event);
if (!memberId) {
return false;
}
const updateResult = await this.db.knex('members')
.where('id', '=', memberId)
.update({
subscribed: false,
updated_at: moment.utc().toDate()
});
return updateResult > 0;
}
// Spam complaint: unsubscribe the member, same update as handleUnsubscribed.
// Returns true when the member row was actually updated.
async handleComplained(event) {
const memberId = await this.getMemberId(event);
if (!memberId) {
// event can't be attributed to a member
return false;
}
const updateResult = await this.db.knex('members')
.where('id', '=', memberId)
.update({
subscribed: false,
updated_at: moment.utc().toDate()
});
return updateResult > 0;
}
}
module.exports = EmailAnalyticsEventProcessor;
module.exports = GhostEventProcessor;

View File

@ -1,41 +0,0 @@
// Recomputes denormalized email/member analytics counters from the
// email_recipients table using raw SQL subqueries.
class EmailAnalyticsStatsAggregator {
constructor({options, logging, db}) {
// openRateEmailThreshold: minimum number of open-tracked emails a member
// must have received before we calculate an open rate for them
this.options = Object.assign({openRateEmailThreshold: 5}, options);
this.logging = logging || console;
this.db = db;
}
// Recount delivered/opened/failed totals for a single email.
async aggregateEmail(emailId) {
await this.db.knex('emails').update({
delivered_count: this.db.knex.raw(`(SELECT COUNT(id) FROM email_recipients WHERE email_id = ? AND delivered_at IS NOT NULL)`, [emailId]),
opened_count: this.db.knex.raw(`(SELECT COUNT(id) FROM email_recipients WHERE email_id = ? AND opened_at IS NOT NULL)`, [emailId]),
failed_count: this.db.knex.raw(`(SELECT COUNT(id) FROM email_recipients WHERE email_id = ? AND failed_at IS NOT NULL)`, [emailId])
}).where('id', emailId);
}
// Recount email/open totals for a single member and, once the member has
// received enough open-tracked emails, their open rate percentage.
async aggregateMember(memberId) {
// only emails with open tracking enabled count towards the open rate
const {trackedEmailCount} = await this.db.knex('email_recipients')
.select(this.db.knex.raw('COUNT(email_recipients.id) as trackedEmailCount'))
.leftJoin('emails', 'email_recipients.email_id', 'emails.id')
.where('email_recipients.member_id', memberId)
.where('emails.track_opens', true)
.first() || {};
const updateQuery = {
email_count: this.db.knex.raw('(SELECT COUNT(id) FROM email_recipients WHERE member_id = ?)', [memberId]),
email_opened_count: this.db.knex.raw('(SELECT COUNT(id) FROM email_recipients WHERE member_id = ? AND opened_at IS NOT NULL)', [memberId])
};
// below the threshold the open rate stays untouched to avoid noisy values
if (trackedEmailCount >= this.options.openRateEmailThreshold) {
updateQuery.email_open_rate = this.db.knex.raw(`
ROUND(((SELECT COUNT(id) FROM email_recipients WHERE member_id = ? AND opened_at IS NOT NULL) * 1.0 / ? * 100), 0)
`, [memberId, trackedEmailCount]);
}
await this.db.knex('members')
.update(updateQuery)
.where('id', memberId);
}
}
module.exports = EmailAnalyticsStatsAggregator;

View File

@ -1,10 +0,0 @@
module.exports = {
init({config, settings, logging = console}) {
return {
get mailgun() {
const Mailgun = require('./mailgun');
return new Mailgun({config, settings, logging});
}
};
}
};

View File

@ -1,130 +0,0 @@
const mailgunJs = require('mailgun-js');
const moment = require('moment');
const EventProcessingResult = require('../lib/event-processing-result');
const EVENT_FILTER = 'delivered OR opened OR failed OR unsubscribed OR complained';
const PAGE_LIMIT = 300;
const TRUST_THRESHOLD_S = 30 * 60; // 30 minutes
const DEFAULT_TAGS = ['bulk-email'];
// Fetches batches of email events from the Mailgun Events API and normalizes
// them into the shape the event processor expects.
class EmailAnalyticsMailgunProvider {
constructor({config, settings, mailgun, logging = console}) {
this.config = config;
this.settings = settings;
this.logging = logging;
// always filter by the bulk-email tag; a site-specific tag from config is
// ANDed on top when present
this.tags = [...DEFAULT_TAGS];
this._mailgun = mailgun;
if (this.config.get('bulkEmail:mailgun:tag')) {
this.tags.push(this.config.get('bulkEmail:mailgun:tag'));
}
}
// unless an instance is passed in to the constructor, generate a new instance each
// time the getter is called to account for changes in config/settings over time
get mailgun() {
if (this._mailgun) {
return this._mailgun;
}
// config takes precedence over user-editable settings
const bulkEmailConfig = this.config.get('bulkEmail');
const bulkEmailSetting = {
apiKey: this.settings.get('mailgun_api_key'),
domain: this.settings.get('mailgun_domain'),
baseUrl: this.settings.get('mailgun_base_url')
};
const hasMailgunConfig = !!(bulkEmailConfig && bulkEmailConfig.mailgun);
const hasMailgunSetting = !!(bulkEmailSetting && bulkEmailSetting.apiKey && bulkEmailSetting.baseUrl && bulkEmailSetting.domain);
if (!hasMailgunConfig && !hasMailgunSetting) {
// returning undefined lets _fetchPages bail out gracefully
this.logging.warn(`Bulk email service is not configured`);
return undefined;
}
const mailgunConfig = hasMailgunConfig ? bulkEmailConfig.mailgun : bulkEmailSetting;
const baseUrl = new URL(mailgunConfig.baseUrl);
return mailgunJs({
apiKey: mailgunConfig.apiKey,
domain: mailgunConfig.domain,
protocol: baseUrl.protocol,
host: baseUrl.hostname,
port: baseUrl.port,
endpoint: baseUrl.pathname,
retry: 5
});
}
// do not start from a particular time, grab latest then work back through
// pages until we get a blank response
fetchAll(batchHandler) {
const options = {
event: EVENT_FILTER,
limit: PAGE_LIMIT,
tags: this.tags.join(' AND ')
};
return this._fetchPages(options, batchHandler);
}
// fetch from the last known timestamp-TRUST_THRESHOLD then work forwards
// through pages until we get a blank response. This lets us get events
// quicker than the TRUST_THRESHOLD
fetchLatest(latestTimestamp, batchHandler, options) {
// NOTE(review): if latestTimestamp is undefined, moment(undefined) is
// "now" so we start 30 minutes back - presumably intentional; verify
const beginDate = moment(latestTimestamp).subtract(TRUST_THRESHOLD_S, 's').toDate();
const mailgunOptions = {
limit: PAGE_LIMIT,
event: EVENT_FILTER,
tags: this.tags.join(' AND '),
begin: beginDate.toUTCString(),
ascending: 'yes'
};
return this._fetchPages(mailgunOptions, batchHandler, options);
}
// Page through the Mailgun events API, handing each page's normalized
// events to batchHandler and merging the returned results. Stops on an
// empty page or once maxEvents have been collected.
async _fetchPages(mailgunOptions, batchHandler, {maxEvents = Infinity} = {}) {
const {mailgun} = this;
if (!mailgun) {
// not configured - nothing to fetch
this.logging.warn(`Bulk email service is not configured`);
return new EventProcessingResult();
}
const result = new EventProcessingResult();
let page = await mailgun.events().get(mailgunOptions);
let events = page && page.items && page.items.map(this.normalizeEvent) || [];
pagesLoop:
while (events.length !== 0) {
const batchResult = await batchHandler(events);
result.merge(batchResult);
if (result.totalEvents >= maxEvents) {
break pagesLoop;
}
// follow the next-page URL relative to the API root
// NOTE(review): assumes page.paging.next is always present on a
// non-empty page - confirm against mailgun-js behaviour
page = await mailgun.get(page.paging.next.replace('https://api.mailgun.net/v3', ''));
events = page && page.items && page.items.map(this.normalizeEvent) || [];
}
return result;
}
// Map a raw Mailgun event into the provider-agnostic shape used by the
// event processor. Intentionally `this`-free so it can be passed to .map.
normalizeEvent(event) {
let providerId = event.message && event.message.headers && event.message.headers['message-id'];
return {
type: event.event,
severity: event.severity,
recipientEmail: event.recipient,
emailId: event['user-variables'] && event['user-variables']['email-id'],
providerId: providerId,
// Mailgun timestamps are UNIX seconds
timestamp: new Date(event.timestamp * 1000)
};
}
}
module.exports = EmailAnalyticsMailgunProvider;

View File

@ -45,6 +45,8 @@
"@tryghost/admin-api-schema": "2.0.0-rc.1",
"@tryghost/bootstrap-socket": "0.2.7",
"@tryghost/constants": "0.1.6",
"@tryghost/email-analytics-provider-mailgun": "1.0.0-rc.1",
"@tryghost/email-analytics-service": "1.0.0-rc.1",
"@tryghost/errors": "0.2.9",
"@tryghost/helpers": "1.1.38",
"@tryghost/image-transform": "1.0.3",

View File

@ -420,6 +420,23 @@
dependencies:
"@elastic/elasticsearch" "^7.10.0"
"@tryghost/email-analytics-provider-mailgun@1.0.0-rc.1":
version "1.0.0-rc.1"
resolved "https://registry.yarnpkg.com/@tryghost/email-analytics-provider-mailgun/-/email-analytics-provider-mailgun-1.0.0-rc.1.tgz#47e41613cea9d6a82292d9f2d7763d7c9abc02e2"
integrity sha512-+KtipyooNZyQHdsoA0uPkefyZAqQl/u/IYe07L1e/IqYQQ/W1FKG0eXThPQG9770dkf8NcvQ6hODuC4qbRcTSg==
dependencies:
"@tryghost/email-analytics-service" "^1.0.0-rc.1"
mailgun-js "^0.22.0"
moment "^2.29.1"
"@tryghost/email-analytics-service@1.0.0-rc.1", "@tryghost/email-analytics-service@^1.0.0-rc.1":
version "1.0.0-rc.1"
resolved "https://registry.yarnpkg.com/@tryghost/email-analytics-service/-/email-analytics-service-1.0.0-rc.1.tgz#1d190f2c709579d92776d11c52c3ec2665f6039f"
integrity sha512-o6w5eBVO+/1PK0WBNheVnEdvNYT8UjbxlmP3QxEVR5r64IshYUdVTmNo75BER/B1OW6rNxmAH/IpogbFbFw5OQ==
dependencies:
ghost-ignition "^4.2.4"
lodash "^4.17.20"
"@tryghost/errors@0.2.6":
version "0.2.6"
resolved "https://registry.yarnpkg.com/@tryghost/errors/-/errors-0.2.6.tgz#4554c97a1666e4db323d8c664a4df5a714f77452"
@ -4143,7 +4160,7 @@ ghost-ignition@4.4.3:
prettyjson "1.2.1"
uuid "8.3.2"
ghost-ignition@4.4.4:
ghost-ignition@4.4.4, ghost-ignition@^4.2.4:
version "4.4.4"
resolved "https://registry.yarnpkg.com/ghost-ignition/-/ghost-ignition-4.4.4.tgz#6d16912a733a21582680d859de3a1dff868d4c67"
integrity sha512-tYWDirmtBtTq2etRr2co7n1PPhyoeVIHZhPfG7NDIH5GUDZ8QYlCmxC8nlhy3nnDve5ZunDAg0FlA3Rjqa0kug==
@ -6150,7 +6167,7 @@ mailcomposer@~0.2.10:
mime "~1.2.11"
mimelib "~0.2.15"
mailgun-js@0.22.0:
mailgun-js@0.22.0, mailgun-js@^0.22.0:
version "0.22.0"
resolved "https://registry.yarnpkg.com/mailgun-js/-/mailgun-js-0.22.0.tgz#128942b5e47a364a470791608852bf68c96b3a05"
integrity sha512-a2alg5nuTZA9Psa1pSEIEsbxr1Zrmqx4VkgGCQ30xVh0kIH7Bu57AYILo+0v8QLSdXtCyLaS+KVmdCrQo0uWFA==
@ -6618,7 +6635,7 @@ moment-timezone@0.5.23, moment-timezone@0.5.28, moment-timezone@0.5.31:
dependencies:
moment ">= 2.9.0"
moment@2.24.0, moment@2.27.0, "moment@>= 2.9.0", moment@^2.10.6, moment@^2.15.2, moment@^2.18.1, moment@^2.19.3:
moment@2.24.0, moment@2.27.0, "moment@>= 2.9.0", moment@^2.10.6, moment@^2.15.2, moment@^2.18.1, moment@^2.19.3, moment@^2.29.1:
version "2.24.0"
resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b"
integrity sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==