🐛 Reduced concurrency when fetching Mailgun events (#16176)

refs https://github.com/TryGhost/Team/issues/2482

This change adds a small sleep between dispatching events in the worker thread that reads events from Mailgun. That should reduce the number of queries we fire in parallel to each other, which could otherwise cause the connection pool to run out of connections.

It also reduces the sending concurrency from 10 to 2. This is also meant to make sure the connection pool doesn't run out of connections while sending emails, and to reduce the chance of new connections falling back on a (delayed) replicated database.
Simon Backx, 2023-01-24 18:02:10 +01:00
commit 4b0ca9399d (parent 977c95157a)
4 changed files with 27 additions and 7 deletions
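
As a rough illustration of the failure mode the message describes (a minimal, self-contained sketch, not Ghost code; the pool size and timings are invented): every dispatched event eventually runs its own database query, so firing events back to back can demand more connections than the pool has, while pacing the dispatches keeps the number of in-flight queries small.

```js
// Minimal sketch of the problem. POOL_MAX and the query duration are assumptions
// for illustration only, not Ghost's actual configuration.
const POOL_MAX = 10;

let inFlight = 0;
let peak = 0;

async function fakeQuery() {
    inFlight += 1;
    peak = Math.max(peak, inFlight);
    await new Promise(resolve => setTimeout(resolve, 250)); // pretend the query takes 250ms
    inFlight -= 1;
}

async function dispatchAll(count, {pauseMs = 0} = {}) {
    for (let i = 0; i < count; i++) {
        fakeQuery(); // fire-and-forget: one query per dispatched event
        if (pauseMs > 0) {
            await new Promise(resolve => setTimeout(resolve, pauseMs));
        }
    }
}

// dispatchAll(50);                 // peak climbs to ~50, far above POOL_MAX
// dispatchAll(50, {pauseMs: 100}); // peak stays around 3, well inside the pool
```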

@@ -93,7 +93,7 @@ module.exports = {
             } catch (error) {
                 return new FailedBatch(emailBatchId, error);
             }
-        }, {concurrency: 10});
+        }, {concurrency: 2});
         const successes = batchResults.filter(response => (response instanceof SuccessfulBatch));
         const failures = batchResults.filter(response => (response instanceof FailedBatch));
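
The `{concurrency: 2}` option above is handled by the promise-mapping helper this callback is passed to (the helper itself isn't visible in the hunk); whatever its implementation, the effect is that at most two email batches are in flight, and hitting the database, at any one time. A minimal sketch of such a concurrency-limited map, purely for illustration:

```js
// Illustrative concurrency-limited map; the real helper in Ghost is a library call.
async function mapWithConcurrency(items, mapper, {concurrency}) {
    const results = new Array(items.length);
    let next = 0;

    async function worker() {
        while (next < items.length) {
            const index = next++; // claim the next item synchronously, then await
            results[index] = await mapper(items[index], index);
        }
    }

    const workerCount = Math.min(concurrency, items.length);
    await Promise.all(new Array(workerCount).fill(0).map(() => worker()));
    return results;
}

// Hypothetical usage (names invented for the example):
// const batchResults = await mapWithConcurrency(batchIds, sendBatchById, {concurrency: 2});
```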

@@ -8,6 +8,8 @@ const messages = {
     emailError: 'An unexpected error occurred, please retry sending your newsletter.'
 };
 
+const MAX_SENDING_CONCURRENCY = 2;
+
 /**
  * @typedef {import('./sending-service')} SendingService
  * @typedef {import('./email-segmenter')} EmailSegmenter
@@ -267,8 +269,8 @@ class BatchSendingService {
             }
         };
 
-        // Run maximum 10 at the same time
-        await Promise.all(new Array(10).fill(0).map(() => runNext()));
+        // Run maximum MAX_SENDING_CONCURRENCY at the same time
+        await Promise.all(new Array(MAX_SENDING_CONCURRENCY).fill(0).map(() => runNext()));
 
         if (succeededCount < batches.length) {
             if (succeededCount > 0) {
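
The `Promise.all(new Array(MAX_SENDING_CONCURRENCY).fill(0).map(() => runNext()))` line is a tiny worker pool: it starts two copies of `runNext`, and each copy keeps claiming the next unsent batch until none are left. A stripped-down sketch of that shape — the real `runNext` also counts successes and failures, which is omitted here:

```js
// Stripped-down version of the worker-pool pattern used above (illustrative only).
const MAX_SENDING_CONCURRENCY = 2;

async function sendAll(batches, sendBatch) {
    const queue = [...batches];

    const runNext = async () => {
        while (queue.length > 0) {
            const batch = queue.shift(); // claimed synchronously, so no two workers share a batch
            await sendBatch(batch);
        }
    };

    // At most MAX_SENDING_CONCURRENCY batches are being sent at any moment.
    await Promise.all(new Array(MAX_SENDING_CONCURRENCY).fill(0).map(() => runNext()));
}
```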

@@ -1,5 +1,11 @@
 const {EmailDeliveredEvent, EmailOpenedEvent, EmailBouncedEvent, SpamComplaintEvent, EmailUnsubscribedEvent, EmailTemporaryBouncedEvent} = require('@tryghost/email-events');
 
+async function waitForEvent() {
+    return new Promise((resolve) => {
+        setTimeout(resolve, 100);
+    });
+}
+
 /**
  * @typedef EmailIdentification
  * @property {string} email
@@ -43,6 +49,8 @@ class EmailEventProcessor {
                 emailId: recipient.emailId,
                 timestamp
             }));
+            // We cannot await the dispatched domainEvent, but we need to limit the number of events that are processed at the same time
+            await waitForEvent();
         }
         return recipient;
     }
@@ -61,6 +69,8 @@ class EmailEventProcessor {
                 emailId: recipient.emailId,
                 timestamp
             }));
+            // We cannot await the dispatched domainEvent, but we need to limit the number of events that are processed at the same time
+            await waitForEvent();
         }
         return recipient;
     }
@@ -81,6 +91,8 @@ class EmailEventProcessor {
                 emailRecipientId: recipient.emailRecipientId,
                 timestamp
             }));
+            // We cannot await the dispatched domainEvent, but we need to limit the number of events that are processed at the same time
+            await waitForEvent();
         }
         return recipient;
     }
@@ -101,6 +113,8 @@ class EmailEventProcessor {
                 emailRecipientId: recipient.emailRecipientId,
                 timestamp
             }));
+            // We cannot await the dispatched domainEvent, but we need to limit the number of events that are processed at the same time
+            await waitForEvent();
         }
         return recipient;
     }
@@ -118,6 +132,8 @@ class EmailEventProcessor {
                 emailId: recipient.emailId,
                 timestamp
             }));
+            // We cannot await the dispatched domainEvent, but we need to limit the number of events that are processed at the same time
+            await waitForEvent();
         }
         return recipient;
     }
@@ -135,6 +151,8 @@ class EmailEventProcessor {
                 emailId: recipient.emailId,
                 timestamp
             }));
+            // We cannot await the dispatched domainEvent, but we need to limit the number of events that are processed at the same time
+            await waitForEvent();
         }
         return recipient;
     }
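
The comment repeated in each handler above is the crux of the change: dispatching a domain event is fire-and-forget, so the processor has no promise it could await to apply back-pressure on the subscribers that do the actual database work, and a fixed sleep is the only pacing lever left. A generic sketch of that shape (not the actual `@tryghost/domain-events` API, just an illustration):

```js
// Generic fire-and-forget event bus (illustrative only, not @tryghost/domain-events).
const subscribers = new Map();

function subscribe(EventType, handler) {
    const handlers = subscribers.get(EventType) || [];
    handlers.push(handler);
    subscribers.set(EventType, handlers);
}

function dispatch(event) {
    for (const handler of subscribers.get(event.constructor) || []) {
        // The promise returned by the handler is intentionally dropped: the dispatcher
        // has no handle on the database work it kicks off. That is why the processor
        // above paces its dispatches with waitForEvent() instead of awaiting them.
        Promise.resolve(handler(event)).catch(() => {});
    }
}
```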

@@ -440,7 +440,7 @@ describe('Batch Sending Service', function () {
             assert.equal(arg.batch, batches[0]);
         });
 
-        it('Works for more than 10 batches', async function () {
+        it('Works for more than 2 batches', async function () {
             const service = new BatchSendingService({});
             let runningCount = 0;
             let maxRunningCount = 0;
@@ -461,7 +461,7 @@ describe('Batch Sending Service', function () {
             sinon.assert.callCount(sendBatch, 101);
             const sendBatches = sendBatch.getCalls().map(call => call.args[0].batch);
             assert.deepEqual(sendBatches, batches);
-            assert.equal(maxRunningCount, 10);
+            assert.equal(maxRunningCount, 2);
         });
 
         it('Throws error if all batches fail', async function () {
@@ -485,7 +485,7 @@ describe('Batch Sending Service', function () {
             sinon.assert.callCount(sendBatch, 101);
             const sendBatches = sendBatch.getCalls().map(call => call.args[0].batch);
             assert.deepEqual(sendBatches, batches);
-            assert.equal(maxRunningCount, 10);
+            assert.equal(maxRunningCount, 2);
         });
 
         it('Throws error if a single batch fails', async function () {
@@ -511,7 +511,7 @@ describe('Batch Sending Service', function () {
             sinon.assert.callCount(sendBatch, 101);
             const sendBatches = sendBatch.getCalls().map(call => call.args[0].batch);
             assert.deepEqual(sendBatches, batches);
-            assert.equal(maxRunningCount, 10);
+            assert.equal(maxRunningCount, 2);
         });
     });
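
The `maxRunningCount` assertions rely on the `sendBatch` stub recording how many of its calls overlap; that stub isn't part of this diff, but it presumably looks something like this sketch:

```js
// Sketch of a sendBatch stub that records peak concurrency (the actual stub in the
// test file is not shown in this diff).
const sinon = require('sinon');

let runningCount = 0;
let maxRunningCount = 0;

const sendBatch = sinon.stub().callsFake(async () => {
    runningCount += 1;
    maxRunningCount = Math.max(maxRunningCount, runningCount);
    await new Promise(resolve => setTimeout(resolve, 5)); // keep the call "running" briefly
    runningCount -= 1;
    return true;
});
```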