mirror of https://github.com/TryGhost/Ghost.git synced 2025-02-10 23:36:14 -05:00

Added batching support for bulk email service (#11388)

no issue

- The limit on the Mailgun API side appears to be 1000 recipients per message, so recipients are chunked into batches before sending (see the sketch below).
- The only place where I could find a hard limit of 1000 emails per
batch documented was this PHP SDK issue: https://github.com/mailgun/mailgun-php/issues/469
- To store the ids of sent messages, a `meta` column is introduced on the emails table. These ids can be synced with stats or other metrics during event polling in the future.
- Removed the redundant `join(',')` call. The SDK accepts an array of emails as well. Less code - better code :)
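For illustration, here is a minimal standalone sketch of the batching approach, assuming a configured mailgun-js client and a plain message object. The client setup, domain, and `sendInBatches` name are inventions for this sketch, and it uses native `Promise.all` where the diff itself uses Bluebird's `Promise.map`:

const _ = require('lodash');
const mailgun = require('mailgun-js');

// Hypothetical client setup; Ghost builds its instance from config instead
const mg = mailgun({apiKey: process.env.MAILGUN_API_KEY, domain: 'example.com'});

// Mailgun rejects messages with more than 1000 recipients, so chunk first
const BATCH_SIZE = 1000;

async function sendInBatches(message, recipients, recipientData) {
    const batches = _.chunk(recipients, BATCH_SIZE);

    return Promise.all(batches.map((toAddresses) => {
        // Pass only the recipient variables that belong to this batch so
        // Mailgun can substitute %recipient.key% placeholders per recipient
        const recipientVariables = _.pick(recipientData, toAddresses);

        return mg.messages().send(Object.assign({}, message, {
            to: toAddresses, // the SDK accepts an array here, no join needed
            'recipient-variables': recipientVariables
        }));
    }));
}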
Naz Gargol 2019-11-13 17:52:23 +07:00 committed by GitHub
parent 208b710677
commit f5479e1473
4 changed files with 43 additions and 15 deletions

View file

@@ -389,6 +389,7 @@ module.exports = {
             validations: {isIn: [['pending', 'submitting', 'submitted', 'failed']]}
         },
         error: {type: 'string', maxlength: 2000, nullable: true},
+        meta: {type: 'text', maxlength: 65535, nullable: true},
         stats: {type: 'text', maxlength: 65535, nullable: true},
         email_count: {type: 'integer', nullable: false, unsigned: true, defaultTo: 0},
         subject: {type: 'string', maxlength: 300, nullable: true},
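The `meta` column added above is a plain text field, so whatever the send step resolves with has to be serialized before storage. Assuming Mailgun's usual queue acknowledgement shape, a stored value might look like this (the ids are invented for the example):

// One response object per sent batch, stringified into emails.meta
const meta = JSON.stringify([
    {id: '<20191113175223.1.AAAA@example.com>', message: 'Queued. Thank you.'},
    {id: '<20191113175224.2.BBBB@example.com>', message: 'Queued. Thank you.'}
]);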

View file

@@ -1,4 +1,5 @@
 const {URL} = require('url');
+const _ = require('lodash');
 const mailgun = require('mailgun-js');
 const configService = require('../../config');
 const common = require('../../lib/common');
@@ -46,31 +47,56 @@ module.exports = {
      * @param {Email} message - The message to send
      * @param {[EmailAddress]} recipients - the recipients to send the email to
      * @param {[object]} recipientData - list of data keyed by email to inject into the email
-     * @returns {Promise<boolean>} A promise representing the success of the email sending
+     * @returns {Promise<Array<object>>} An array of promises representing the success of the batch email sending
      */
     async send(message, recipients, recipientData) {
+        let BATCH_SIZE = 1000;
         if (!mailgunInstance) {
             return;
         }
         let fromAddress = message.from;
-        if (/@localhost$/.test(message.from)) {
+        if (/@localhost$/.test(message.from) || /@ghost.local$/.test(message.from)) {
             fromAddress = 'localhost@example.com';
             common.logging.warn(`Rewriting bulk email from address ${message.from} to ${fromAddress}`);
+            BATCH_SIZE = 2;
         }
         try {
-            const messageData = Object.assign({}, message, {
-                to: recipients.join(', '),
-                from: fromAddress,
-                'recipient-variables': recipientData
-            });
+            const chunkedRecipients = _.chunk(recipients, BATCH_SIZE);

-            if (config.mailgun.tag) {
-                Object.assign(messageData, {
-                    'o:tag': config.mailgun.tag
-                });
-            }
+            return Promise.map(chunkedRecipients, (toAddresses) => {
+                const recipientVariables = {};
+                toAddresses.forEach((email) => {
+                    recipientVariables[email] = recipientData[email];
+                });

-            await mailgunInstance.messages().send(messageData);
+                const messageData = Object.assign({}, message, {
+                    to: toAddresses,
+                    from: fromAddress,
+                    'recipient-variables': recipientVariables
+                });
+
+                if (config.mailgun.tag) {
+                    Object.assign(messageData, {
+                        'o:tag': config.mailgun.tag
+                    });
+                }
+
+                return mailgunInstance.messages().send(messageData);
+            });
         } catch (err) {
             common.logging.error({err});
         }
     },
+
+    async getStats(messageId) {
+        try {
+            let filter = {
+                'message-id': messageId
+            };
+
+            return await mailgunInstance.events().get(filter);
+        } catch (err) {
+            common.logging.error({err});
+        }
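Worth noting: `Promise.map` is Bluebird's API rather than native; it works here because Ghost swaps the global `Promise` for Bluebird at server startup, so no explicit require appears in this file. A usage sketch of the provider surface above (the require path and email addresses are invented for the example):

// Sketch only; assumes the module above is reachable at this path
const bulkEmail = require('./bulk-email/mailgun');

async function example() {
    const message = {
        from: 'noreply@example.com',
        subject: 'Hello %recipient.name%',
        html: '<p>Hi %recipient.name%</p>'
    };
    const recipientData = {
        'a@example.com': {name: 'Ada'},
        'b@example.com': {name: 'Bob'}
    };

    // Resolves with an array of Mailgun responses, one per batch
    const responses = await bulkEmail.send(message, Object.keys(recipientData), recipientData);

    // Each response id can later be fed to getStats to pull delivery events
    return bulkEmail.getStats(responses[0].id);
}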

View file

@@ -179,10 +179,11 @@ async function listener(emailModel, options) {
         id: emailModel.id
     });

-    await sendEmail(post, members);
+    const meta = await sendEmail(post, members);

     await models.Email.edit({
-        status: 'submitted'
+        status: 'submitted',
+        meta: JSON.stringify(meta)
     }, {
         id: emailModel.id
     });
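Since `sendEmail` now resolves with the per-batch Mailgun responses, the stored `meta` has to be parsed back out of the text column before the ids can be used, e.g. for the stats syncing mentioned in the commit message. A minimal sketch; the `messageIdsFor` helper is hypothetical and the model lookup is simplified:

async function messageIdsFor(emailId) {
    // Hypothetical read-back; Bookshelf models expose attributes via .get()
    const email = await models.Email.findOne({id: emailId});
    const batchResponses = JSON.parse(email.get('meta') || '[]');

    // Each Mailgun message id can be matched against events().get() output
    return batchResponses.map(response => response.id);
}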

View file

@@ -19,7 +19,7 @@ var should = require('should'),
  */
 describe('DB version integrity', function () {
     // Only these variables should need updating
-    const currentSchemaHash = '34f9620db05fc136fa9abb7133e21615';
+    const currentSchemaHash = '1bfb322b4eeb5275cb1baa0df69d3ada';
     const currentFixturesHash = 'b1787330f042f3954d73c43aa8bfa915';
     // If this test is failing, then it is likely a change has been made that requires a DB version bump,