diff --git a/core/server/data/schema/schema.js b/core/server/data/schema/schema.js
index dcbda76573..bc04ef80e9 100644
--- a/core/server/data/schema/schema.js
+++ b/core/server/data/schema/schema.js
@@ -389,6 +389,7 @@ module.exports = {
             validations: {isIn: [['pending', 'submitting', 'submitted', 'failed']]}
         },
         error: {type: 'string', maxlength: 2000, nullable: true},
+        meta: {type: 'text', maxlength: 65535, nullable: true},
         stats: {type: 'text', maxlength: 65535, nullable: true},
         email_count: {type: 'integer', nullable: false, unsigned: true, defaultTo: 0},
         subject: {type: 'string', maxlength: 300, nullable: true},
diff --git a/core/server/services/bulk-email/index.js b/core/server/services/bulk-email/index.js
index 31381e5ae8..e02782e466 100644
--- a/core/server/services/bulk-email/index.js
+++ b/core/server/services/bulk-email/index.js
@@ -1,4 +1,5 @@
 const {URL} = require('url');
+const _ = require('lodash');
 const mailgun = require('mailgun-js');
 const configService = require('../../config');
 const common = require('../../lib/common');
@@ -46,31 +47,56 @@ module.exports = {
      * @param {Email} message - The message to send
      * @param {[EmailAddress]} recipients - the recipients to send the email to
      * @param {[object]} recipientData - list of data keyed by email to inject into the email
-     * @returns {Promise} A promise representing the success of the email sending
+     * @returns {Promise<Array<object>>} An array of promises representing the success of the batch email sending
      */
     async send(message, recipients, recipientData) {
+        let BATCH_SIZE = 1000;
+
         if (!mailgunInstance) {
             return;
         }
         let fromAddress = message.from;
-        if (/@localhost$/.test(message.from)) {
+        if (/@localhost$/.test(message.from) || /@ghost.local$/.test(message.from)) {
             fromAddress = 'localhost@example.com';
             common.logging.warn(`Rewriting bulk email from address ${message.from} to ${fromAddress}`);
+
+            BATCH_SIZE = 2;
         }
         try {
-            const messageData = Object.assign({}, message, {
-                to: recipients.join(', '),
-                from: fromAddress,
-                'recipient-variables': recipientData
-            });
+            const chunkedRecipients = _.chunk(recipients, BATCH_SIZE);
 
-            if (config.mailgun.tag) {
-                Object.assign(messageData, {
-                    'o:tag': config.mailgun.tag
+            return Promise.map(chunkedRecipients, (toAddresses) => {
+                const recipientVariables = {};
+                toAddresses.forEach((email) => {
+                    recipientVariables[email] = recipientData[email];
                 });
-            }
 
-            await mailgunInstance.messages().send(messageData);
+                const messageData = Object.assign({}, message, {
+                    to: toAddresses,
+                    from: fromAddress,
+                    'recipient-variables': recipientVariables
+                });
+
+                if (config.mailgun.tag) {
+                    Object.assign(messageData, {
+                        'o:tag': config.mailgun.tag
+                    });
+                }
+
+                return mailgunInstance.messages().send(messageData);
+            });
+        } catch (err) {
+            common.logging.error({err});
+        }
+    },
+
+    async getStats(messageId) {
+        try {
+            let filter = {
+                'message-id': messageId
+            };
+
+            return await mailgunInstance.events().get(filter);
         } catch (err) {
             common.logging.error({err});
         }
diff --git a/core/server/services/mega/mega.js b/core/server/services/mega/mega.js
index 691bcfbbdf..0bb35f1bc4 100644
--- a/core/server/services/mega/mega.js
+++ b/core/server/services/mega/mega.js
@@ -179,10 +179,11 @@ async function listener(emailModel, options) {
         id: emailModel.id
     });
 
-    await sendEmail(post, members);
+    const meta = await sendEmail(post, members);
 
     await models.Email.edit({
-        status: 'submitted'
+        status: 'submitted',
+        meta: JSON.stringify(meta)
     }, {
         id: emailModel.id
     });
diff --git a/core/test/unit/data/schema/integrity_spec.js b/core/test/unit/data/schema/integrity_spec.js
index 62d0fb35c7..3d31104a65 100644
--- a/core/test/unit/data/schema/integrity_spec.js
+++ b/core/test/unit/data/schema/integrity_spec.js
@@ -19,7 +19,7 @@ var should = require('should'),
  */
 describe('DB version integrity', function () {
     // Only these variables should need updating
-    const currentSchemaHash = '34f9620db05fc136fa9abb7133e21615';
+    const currentSchemaHash = '1bfb322b4eeb5275cb1baa0df69d3ada';
     const currentFixturesHash = 'b1787330f042f3954d73c43aa8bfa915';
 
     // If this test is failing, then it is likely a change has been made that requires a DB version bump,
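
A note on the batching above (not part of the diff): Mailgun supports batch sending, where a single API call addresses many recipients and a `recipient-variables` object keyed by recipient email personalises each copy via `%recipient.varname%` placeholders; Mailgun caps a batch message at 1,000 recipients, which is presumably why `BATCH_SIZE` defaults to 1000. Also note that `Promise.map` is Bluebird's API rather than native `Promise`, so a Bluebird `Promise` is assumed to be in scope here; `Promise.all(chunkedRecipients.map(...))` would be the native equivalent. The sketch below mirrors how one per-chunk payload is assembled; the addresses and variable names are invented for illustration.

```js
// Illustrative only: hypothetical recipients and per-recipient data.
const _ = require('lodash');

const recipients = ['a@example.com', 'b@example.com', 'c@example.com'];
const recipientData = {
    'a@example.com': {unique_id: 'u1'},
    'b@example.com': {unique_id: 'u2'},
    'c@example.com': {unique_id: 'u3'}
};

// Same shape of work as send(): split into batches, then build one payload
// per batch whose recipient-variables only cover that batch's addresses.
const chunks = _.chunk(recipients, 2);

const payloads = chunks.map((toAddresses) => {
    const recipientVariables = {};
    toAddresses.forEach((email) => {
        recipientVariables[email] = recipientData[email];
    });

    return {
        from: 'newsletter@example.com',
        to: toAddresses,
        subject: 'Hello %recipient.unique_id%', // substituted per recipient by Mailgun
        'recipient-variables': recipientVariables
    };
});

console.log(payloads.length); // 2 batches: [a, b] and [c]
```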
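
Each `mailgunInstance.messages().send()` call resolves with Mailgun's send response (typically an object with an `id` and a human-readable `message`), so the batched `send` now resolves to an array of those responses, and the mega listener serialises that array into the new `emails.meta` column. A rough sketch of how a caller might later combine the stored meta with `getStats`, which filters Mailgun's events by `message-id`; the wiring below (module paths, model access) is assumed rather than taken from the diff:

```js
// Hypothetical caller; module paths and model usage are assumptions.
const models = require('../../models');
const bulkEmailService = require('../bulk-email');

async function fetchDeliveryEvents(emailId) {
    const emailModel = await models.Email.findOne({id: emailId});

    // meta was stored via JSON.stringify(meta) in the mega listener; each entry
    // is assumed to be a Mailgun send response of the shape {id, message}.
    const batchResponses = JSON.parse(emailModel.get('meta') || '[]');

    const eventPages = [];
    for (const response of batchResponses) {
        // getStats() queries Mailgun's Events API filtered by message-id.
        eventPages.push(await bulkEmailService.getStats(response.id));
    }

    return eventPages;
}

module.exports = fetchDeliveryEvents;
```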
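
The `schema.js` addition (and the bumped schema hash in `integrity_spec.js`) would normally be accompanied by a versioned migration so existing databases gain the `emails.meta` column; no migration appears in this diff. As a rough sketch of the equivalent column change in plain Knex, not Ghost's actual migration helpers or file layout:

```js
// Sketch only: plain Knex equivalent of adding the nullable emails.meta column.
module.exports = {
    async up(knex) {
        const hasMeta = await knex.schema.hasColumn('emails', 'meta');
        if (!hasMeta) {
            await knex.schema.alterTable('emails', (table) => {
                table.text('meta').nullable(); // TEXT, matching maxlength 65535 in schema.js
            });
        }
    },

    async down(knex) {
        const hasMeta = await knex.schema.hasColumn('emails', 'meta');
        if (hasMeta) {
            await knex.schema.alterTable('emails', (table) => {
                table.dropColumn('meta');
            });
        }
    }
};
```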