
🐛 Fixed SingleUseTokens being cleared on boot (#15999)

fixes https://github.com/TryGhost/Team/issues/1996

**Issue**
Our magic links are valid for 24 hours. After first usage, the token
remains valid for a further 10 minutes, so that email servers or clients
that "visit" links don't consume it before the member gets a chance to
use it.

The 10 minute window is implemented with setTimeout, which means that if
the process is interrupted, the window is lost entirely and the token
keeps working for the remainder of its 24 hour validity period. To
prevent that, we currently clear all tokens on boot.
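
For illustration, the old behaviour amounts to something like this minimal sketch (illustrative names only, not the actual Ghost code — the real removed `findOne` override is visible in the single-use-token model diff further down):

```js
// Sketch of the previous approach: the pending deletion exists only in this
// process's memory, so a restart (deploy, crash, upgrade) silently drops it
// and the token stays valid for the rest of its 24 hour lifetime.
const TEN_MINUTES = 10 * 60 * 1000;

function scheduleTokenDeletion(deleteTokenById, tokenId) {
    // deleteTokenById is a hypothetical callback standing in for the model's destroy call
    setTimeout(() => deleteTokenById(tokenId), TEN_MINUTES);
}
```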

**Solution**

To remove the boot-clearing logic, we need to make sure tokens remain
valid for only 10 minutes after first use, even across restarts.

This commit adds 3 new fields to the SingleUseToken model:
- updated_at: stores the last time the token was changed/used (not
really used at the moment)
- first_used_at: stores the first time the token was used
- used_count: stores the number of times the token has been used

Using these fields (a condensed sketch of the resulting rules follows below):
- A token can only be used 3 times
- A token is only valid for 10 minutes after first use, even if the
server restarts in between
- A token is only valid for 24 hours after creation (unchanged behaviour)

We now also delete expired tokens in a separate job instead of on boot /
in a timeout.
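
As a rough summary, the checks that validation now performs boil down to the following sketch (a simplified standalone function, not the exact implementation — the real logic, including the row-level `forUpdate` locking and the usage-count updates, is in the SingleUseTokenProvider diff below):

```js
// Condensed sketch of the new validity rules, using the constants introduced in this commit.
const MAGIC_LINK_TOKEN_VALIDITY = 24 * 60 * 60 * 1000; // 24 hours from creation
const MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE = 10 * 60 * 1000; // 10 minutes after first use
const MAGIC_LINK_TOKEN_MAX_USAGE_COUNT = 3;

function isTokenUsable({createdAt, firstUsedAt, usedCount}, now = Date.now()) {
    if (usedCount >= MAGIC_LINK_TOKEN_MAX_USAGE_COUNT) {
        return false; // rule 1: at most 3 uses
    }
    if (firstUsedAt !== null && now - firstUsedAt > MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE) {
        return false; // rule 2: first_used_at is persisted, so this survives restarts
    }
    if (now - createdAt > MAGIC_LINK_TOKEN_VALIDITY) {
        return false; // rule 3: absolute 24 hour lifetime, unchanged
    }
    return true;
}
```

Because `first_used_at` and `used_count` live in the database rather than in an in-memory timer, the 10 minute window no longer depends on the process staying alive.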
Simon Backx, 2023-01-04 09:49:39 +01:00 (committed by GitHub)
parent 497491fbef
commit 789e2c96c0
16 changed files with 403 additions and 81 deletions


@@ -29,6 +29,7 @@
         "core/server/web/shared/**",
         "!core/server/web/shared/middleware/**",
         "core/server/api/endpoints/**",
-        "!core/server/api/endpoints/utils"
+        "!core/server/api/endpoints/utils",
+        "core/server/services/members/jobs/**"
     ]
 }


@@ -0,0 +1,20 @@
const {createAddColumnMigration, combineNonTransactionalMigrations} = require('../../utils');

module.exports = combineNonTransactionalMigrations(
    createAddColumnMigration('tokens', 'updated_at', {
        type: 'dateTime',
        nullable: true
    }),

    createAddColumnMigration('tokens', 'first_used_at', {
        type: 'dateTime',
        nullable: true
    }),

    createAddColumnMigration('tokens', 'used_count', {
        type: 'integer',
        nullable: false,
        unsigned: true,
        defaultTo: 0
    })
);


@@ -858,6 +858,9 @@ module.exports = {
         token: {type: 'string', maxlength: 32, nullable: false, index: true},
         data: {type: 'string', maxlength: 2000, nullable: true},
         created_at: {type: 'dateTime', nullable: false},
+        updated_at: {type: 'dateTime', nullable: true},
+        first_used_at: {type: 'dateTime', nullable: true},
+        used_count: {type: 'integer', nullable: false, unsigned: true, defaultTo: 0},
         created_by: {type: 'string', maxlength: 24, nullable: false}
     },
     snippets: {


@@ -1,12 +1,12 @@
 const ghostBookshelf = require('./base');
 const crypto = require('crypto');
-const logging = require('@tryghost/logging');

 const SingleUseToken = ghostBookshelf.Model.extend({
     tableName: 'tokens',

     defaults() {
         return {
+            used_count: 0,
             token: crypto
                 .randomBytes(192 / 8)
                 .toString('base64')
@@ -15,30 +15,7 @@ const SingleUseToken = ghostBookshelf.Model.extend({
                 .replace(/\//g, '_')
         };
     }
-}, {
-    async findOne(data, unfilteredOptions = {}) {
-        const model = await ghostBookshelf.Model.findOne.call(this, data, unfilteredOptions);
-
-        if (model) {
-            setTimeout(async () => {
-                try {
-                    await this.destroy(Object.assign({
-                        destroyBy: {
-                            id: model.id
-                        }
-                    }, {
-                        ...unfilteredOptions,
-                        transacting: null
-                    }));
-                } catch (err) {
-                    logging.error(err);
-                }
-            }, 10 * 60 * 1000);
-        }
-
-        return model;
-    }
-});
+}, {});

 const SingleUseTokens = ghostBookshelf.Collection.extend({
     model: SingleUseToken


@@ -3,12 +3,17 @@ const {ValidationError} = require('@tryghost/errors');

 class SingleUseTokenProvider {
     /**
-     * @param {import('../../models/base')} SingleUseTokenModel - A model for creating and retrieving tokens.
-     * @param {number} validity - How long a token is valid for from it's creation in milliseconds.
+     * @param {Object} dependencies
+     * @param {import('../../models/base')} dependencies.SingleUseTokenModel - A model for creating and retrieving tokens.
+     * @param {number} dependencies.validityPeriod - How long a token is valid for from it's creation in milliseconds.
+     * @param {number} dependencies.validityPeriodAfterUsage - How long a token is valid after first usage, in milliseconds.
+     * @param {number} dependencies.maxUsageCount - How many times a token can be used.
      */
-    constructor(SingleUseTokenModel, validity) {
+    constructor({SingleUseTokenModel, validityPeriod, validityPeriodAfterUsage, maxUsageCount}) {
         this.model = SingleUseTokenModel;
-        this.validity = validity;
+        this.validityPeriod = validityPeriod;
+        this.validityPeriodAfterUsage = validityPeriodAfterUsage;
+        this.maxUsageCount = maxUsageCount;
     }

     /**
@@ -37,8 +42,17 @@ class SingleUseTokenProvider {
      *
      * @returns {Promise<Object<string, any>>}
      */
-    async validate(token) {
-        const model = await this.model.findOne({token});
+    async validate(token, options = {}) {
+        if (!options.transacting) {
+            return await this.model.transaction((transacting) => {
+                return this.validate(token, {
+                    ...options,
+                    transacting
+                });
+            });
+        }
+
+        const model = await this.model.findOne({token}, {transacting: options.transacting, forUpdate: true});

         if (!model) {
             throw new ValidationError({
@@ -46,16 +60,46 @@
             });
         }

-        const createdAtEpoch = model.get('created_at').getTime();
-        const tokenLifetimeMilliseconds = Date.now() - createdAtEpoch;
-
-        if (tokenLifetimeMilliseconds > this.validity) {
+        if (model.get('used_count') >= this.maxUsageCount) {
             throw new ValidationError({
                 message: 'Token expired'
             });
         }

+        const createdAtEpoch = model.get('created_at').getTime();
+        const firstUsedAtEpoch = model.get('first_used_at')?.getTime() ?? createdAtEpoch;
+
+        // Is this token already used?
+        if (model.get('used_count') > 0) {
+            const timeSinceFirstUsage = Date.now() - firstUsedAtEpoch;
+
+            if (timeSinceFirstUsage > this.validityPeriodAfterUsage) {
+                throw new ValidationError({
+                    message: 'Token expired'
+                });
+            }
+        }
+
+        const tokenLifetimeMilliseconds = Date.now() - createdAtEpoch;
+
+        if (tokenLifetimeMilliseconds > this.validityPeriod) {
+            throw new ValidationError({
+                message: 'Token expired'
+            });
+        }
+
+        if (!model.get('first_used_at')) {
+            await model.save({
+                first_used_at: new Date(),
+                updated_at: new Date(),
+                used_count: model.get('used_count') + 1
+            }, {autoRefresh: false, patch: true, transacting: options.transacting});
+        } else {
+            await model.save({
+                used_count: model.get('used_count') + 1,
+                updated_at: new Date()
+            }, {autoRefresh: false, patch: true, transacting: options.transacting});
+        }
+
         try {
             return JSON.parse(model.get('data'));
         } catch (err) {


@@ -19,6 +19,8 @@ const memberAttributionService = require('../member-attribution');
 const emailSuppressionList = require('../email-suppression-list');

 const MAGIC_LINK_TOKEN_VALIDITY = 24 * 60 * 60 * 1000;
+const MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE = 10 * 60 * 1000;
+const MAGIC_LINK_TOKEN_MAX_USAGE_COUNT = 3;

 const ghostMailer = new mail.GhostMailer();

@@ -30,7 +32,12 @@ function createApiInstance(config) {
         auth: {
             getSigninURL: config.getSigninURL.bind(config),
             allowSelfSignup: config.getAllowSelfSignup.bind(config),
-            tokenProvider: new SingleUseTokenProvider(models.SingleUseToken, MAGIC_LINK_TOKEN_VALIDITY)
+            tokenProvider: new SingleUseTokenProvider({
+                SingleUseTokenModel: models.SingleUseToken,
+                validityPeriod: MAGIC_LINK_TOKEN_VALIDITY,
+                validityPeriodAfterUsage: MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE,
+                maxUsageCount: MAGIC_LINK_TOKEN_MAX_USAGE_COUNT
+            })
         },
         mail: {
             transporter: {


@@ -0,0 +1,50 @@
const {parentPort} = require('worker_threads');
const debug = require('@tryghost/debug')('jobs:clean-tokens');
const moment = require('moment');

// Exit early when cancelled to prevent stalling shutdown. No cleanup needed when cancelling as everything is idempotent and will pick up
// where it left off on next run
function cancel() {
    if (parentPort) {
        parentPort.postMessage('Expired complimentary subscriptions cleanup cancelled before completion');
        parentPort.postMessage('cancelled');
    } else {
        setTimeout(() => {
            process.exit(0);
        }, 1000);
    }
}

if (parentPort) {
    parentPort.once('message', (message) => {
        if (message === 'cancel') {
            return cancel();
        }
    });
}

(async () => {
    const cleanupStartDate = new Date();
    const db = require('../../../data/db');

    debug(`Starting cleanup of tokens`);

    // We delete all tokens that are older than 24 hours.
    const d = moment.utc().subtract(24, 'hours');

    const deletedTokens = await db.knex('tokens')
        .where('created_at', '<', d.format('YYYY-MM-DD HH:mm:ss')) // we need to be careful about the type here. .format() is the only thing that works across SQLite and MySQL
        .delete();

    const cleanupEndDate = new Date();
    debug(`Removed ${deletedTokens} tokens created before ${d.toISOString()} in ${cleanupEndDate.valueOf() - cleanupStartDate.valueOf()}ms`);

    if (parentPort) {
        parentPort.postMessage(`Removed ${deletedTokens} tokens created before ${d.toISOString()} in ${cleanupEndDate.valueOf() - cleanupStartDate.valueOf()}ms`);
        parentPort.postMessage('done');
    } else {
        // give the logging pipes time finish writing before exit
        setTimeout(() => {
            process.exit(0);
        }, 1000);
    }
})();


@@ -1,12 +1,15 @@
 const path = require('path');
 const jobsService = require('../../jobs');

-let hasScheduled = false;
+let hasScheduled = {
+    expiredComped: false,
+    tokens: false
+};

 module.exports = {
     async scheduleExpiredCompCleanupJob() {
         if (
-            !hasScheduled &&
+            !hasScheduled.expiredComped &&
             !process.env.NODE_ENV.startsWith('test')
         ) {
             // use a random seconds value to avoid spikes to external APIs on the minute
@@ -19,9 +22,31 @@ module.exports = {
                 name: 'clean-expired-comped'
             });

-            hasScheduled = true;
+            hasScheduled.expiredComped = true;
         }

-        return hasScheduled;
+        return hasScheduled.expiredComped;
+    },
+
+    async scheduleTokenCleanupJob() {
+        if (
+            !hasScheduled.tokens &&
+            !process.env.NODE_ENV.startsWith('test')
+        ) {
+            // use a random seconds/minutes/hours value to avoid delete spikes to the database
+            const s = Math.floor(Math.random() * 60); // 0-59
+            const m = Math.floor(Math.random() * 60); // 0-59
+            const h = Math.floor(Math.random() * 24); // 0-23
+
+            jobsService.addJob({
+                at: `${s} ${m} ${h} * * *`, // Every day
+                job: require('path').resolve(__dirname, 'clean-tokens.js'),
+                name: 'clean-tokens'
+            });
+            hasScheduled.tokens = true;
+        }
+
+        return hasScheduled.tokens;
     }
 };


@@ -135,15 +135,6 @@
         updateVerificationTrigger();

-        (async () => {
-            try {
-                const collection = await models.SingleUseToken.fetchAll();
-                await collection.invokeThen('destroy');
-            } catch (err) {
-                logging.error(err);
-            }
-        })();
-
         if (!env?.startsWith('testing')) {
             const membersMigrationJobName = 'members-migrations';
             if (!(await jobsService.hasExecutedSuccessfully(membersMigrationJobName))) {
@@ -159,6 +150,9 @@

         // Schedule daily cron job to clean expired comp subs
         memberJobs.scheduleExpiredCompCleanupJob();
+
+        // Schedule daily cron job to clean expired tokens
+        memberJobs.scheduleTokenCleanupJob();
     },

     contentGating: require('./content-gating'),


@@ -7,12 +7,19 @@ const limitService = require('../limits');
 const labs = require('../../../shared/labs');

 const MAGIC_LINK_TOKEN_VALIDITY = 24 * 60 * 60 * 1000;
+const MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE = 10 * 60 * 1000;
+const MAGIC_LINK_TOKEN_MAX_USAGE_COUNT = 3;

 module.exports = new NewslettersService({
     NewsletterModel: models.Newsletter,
     MemberModel: models.Member,
     mail,
-    singleUseTokenProvider: new SingleUseTokenProvider(models.SingleUseToken, MAGIC_LINK_TOKEN_VALIDITY),
+    singleUseTokenProvider: new SingleUseTokenProvider({
+        SingleUseTokenModel: models.SingleUseToken,
+        validityPeriod: MAGIC_LINK_TOKEN_VALIDITY,
+        validityPeriodAfterUsage: MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE,
+        maxUsageCount: MAGIC_LINK_TOKEN_MAX_USAGE_COUNT
+    }),
     urlUtils,
     limitService,
     labs


@@ -17,6 +17,8 @@ const ObjectId = require('bson-objectid').default;
 const settingsHelpers = require('../settings-helpers');

 const MAGIC_LINK_TOKEN_VALIDITY = 24 * 60 * 60 * 1000;
+const MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE = 10 * 60 * 1000;
+const MAGIC_LINK_TOKEN_MAX_USAGE_COUNT = 3;

 /**
  * @returns {SettingsBREADService} instance of the PostsService
@@ -27,7 +29,12 @@ const getSettingsBREADServiceInstance = () => {
         settingsCache: SettingsCache,
         labsService: labs,
         mail,
-        singleUseTokenProvider: new SingleUseTokenProvider(models.SingleUseToken, MAGIC_LINK_TOKEN_VALIDITY),
+        singleUseTokenProvider: new SingleUseTokenProvider({
+            SingleUseTokenModel: models.SingleUseToken,
+            validityPeriod: MAGIC_LINK_TOKEN_VALIDITY,
+            validityPeriodAfterUsage: MAGIC_LINK_TOKEN_VALIDITY_AFTER_USAGE,
+            maxUsageCount: MAGIC_LINK_TOKEN_MAX_USAGE_COUNT
+        }),
         urlUtils
     });
 };


@@ -301,7 +301,12 @@ describe('Settings API', function () {
     describe('verify key update', function () {
         it('can update members_support_address via token', async function () {
-            const token = await (new SingleUseTokenProvider(models.SingleUseToken, 24 * 60 * 60 * 1000)).create({key: 'members_support_address', value: 'support@example.com'});
+            const token = await (new SingleUseTokenProvider({
+                SingleUseTokenModel: models.SingleUseToken,
+                validityPeriod: 24 * 60 * 60 * 1000,
+                validityPeriodAfterUsage: 10 * 60 * 1000,
+                maxUsageCount: 1
+            })).create({key: 'members_support_address', value: 'support@example.com'});

             await agent.put('settings/verifications/')
                 .body({
                     token
@@ -324,7 +329,12 @@
         });

         it('cannot update invalid keys via token', async function () {
-            const token = await (new SingleUseTokenProvider(models.SingleUseToken, 24 * 60 * 60 * 1000)).create({key: 'members_support_address_invalid', value: 'support@example.com'});
+            const token = await (new SingleUseTokenProvider({
+                SingleUseTokenModel: models.SingleUseToken,
+                validityPeriod: 24 * 60 * 60 * 1000,
+                validityPeriodAfterUsage: 10 * 60 * 1000,
+                maxUsageCount: 1
+            })).create({key: 'members_support_address_invalid', value: 'support@example.com'});

             await agent.put('settings/verifications/')
                 .body({
                     token


@@ -150,6 +150,139 @@ describe('Members Signin', function () {
         assert(!member, 'Member should not have been created');
     });

+    describe('Validity Period', function () {
+        let clock;
+        let startDate = new Date();
+        const email = 'validity-period-member1@test.com';
+
+        beforeEach(async function () {
+            // Remove ms precision (not supported by MySQL)
+            startDate.setMilliseconds(0);
+            clock = sinon.useFakeTimers(startDate);
+        });
+
+        afterEach(function () {
+            clock.restore();
+        });
+
+        it('Expires a token after 10 minutes of first usage', async function () {
+            const magicLink = await membersService.api.getMagicLink(email, 'signup');
+            const magicLinkUrl = new URL(magicLink);
+            const token = magicLinkUrl.searchParams.get('token');
+
+            // Use a first time
+            await membersAgent.get(`/?token=${token}&action=signup`)
+                .expectStatus(302)
+                .expectHeader('Location', /\/welcome-free\/$/)
+                .expectHeader('Set-Cookie', /members-ssr.*/);
+
+            // Fetch token in the database
+            const model = await models.SingleUseToken.findOne({token});
+
+            assert(!!model, 'Token should exist in the database');
+            assert.equal(model.get('used_count'), 1, 'used_count should be 1');
+            assert.equal(model.get('first_used_at').getTime(), startDate.getTime(), 'first_used_at should be set after first usage');
+            assert.equal(model.get('updated_at').getTime(), startDate.getTime(), 'updated_at should be set on changes');
+
+            // Use a second time, after 5 minutes
+            clock.tick(5 * 60 * 1000);
+
+            await membersAgent.get(`/?token=${token}&action=signup`)
+                .expectStatus(302)
+                .expectHeader('Location', /\/welcome-free\/$/)
+                .expectHeader('Set-Cookie', /members-ssr.*/);
+
+            await model.refresh();
+
+            assert.equal(model.get('used_count'), 2, 'used_count should be 2');
+            // Not changed
+            assert.equal(model.get('first_used_at').getTime(), startDate.getTime(), 'first_used_at should not be changed on second usage');
+            // Updated at should be changed
+            assert.equal(model.get('updated_at').getTime(), new Date().getTime(), 'updated_at should be set on changes');
+
+            const lastChangedAt = new Date();
+
+            // Wait another 6 minutes, and the usage of the token should be blocked now
+            clock.tick(6 * 60 * 1000);
+
+            await membersAgent.get('/?token=blah')
+                .expectStatus(302)
+                .expectHeader('Location', /\?\w*success=false/);
+
+            // No changes expected
+            await model.refresh();
+            assert.equal(model.get('used_count'), 2, 'used_count should not be changed');
+            assert.equal(model.get('first_used_at').getTime(), startDate.getTime(), 'first_used_at should not be changed');
+            assert.equal(model.get('updated_at').getTime(), lastChangedAt.getTime(), 'updated_at should not be changed');
+        });
+
+        it('Expires a token after 3 uses', async function () {
+            const magicLink = await membersService.api.getMagicLink(email, 'signup');
+            const magicLinkUrl = new URL(magicLink);
+            const token = magicLinkUrl.searchParams.get('token');
+
+            // Use a first time
+            await membersAgent.get(`/?token=${token}&action=signup`)
+                .expectStatus(302)
+                .expectHeader('Location', /\/welcome-free\/$/)
+                .expectHeader('Set-Cookie', /members-ssr.*/);
+
+            await membersAgent.get(`/?token=${token}&action=signup`)
+                .expectStatus(302)
+                .expectHeader('Location', /\/welcome-free\/$/)
+                .expectHeader('Set-Cookie', /members-ssr.*/);
+
+            await membersAgent.get(`/?token=${token}&action=signup`)
+                .expectStatus(302)
+                .expectHeader('Location', /\/welcome-free\/$/)
+                .expectHeader('Set-Cookie', /members-ssr.*/);
+
+            // Fetch token in the database
+            const model = await models.SingleUseToken.findOne({token});
+
+            assert(!!model, 'Token should exist in the database');
+            assert.equal(model.get('used_count'), 3, 'used_count should be 3');
+            assert.equal(model.get('first_used_at').getTime(), startDate.getTime(), 'first_used_at should be set after first usage');
+            assert.equal(model.get('updated_at').getTime(), startDate.getTime(), 'updated_at should be set on changes');
+
+            // Failed 4th usage
+            await membersAgent.get('/?token=blah')
+                .expectStatus(302)
+                .expectHeader('Location', /\?\w*success=false/);
+
+            // No changes expected
+            await model.refresh();
+            assert.equal(model.get('used_count'), 3, 'used_count should be 3');
+            assert.equal(model.get('first_used_at').getTime(), startDate.getTime(), 'first_used_at should be set after first usage');
+            assert.equal(model.get('updated_at').getTime(), startDate.getTime(), 'updated_at should be set on changes');
+        });
+
+        it('Expires a token after 24 hours if never used', async function () {
+            const magicLink = await membersService.api.getMagicLink(email, 'signup');
+            const magicLinkUrl = new URL(magicLink);
+            const token = magicLinkUrl.searchParams.get('token');
+
+            // Wait 24 hours
+            clock.tick(24 * 60 * 60 * 1000);
+
+            await membersAgent.get('/?token=blah')
+                .expectStatus(302)
+                .expectHeader('Location', /\?\w*success=false/);
+
+            // No changes expected
+            const model = await models.SingleUseToken.findOne({token});
+            assert(!!model, 'Token should exist in the database');
+            assert.equal(model.get('used_count'), 0, 'used_count should be 0');
+            assert.equal(model.get('first_used_at'), null, 'first_used_at should not be set');
+            assert.equal(model.get('updated_at').getTime(), startDate.getTime(), 'updated_at should not be set');
+        });
+    });
+
     describe('Rate limiting', function () {
         let clock;


@@ -0,0 +1,60 @@
const sinon = require('sinon');
const {agentProvider, fixtureManager} = require('../../../utils/e2e-framework');
const assert = require('assert');
const models = require('../../../../core/server/models');

describe('Job: Clean tokens', function () {
    let agent;
    let jobsService;
    let clock;

    before(async function () {
        agent = await agentProvider.getAdminAPIAgent();
        await fixtureManager.init('newsletters', 'members:newsletters', 'members:emails');
        await agent.loginAsOwner();

        // Only reference services after Ghost boot
        jobsService = require('../../../../core/server/services/jobs');
    });

    this.afterAll(function () {
        sinon.restore();
    });

    it('Deletes tokens that are older than 24 hours', async function () {
        // Go back 25 hours (reason: the job will be run at the current time, no way to change that)
        clock = sinon.useFakeTimers(Date.now() - 25 * 60 * 60 * 1000);

        // Create some tokens
        const firstToken = await models.SingleUseToken.add({data: 'test'});

        // Wait 24 hours
        clock.tick(24 * 60 * 60 * 1000);

        const secondToken = await models.SingleUseToken.add({data: 'test'});

        // Wait one hour
        clock.tick(1 * 60 * 60 * 1000);

        // Run the job
        const completedPromise = jobsService.awaitCompletion('clean-tokens');
        const job = require('path').resolve(__dirname, '../../../../core/server/services/members/jobs', 'clean-tokens.js');

        // NOTE: the job will not use the fake clock.
        await jobsService.addJob({
            job,
            name: 'clean-tokens'
        });

        // We need to tick the clock to activate 'bree' and run the job
        await clock.tickAsync(1000);
        await completedPromise;

        // Check second token exists
        const secondTokenExists = await models.SingleUseToken.findOne({id: secondToken.id});
        assert.ok(secondTokenExists, 'Second token should exist');

        // Check first token is deleted
        const firstTokenExists = await models.SingleUseToken.findOne({id: firstToken.id});
        assert.ok(!firstTokenExists, 'First token should not exist');
    });
});


@@ -35,7 +35,7 @@ const validateRouteSettings = require('../../../../../core/server/services/route
  */
 describe('DB version integrity', function () {
     // Only these variables should need updating
-    const currentSchemaHash = 'aa2f277e624b5fbe5f18cb0d78ff18f4';
+    const currentSchemaHash = '7e561ad3b6eec1b9188f54ad46b04f40';
     const currentFixturesHash = 'dcb7ba7c66b4b98d6c26a722985e756a';
     const currentSettingsHash = '9acce72858e75420b831297718595bbd';
     const currentRoutesHash = '3d180d52c663d173a6be791ef411ed01';


@@ -1,6 +1,7 @@
 const models = require('../../../../core/server/models');
 const should = require('should');
 const sinon = require('sinon');
+const assert = require('assert');

 let clock;
 let sandbox;
@@ -17,28 +18,11 @@ describe('Unit: models/single-use-token', function () {
         sandbox.restore();
     });

-    describe('fn: findOne', function () {
-        it('Calls destroy after the grace period', async function () {
-            const data = {};
-            const options = {};
-            const fakeModel = {
-                id: 'fake_id'
-            };
-            const findOneSuperStub = sandbox.stub(models.Base.Model, 'findOne').resolves(fakeModel);
-            const destroyStub = sandbox.stub(models.SingleUseToken, 'destroy').resolves();
-
-            await models.SingleUseToken.findOne(data, options);
-
-            should.ok(findOneSuperStub.calledWith(data, options), 'super.findOne was called');
-
-            clock.tick(10000);
-            should.ok(!destroyStub.called, 'destroy was not called after 10 seconds');
-
-            clock.tick(10 * 60 * 1000 - 10000);
-            should.ok(destroyStub.called, 'destroy was not called after 10 seconds');
+    describe('fn: defaults', function () {
+        it('Defaults to used_count of zero', async function () {
+            const model = new models.SingleUseToken();
+            const defaults = model.defaults();
+            assert.equal(defaults.used_count, 0);
         });
     });
 });