mirror of
https://github.com/TryGhost/Ghost.git
synced 2025-03-11 02:12:21 -05:00
Migrated db controller to API v2 (#10051)
refs #9866 - Migrated db import/export routes to use new db controller
This commit is contained in:
parent
5d2ab19881
commit
aa8e75914d
9 changed files with 402 additions and 6 deletions
126
core/server/api/v2/db.js
Normal file
126
core/server/api/v2/db.js
Normal file
|
@ -0,0 +1,126 @@
|
|||
const Promise = require('bluebird');
const backupDatabase = require('../../data/db/backup');
const exporter = require('../../data/exporter');
const importer = require('../../data/importer');
const common = require('../../lib/common');
const models = require('../../models');

/**
 * API v2 `db` controller.
 *
 * Declarative endpoint configuration consumed by the shared API pipeline:
 * each key (`backupContent`, `exportContent`, …) describes permissions,
 * accepted options, validation rules, response headers and the `query`
 * function that performs the actual work against a `frame`.
 */
module.exports = {
    docName: 'db',

    // POST /db/backup — write a JSON export of the database to disk.
    backupContent: {
        permissions: true,
        options: [
            'include',
            'filename'
        ],
        validation: {
            options: {
                include: {
                    // Only the normally-excluded tables may be opted back in.
                    values: exporter.EXCLUDED_TABLES
                }
            }
        },
        query(frame) {
            // NOTE: we need to have `include` property available as backupDatabase uses it internally
            Object.assign(frame.options, {include: frame.options.withRelated});

            return backupDatabase(frame.options);
        }
    },

    // GET /db — stream the full content export back to the client as a file.
    exportContent: {
        options: [
            'include'
        ],
        validation: {
            options: {
                include: {
                    values: exporter.EXCLUDED_TABLES
                }
            }
        },
        headers: {
            disposition: {
                // Served as an attachment; filename is computed per-request.
                type: 'file',
                value: () => (exporter.fileName())
            }
        },
        permissions: {
            method: 'exportContent'
        },
        query(frame) {
            // Promise.resolve() wrapper ensures a synchronous throw from
            // doExport is still surfaced as a rejected promise.
            return Promise.resolve()
                .then(() => exporter.doExport({include: frame.options.withRelated}))
                .catch((err) => {
                    // Wrap any exporter failure in a GhostError for uniform handling.
                    return Promise.reject(new common.errors.GhostError({err: err}));
                });
        }
    },

    // POST /db — import content from an uploaded file.
    importContent: {
        options: [
            'include'
        ],
        validation: {
            options: {
                include: {
                    values: exporter.EXCLUDED_TABLES
                }
            }
        },
        permissions: {
            method: 'importContent'
        },
        query(frame) {
            return importer.importFromFile(frame.data, {include: frame.options.withRelated});
        }
    },

    // DELETE /db — remove all posts and tags (backs up the database first).
    deleteAllContent: {
        statusCode: 204,
        permissions: {
            method: 'deleteAllContent'
        },
        query() {
            /**
             * @NOTE:
             * We fetch all posts with `columns:id` to increase the speed of this endpoint.
             * And if you trigger `post.destroy(..)`, this will trigger bookshelf and model events.
             * But we only have to `id` available in the model. This won't work, because:
             *   - model layer can't trigger event e.g. `post.page` to trigger `post|page.unpublished`.
             *   - `onDestroyed` or `onDestroying` can contain custom logic
             */
            function deleteContent() {
                // Single transaction: either all posts and tags go, or none do.
                return models.Base.transaction((transacting) => {
                    const queryOpts = {
                        columns: 'id',
                        context: {internal: true},
                        destroyAll: true,
                        transacting: transacting
                    };

                    // Destroy posts first, then tags, each one-by-one so that
                    // model events still fire; concurrency is capped at 100.
                    return models.Post.findAll(queryOpts)
                        .then((response) => {
                            return Promise.map(response.models, (post) => {
                                return models.Post.destroy(Object.assign({id: post.id}, queryOpts));
                            }, {concurrency: 100});
                        })
                        .then(() => models.Tag.findAll(queryOpts))
                        .then((response) => {
                            return Promise.map(response.models, (tag) => {
                                return models.Tag.destroy(Object.assign({id: tag.id}, queryOpts));
                            }, {concurrency: 100});
                        })
                        .catch((err) => {
                            throw new common.errors.GhostError({
                                err: err
                            });
                        });
                });
            }

            // Always take a backup before wiping content.
            return backupDatabase().then(deleteContent);
        }
    }
};
|
|
@ -6,6 +6,10 @@ module.exports = {
|
|||
return shared.http;
|
||||
},
|
||||
|
||||
get db() {
|
||||
return shared.pipeline(require('./db'), localUtils);
|
||||
},
|
||||
|
||||
get integrations() {
|
||||
return shared.pipeline(require('./integrations'), localUtils);
|
||||
},
|
||||
|
|
22
core/server/api/v2/utils/serializers/input/db.js
Normal file
22
core/server/api/v2/utils/serializers/input/db.js
Normal file
|
@ -0,0 +1,22 @@
|
|||
const _ = require('lodash');
|
||||
const debug = require('ghost-ignition').debug('api:v2:utils:serializers:input:db');
|
||||
const optionsUtil = require('../../../../shared/utils/options');
|
||||
|
||||
const INTERNAL_OPTIONS = ['transacting', 'forUpdate'];
|
||||
|
||||
module.exports = {
|
||||
all(apiConfig, frame) {
|
||||
debug('serialize all');
|
||||
|
||||
if (frame.options.include) {
|
||||
frame.options.include = optionsUtil.trimAndLowerCase(frame.options.include);
|
||||
}
|
||||
|
||||
if (!frame.options.context.internal) {
|
||||
debug('omit internal options');
|
||||
frame.options = _.omit(frame.options, INTERNAL_OPTIONS);
|
||||
}
|
||||
|
||||
debug(frame.options);
|
||||
}
|
||||
};
|
|
@ -1,7 +1,12 @@
|
|||
module.exports = {
|
||||
get db() {
|
||||
return require('./db');
|
||||
},
|
||||
|
||||
get integrations() {
|
||||
return require('./integrations');
|
||||
},
|
||||
|
||||
get pages() {
|
||||
return require('./pages');
|
||||
},
|
||||
|
|
40
core/server/api/v2/utils/serializers/output/db.js
Normal file
40
core/server/api/v2/utils/serializers/output/db.js
Normal file
|
@ -0,0 +1,40 @@
|
|||
const debug = require('ghost-ignition').debug('api:v2:utils:serializers:output:db');
|
||||
|
||||
module.exports = {
|
||||
backupContent(filename, apiConfig, frame) {
|
||||
debug('backupContent');
|
||||
|
||||
frame.response = {
|
||||
db: [{filename: filename}]
|
||||
};
|
||||
},
|
||||
|
||||
exportContent(exportedData, apiConfig, frame) {
|
||||
debug('exportContent');
|
||||
|
||||
frame.response = {
|
||||
db: [exportedData]
|
||||
};
|
||||
},
|
||||
|
||||
importContent(response, apiConfig, frame) {
|
||||
debug('exportContent');
|
||||
|
||||
// NOTE: response can contain 2 objects if images are imported
|
||||
const problems = (response.length === 2)
|
||||
? response[1].problems
|
||||
: response[0].problems;
|
||||
|
||||
frame.response = {
|
||||
db: [],
|
||||
problems: problems
|
||||
};
|
||||
},
|
||||
|
||||
deleteAllContent(response, apiConfig, frame) {
|
||||
frame.response = {
|
||||
db: []
|
||||
};
|
||||
}
|
||||
};
|
||||
|
|
@ -1,4 +1,8 @@
|
|||
module.exports = {
|
||||
get db() {
|
||||
return require('./db');
|
||||
},
|
||||
|
||||
get integrations() {
|
||||
return require('./integrations');
|
||||
},
|
||||
|
|
|
@ -137,14 +137,18 @@ module.exports = function apiRoutes() {
|
|||
router.del('/notifications/:notification_id', mw.authAdminApi, apiv2.http(apiv2.notifications.destroy));
|
||||
|
||||
// ## DB
|
||||
router.get('/db', mw.authAdminApi, api.http(api.db.exportContent));
|
||||
router.get('/db', mw.authAdminApi, apiv2.http(apiv2.db.exportContent));
|
||||
router.post('/db',
|
||||
mw.authAdminApi,
|
||||
upload.single('importfile'),
|
||||
shared.middlewares.validation.upload({type: 'db'}),
|
||||
api.http(api.db.importContent)
|
||||
apiv2.http(apiv2.db.importContent)
|
||||
);
|
||||
router.del('/db', mw.authAdminApi, apiv2.http(apiv2.db.deleteAllContent));
|
||||
router.post('/db/backup',
|
||||
mw.authenticateClient('Ghost Backup'),
|
||||
apiv2.http(apiv2.db.backupContent)
|
||||
);
|
||||
router.del('/db', mw.authAdminApi, api.http(api.db.deleteAllContent));
|
||||
|
||||
// ## Mail
|
||||
router.post('/mail', mw.authAdminApi, apiv2.http(apiv2.mail.send));
|
||||
|
@ -195,8 +199,6 @@ module.exports = function apiRoutes() {
|
|||
apiv2.http(apiv2.upload.image)
|
||||
);
|
||||
|
||||
router.post('/db/backup', mw.authenticateClient('Ghost Backup'), api.http(api.db.backupContent));
|
||||
|
||||
router.post('/uploads/icon',
|
||||
mw.authAdminApi,
|
||||
upload.single('uploadimage'),
|
||||
|
|
|
@ -68,6 +68,8 @@ describe('DB API', function () {
|
|||
}
|
||||
|
||||
should.not.exist(res.headers['x-cache-invalidate']);
|
||||
should.exist(res.headers['content-disposition']);
|
||||
|
||||
var jsonResponse = res.body;
|
||||
should.exist(jsonResponse.db);
|
||||
jsonResponse.db.should.have.length(1);
|
||||
|
@ -178,7 +180,7 @@ describe('DB API', function () {
|
|||
});
|
||||
});
|
||||
|
||||
it('export can be triggered by backup client', function (done) {
|
||||
it('export can not be triggered by client other than backup', function (done) {
|
||||
schedulerQuery = '?client_id=' + schedulerClient.slug + '&client_secret=' + schedulerClient.secret;
|
||||
fsStub = sandbox.stub(fs, 'writeFile').resolves();
|
||||
request.post(localUtils.API.getApiQuery('db/backup' + schedulerQuery))
|
||||
|
|
191
core/test/functional/api/v2/admin/db_spec.js
Normal file
191
core/test/functional/api/v2/admin/db_spec.js
Normal file
|
@ -0,0 +1,191 @@
|
|||
const path = require('path');
const _ = require('lodash');
const fs = require('fs-extra');
const should = require('should');
const supertest = require('supertest');
const sinon = require('sinon');
const config = require('../../../../../../core/server/config');
const models = require('../../../../../../core/server/models');
const common = require('../../../../../server/lib/common');
const testUtils = require('../../../../utils');
const localUtils = require('./utils');

let ghost = testUtils.startGhost;
let request;
let sandbox = sinon.sandbox.create();
let eventsTriggered;

/**
 * Functional tests for the v2 admin `db` endpoints
 * (export, import, backup, delete-all-content).
 */
describe('DB API', () => {
    let backupClient;
    let schedulerClient;

    before(() => {
        // Boot a Ghost instance, authenticate, and look up the two
        // special clients used by the backup/scheduler permission tests.
        return ghost()
            .then(() => {
                request = supertest.agent(config.get('url'));
            })
            .then(() => {
                return localUtils.doAuth(request);
            })
            .then(() => {
                return models.Client.findAll();
            })
            .then((result) => {
                const clients = result.toJSON();
                backupClient = _.find(clients, {slug: 'ghost-backup'});
                schedulerClient = _.find(clients, {slug: 'ghost-scheduler'});
            });
    });

    beforeEach(() => {
        // Record every model event emitted during a test so assertions
        // can check e.g. how many `post.deleted` events fired.
        eventsTriggered = {};

        sandbox.stub(common.events, 'emit').callsFake((eventName, eventObj) => {
            if (!eventsTriggered[eventName]) {
                eventsTriggered[eventName] = [];
            }

            eventsTriggered[eventName].push(eventObj);
        });
    });

    afterEach(() => {
        sandbox.restore();
    });

    it('should export data', () => {
        return request.get(localUtils.API.getApiQuery(`db/`))
            .set('Origin', config.get('url'))
            .expect('Content-Type', /json/)
            .expect('Cache-Control', testUtils.cacheRules.private)
            .expect(200)
            .expect('Content-Disposition', /Attachment; filename="[A-Za-z0-9._-]+\.json"/)
            .then((res) => {
                should.not.exist(res.headers['x-cache-invalidate']);
                should.exist(res.headers['content-disposition']);

                const jsonResponse = res.body;
                should.exist(jsonResponse.db);
                jsonResponse.db.should.have.length(1);
                // 25 = number of exported tables with default options
                // (excluded tables not requested).
                Object.keys(jsonResponse.db[0].data).length.should.eql(25);
            });
    });

    it('include more tables', () => {
        // Opting in clients + client_trusted_domains adds two tables.
        return request.get(localUtils.API.getApiQuery('db/?include=clients,client_trusted_domains'))
            .set('Origin', config.get('url'))
            .expect('Content-Type', /json/)
            .expect(200)
            .then((res) => {
                const jsonResponse = res.body;
                should.exist(jsonResponse.db);
                jsonResponse.db.should.have.length(1);
                Object.keys(jsonResponse.db[0].data).length.should.eql(27);
            });
    });

    it('import should fail without file', () => {
        return request.post(localUtils.API.getApiQuery('db/'))
            .set('Origin', config.get('url'))
            .set('Accept', 'application/json')
            .expect('Content-Type', /json/)
            .expect(403);
    });

    it('import should fail with unsupported file', () => {
        // CSV is not an accepted import format -> 415 Unsupported Media Type.
        return request.post(localUtils.API.getApiQuery('db/'))
            .set('Origin', config.get('url'))
            .expect('Content-Type', /json/)
            .attach('importfile', path.join(__dirname, '/../../../../utils/fixtures/csv/single-column-with-header.csv'))
            .expect(415);
    });

    it('export can be triggered by backup client', () => {
        const backupQuery = `?client_id=${backupClient.slug}&client_secret=${backupClient.secret}`;
        // Stub fs.writeFile so no backup file is actually written.
        const fsStub = sandbox.stub(fs, 'writeFile').resolves();

        return request.post(localUtils.API.getApiQuery(`db/backup${backupQuery}`))
            .expect('Content-Type', /json/)
            .expect(200)
            .then((res) => {
                (typeof res.body).should.be.Object;
                should.exist(res.body.db[0].filename);
                fsStub.calledOnce.should.eql(true);
            });
    });

    it('export can be triggered and named by backup client', () => {
        // `filename=test` should be reflected in the generated backup name.
        const backupQuery = `?client_id=${backupClient.slug}&client_secret=${backupClient.secret}&filename=test`;
        const fsStub = sandbox.stub(fs, 'writeFile').resolves();

        return request.post(localUtils.API.getApiQuery(`db/backup${backupQuery}`))
            .expect('Content-Type', /json/)
            .expect(200)
            .then((res) => {
                (typeof res.body).should.be.Object;
                res.body.db[0].filename.should.match(/test\.json/);
                fsStub.calledOnce.should.eql(true);
            });
    });

    it('export can not be triggered by client other than backup', () => {
        // The scheduler client lacks the backup permission -> 403.
        const schedulerQuery = `?client_id=${schedulerClient.slug}&client_secret=${schedulerClient.secret}`;
        const fsStub = sandbox.stub(fs, 'writeFile').resolves();

        return request.post(localUtils.API.getApiQuery(`db/backup${schedulerQuery}`))
            .expect('Content-Type', /json/)
            .expect(403)
            .then(res => {
                should.exist(res.body.errors);
                res.body.errors[0].errorType.should.eql('NoPermissionError');
                fsStub.called.should.eql(false);
            });
    });

    it('export can not be triggered by regular authentication', () => {
        // Session/cookie auth is not accepted for backups -> 401.
        const fsStub = sandbox.stub(fs, 'writeFile').resolves();

        return request.post(localUtils.API.getApiQuery(`db/backup`))
            .set('Origin', config.get('url'))
            .expect('Content-Type', /json/)
            .expect(401)
            .then(res => {
                should.exist(res.body.errors);
                res.body.errors[0].errorType.should.eql('UnauthorizedError');
                fsStub.called.should.eql(false);
            });
    });

    it('delete all content (owner)', () => {
        // Three phases: verify the 7 fixture posts exist, DELETE /db/,
        // then verify posts are gone and the expected model events fired.
        return request.get(localUtils.API.getApiQuery('posts/'))
            .set('Origin', config.get('url'))
            .expect('Content-Type', /json/)
            .expect('Cache-Control', testUtils.cacheRules.private)
            .expect(200)
            .then((res) => {
                let jsonResponse = res.body;
                let results = jsonResponse.posts;
                jsonResponse.posts.should.have.length(7);
                _.filter(results, {page: false, status: 'published'}).length.should.equal(7);
            })
            .then(() => {
                return request.delete(localUtils.API.getApiQuery('db/'))
                    .set('Origin', config.get('url'))
                    .set('Accept', 'application/json')
                    .expect(204);
            })
            .then(() => {
                return request.get(localUtils.API.getApiQuery('posts/'))
                    .set('Origin', config.get('url'))
                    .expect('Content-Type', /json/)
                    .expect('Cache-Control', testUtils.cacheRules.private)
                    .expect(200)
                    .then((res) => {
                        res.body.posts.should.have.length(0);
                        // Each destroyed post emits unpublished + deleted;
                        // the single fixture tag emits tag.deleted once.
                        eventsTriggered['post.unpublished'].length.should.eql(7);
                        eventsTriggered['post.deleted'].length.should.eql(7);
                        eventsTriggered['tag.deleted'].length.should.eql(1);
                    });
            });
    });
});
|
Loading…
Add table
Reference in a new issue