✨ Added dotfile support and set it on by default
refs https://github.com/TryGhost/Ghost/issues/11794

- archiver has an undocumented dot option that allows dotfiles to be included
- our existing tests had a dotfile, but we didn't properly check that everything exists after extraction
- Swapped to using folder-hash to compare whether the original and the compressed-then-decompressed folders are identical
- Added an example of a dotfolder with a nested dotfile
- Updated compress to use the dot option, made it optional but on by default
parent 242db117a8
commit 7964f6ec82

5 changed files with 25 additions and 18 deletions
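The mechanism the commit relies on is archiver's glob-based file selection: a pattern such as **/* skips dotfiles unless the matcher's dot flag is set, and archiver forwards that (undocumented) option through to its glob call. A minimal sketch of that behaviour, with hypothetical paths:

// Minimal sketch of archiver's glob-based selection with the undocumented
// "dot" option described in the commit message. Paths are hypothetical.
const fs = require('fs');
const archiver = require('archiver');

const output = fs.createWriteStream('/tmp/theme.zip');
const archive = archiver.create('zip');

archive.glob('**/*', {
    cwd: '/tmp/test-theme',
    dot: true, // include .well-known/, .gitkeep and other dotfiles
    ignore: ['node_modules/**']
});

archive.pipe(output);
archive.finalize();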
Changes to the compress module:

@@ -4,6 +4,7 @@ const Promise = require('bluebird');
 const defaultOptions = {
     type: 'zip',
     glob: '**/*',
+    dot: true,
     ignore: ['node_modules/**']
 };
 
@@ -17,14 +18,16 @@ const defaultOptions = {
  * @param {Object} [options]
  * @param {String} options.type - zip by default see archiver for other options
  * @param {String} options.glob - the files to include, defaults to all files and folders
+ * @param {Boolean} options.dot - include all dotfiles and dotfolders
  * @param {Array} options.ignore - any paths that should be ignored, sets node_modules by default
  *
  */
 module.exports = (folderToZip, destination, options = {}) => {
     const opts = Object.assign({}, defaultOptions, options);
 
     const archiver = require('archiver');
     const output = fs.createWriteStream(destination);
-    const archive = archiver.create(opts.type, {});
+    const archive = archiver.create(opts.type);
 
     return new Promise((resolve, reject) => {
         // If folder to zip is a symlink, we want to get the target
@@ -42,6 +45,7 @@ module.exports = (folderToZip, destination, options = {}) => {
         });
         archive.glob(opts.glob, {
             cwd: folderToZip,
+            dot: opts.dot,
             ignore: opts.ignore
         });
         archive.pipe(output);
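Taken together with the JSDoc above, the exported compress function accepts the folder to zip, the destination path, and an optional options object, and now includes dotfiles unless told otherwise. A hedged usage sketch follows; the require path and file locations are assumptions, not taken from the diff, while the signature and return shape follow the JSDoc and the tests below:

// Illustrative only: package name and paths are assumptions.
const {compress} = require('@tryghost/zip');

// Dotfiles and dotfolders are included by default (dot: true).
compress('/tmp/test-theme', '/tmp/theme.zip')
    .then((res) => console.log(res.path, res.size));

// Pass dot: false to fall back to skipping them.
compress('/tmp/test-theme', '/tmp/theme-without-dotfiles.zip', {dot: false})
    .then((res) => console.log(res.path, res.size));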
Changes to package.json (devDependencies):

@@ -19,6 +19,7 @@
         "access": "public"
     },
     "devDependencies": {
+        "folder-hash": "3.3.1",
         "mocha": "7.1.2",
         "should": "13.2.3",
         "sinon": "9.0.2"
New fixture files (both empty, marked vendored):

ghost/zip/test/fixtures/test-theme/.well-known/.gitkeep
ghost/zip/test/fixtures/test-theme/.well-known/hello.txt
Changes to the compress/extract test:

@@ -4,6 +4,7 @@ require('./utils');
 
 const path = require('path');
 const fs = require('fs-extra');
+const {hashElement} = require('folder-hash');
 
 // Mimic how we expect this to be required
 const {compress, extract} = require('../');
@@ -33,29 +34,30 @@ describe('Compress and Extract should be opposite functions', function () {
     it('ensure symlinks work', function (done) {
         fs.symlink(folderToSymlink, symlinkPath);
 
-        compress(symlinkPath, zipDestination)
+        let originalHash;
+
+        hashElement(symlinkPath)
+            .then((_originalHash) => {
+                originalHash = _originalHash;
+                return compress(symlinkPath, zipDestination);
+            })
             .then((res) => {
                 res.should.be.an.Object().with.properties('path', 'size');
                 res.path.should.eql(zipDestination);
-                res.size.should.eql(321775);
+                res.size.should.eql(323805);
 
-                extract(zipDestination, unzipDestination)
-                    .then((res) => {
-                        res.should.be.an.Object().with.properties('path');
-                        res.path.should.eql(unzipDestination);
+                return extract(zipDestination, unzipDestination);
+            })
+            .then((res) => {
+                res.should.be.an.Object().with.properties('path');
+                res.path.should.eql(unzipDestination);
 
-                        fs.readdir(unzipDestination, function (err, files) {
-                            if (err) {
-                                return done(err);
-                            }
+                return hashElement(unzipDestination);
+            })
+            .then((extractedHash) => {
+                originalHash.children.toString().should.eql(extractedHash.children.toString());
 
-                            files.length.should.eql(16);
-                            done();
-                        });
-                    })
-                    .catch((err) => {
-                        return done(err);
-                    });
+                done();
             })
             .catch((err) => {
                 return done(err);
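Note why the updated test compares originalHash.children rather than the root hashes: folder-hash appears to factor entry names into its hashes by default, and the source folder (the symlink path) and the extraction target have different names, so only the hashes of their children can be expected to match. A small sketch of that comparison pattern, with hypothetical directory paths:

// Sketch of the folder-hash comparison used in the updated test.
// Directory paths are hypothetical.
const {hashElement} = require('folder-hash');

Promise.all([
    hashElement('/tmp/original-theme'),
    hashElement('/tmp/extracted-theme')
]).then(([original, extracted]) => {
    // The root hashes differ because the folder names differ; comparing the
    // stringified child trees checks names and contents of everything inside.
    const identical = original.children.toString() === extracted.children.toString();
    console.log('contents identical:', identical);
});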