0
Fork 0
mirror of https://github.com/verdaccio/verdaccio.git synced 2025-02-17 23:45:29 -05:00

Refactor, remove eslint warnings; drop support for npm < 2.x, raising the minimum npm version to 2.15.x

This commit is contained in:
Juan Picado @jotadeveloper 2017-05-20 11:50:20 +02:00
parent ba6543a322
commit a1e6368c29
No known key found for this signature in database
GPG key ID: 18AC54485952D158
4 changed files with 167 additions and 141 deletions

View file

@ -15,9 +15,10 @@ let validate_name = Middleware.validate_name;
let validate_pkg = Middleware.validate_package;
module.exports = function(config, auth, storage) {
let app = express.Router();
let can = Middleware.allow(auth);
let notify = Notify.notify;
/* eslint new-cap:off */
const app = express.Router();
const can = Middleware.allow(auth);
const notify = Notify.notify;
// validate all of these params as a package name
// this might be too harsh, so ask if it causes trouble
@ -124,6 +125,23 @@ module.exports = function(config, auth, storage) {
*/
const respShouldBeArray = req.path.endsWith('/since');
res.set('Date', 'Mon, 10 Oct 1983 00:12:48 GMT');
// Close the response exactly once, and only after the input stream has
// ended and no package entries are still being processed.
const check_finish = function() {
  const ready = received_end && !processing_pkgs && !response_finished;
  if (!ready) {
    return;
  }
  response_finished = true;
  // Close the JSON array for `/since` responses, the object otherwise.
  res.end(respShouldBeArray ? ']\n' : '}\n');
};
if (respShouldBeArray) {
res.write('[');
@ -172,19 +190,6 @@ module.exports = function(config, auth, storage) {
received_end = true;
check_finish();
});
// Close the response exactly once, and only after the input stream has
// ended and no package entries are still being processed.
// (Kept as a function declaration: it is called before this point via hoisting.)
function check_finish() {
  if (received_end && !processing_pkgs && !response_finished) {
    response_finished = true;
    // Close the JSON array for `/since` responses, the object otherwise.
    res.end(respShouldBeArray ? ']\n' : '}\n');
  }
}
});
// placeholder 'cause npm require to be authenticated to publish
@ -216,13 +221,6 @@ module.exports = function(config, auth, storage) {
token: token,
});
} else {
if (typeof(req.body.name) !== 'string' || typeof(req.body.password) !== 'string') {
if (typeof(req.body.password_sha)) {
return next( Error[422]('your npm version is outdated\nPlease update to npm@1.4.5 or greater.\nSee https://github.com/rlidwka/sinopia/issues/93 for details.') );
} else {
return next( Error[422]('user/password is not found in request (npm issue?)') );
}
}
auth.add_user(req.body.name, req.body.password, function(err, user) {
if (err) {
if (err.status >= 400 && err.status < 500) {
@ -252,8 +250,10 @@ module.exports = function(config, auth, storage) {
});
});
function tag_package_version(req, res, next) {
if (typeof(req.body) !== 'string') return next('route');
const tag_package_version = function(req, res, next) {
if (typeof(req.body) !== 'string') {
return next('route');
}
let tags = {};
tags[req.params.tag] = req.body;
@ -262,7 +262,7 @@ module.exports = function(config, auth, storage) {
res.status(201);
return next({ok: 'package tagged'});
});
}
};
// tagging a package
app.put('/:package/:tag',
@ -292,8 +292,7 @@ module.exports = function(config, auth, storage) {
});
});
app.post('/-/package/:package/dist-tags',
can('publish'), media('application/json'), expect_json,
app.post('/-/package/:package/dist-tags', can('publish'), media('application/json'), expect_json,
function(req, res, next) {
storage.merge_tags(req.params.package, req.body, function(err) {
if (err) return next(err);
@ -302,8 +301,7 @@ module.exports = function(config, auth, storage) {
});
});
app.put('/-/package/:package/dist-tags',
can('publish'), media('application/json'), expect_json,
app.put('/-/package/:package/dist-tags', can('publish'), media('application/json'), expect_json,
function(req, res, next) {
storage.replace_tags(req.params.package, req.body, function(err) {
if (err) return next(err);
@ -312,8 +310,7 @@ module.exports = function(config, auth, storage) {
});
});
app.delete('/-/package/:package/dist-tags',
can('publish'), media('application/json'),
app.delete('/-/package/:package/dist-tags', can('publish'), media('application/json'),
function(req, res, next) {
storage.replace_tags(req.params.package, {}, function(err) {
if (err) return next(err);
@ -325,6 +322,79 @@ module.exports = function(config, auth, storage) {
// publishing a package
app.put('/:package/:_rev?/:revision?', can('publish'), media('application/json'), expect_json, function(req, res, next) {
let name = req.params.package;
let metadata;
// Stream the base64-encoded attachment into storage under this package
// (`name` comes from the enclosing publish handler); cb(err) on failure,
// cb() once the storage stream signals success.
const create_tarball = function(filename, data, cb) {
  const stream = storage.add_tarball(name, filename);
  stream.on('error', function(err) {
    cb(err);
  });
  stream.on('success', function() {
    cb();
  });
  // this is dumb and memory-consuming, but what choices do we have?
  // Buffer.from() replaces the deprecated, memory-unsafe `new Buffer()`.
  stream.end(Buffer.from(data.data, 'base64'));
  stream.done();
};
// Persist one version document for this package; `name` comes from the
// enclosing publish handler.
const create_version = (version, data, cb) => {
  storage.add_version(name, version, data, null, cb);
};
// Merge the uploaded dist-tags into the stored package document.
const add_tags = (tags, cb) => {
  storage.merge_tags(name, tags, cb);
};
// Finalize a publish: acknowledge metadata-only updates (old npm clients),
// or unpack the single tarball + version + dist-tags that npm-registry-client
// 0.3+ embeds in the JSON body, then answer 201.
// NOTE(review): relies on closure vars from the enclosing PUT handler
// (metadata, res, next, create_tarball, create_version, add_tags).
const after_change = function(err, ok_message) {
// old npm behaviour
if (metadata._attachments == null) {
if (err) return next(err);
res.status(201);
return next({ok: ok_message, success: true});
}
// npm-registry-client 0.3+ embeds tarball into the json upload
// https://github.com/isaacs/npm-registry-client/commit/e9fbeb8b67f249394f735c74ef11fe4720d46ca0
// issue https://github.com/rlidwka/sinopia/issues/31, dealing with it here:
// Exactly one attachment and exactly one version are expected per upload.
if (typeof(metadata._attachments) !== 'object'
|| Object.keys(metadata._attachments).length !== 1
|| typeof(metadata.versions) !== 'object'
|| Object.keys(metadata.versions).length !== 1) {
// npm is doing something strange again
// if this happens in normal circumstances, report it as a bug
return next( Error[400]('unsupported registry call') );
}
// 409 (conflict) is tolerated: the package document already existed.
if (err && err.status != 409) {
return next(err);
}
// at this point document is either created or existed before
const t1 = Object.keys(metadata._attachments)[0];
create_tarball(Path.basename(t1), metadata._attachments[t1], function(err) {
if (err) {
return next(err);
}
const t2 = Object.keys(metadata.versions)[0];
// readme must always be a string; a missing readme becomes ''.
metadata.versions[t2].readme = metadata.readme != null ? String(metadata.readme) : '';
create_version(t2, metadata.versions[t2], function(err) {
if (err) {
return next(err);
}
add_tags(metadata['dist-tags'], function(err) {
if (err) {
return next(err);
}
// tarball + version + tags all stored: notify hooks, acknowledge.
notify(metadata, config);
res.status(201);
return next({ok: ok_message, success: true});
});
});
});
};
if (Object.keys(req.body).length == 1 && Utils.is_object(req.body.users)) {
// 501 status is more meaningful, but npm doesn't show error message for 5xx
@ -332,7 +402,7 @@ module.exports = function(config, auth, storage) {
}
try {
var metadata = Utils.validate_metadata(req.body, name);
metadata = Utils.validate_metadata(req.body, name);
} catch(err) {
return next( Error[422]('bad incoming package data') );
}
@ -346,77 +416,14 @@ module.exports = function(config, auth, storage) {
after_change(err, 'created new package');
});
}
// Finalize a publish: acknowledge metadata-only updates (old npm clients),
// or unpack the single tarball + version + dist-tags that npm-registry-client
// 0.3+ embeds in the JSON body, then answer 201.
// NOTE(review): relies on closure vars from the enclosing PUT handler
// (metadata, res, next, create_tarball, create_version, add_tags).
function after_change(err, ok_message) {
// old npm behaviour
if (metadata._attachments == null) {
if (err) return next(err);
res.status(201);
return next({ok: ok_message, success: true});
}
// npm-registry-client 0.3+ embeds tarball into the json upload
// https://github.com/isaacs/npm-registry-client/commit/e9fbeb8b67f249394f735c74ef11fe4720d46ca0
// issue #31, dealing with it here:
// Exactly one attachment and exactly one version are expected per upload.
if (typeof(metadata._attachments) !== 'object'
|| Object.keys(metadata._attachments).length !== 1
|| typeof(metadata.versions) !== 'object'
|| Object.keys(metadata.versions).length !== 1) {
// npm is doing something strange again
// if this happens in normal circumstances, report it as a bug
return next( Error[400]('unsupported registry call') );
}
// 409 (conflict) is tolerated: the package document already existed.
if (err && err.status != 409) return next(err);
// at this point document is either created or existed before
let t1 = Object.keys(metadata._attachments)[0];
create_tarball(Path.basename(t1), metadata._attachments[t1], function(err) {
if (err) return next(err);
let t2 = Object.keys(metadata.versions)[0];
// readme must always be a string; a missing readme becomes ''.
metadata.versions[t2].readme = metadata.readme != null ? String(metadata.readme) : '';
create_version(t2, metadata.versions[t2], function(err) {
if (err) return next(err);
add_tags(metadata['dist-tags'], function(err) {
if (err) return next(err);
// tarball + version + tags all stored: notify hooks, acknowledge.
notify(metadata, config);
res.status(201);
return next({ok: ok_message, success: true});
});
});
});
}
// Stream the base64-encoded attachment into storage under this package
// (`name` comes from the enclosing publish handler); cb(err) on failure,
// cb() once the storage stream signals success.
function create_tarball(filename, data, cb) {
  const stream = storage.add_tarball(name, filename);
  stream.on('error', function(err) {
    cb(err);
  });
  stream.on('success', function() {
    cb();
  });
  // this is dumb and memory-consuming, but what choices do we have?
  // Buffer.from() replaces the deprecated, memory-unsafe `new Buffer()`.
  stream.end(Buffer.from(data.data, 'base64'));
  stream.done();
}
// Persist one version document for this package; `name` comes from the
// enclosing publish handler.
function create_version(version, data, cb) {
  storage.add_version(name, version, data, null, cb);
}
// Merge the uploaded dist-tags into the stored package document.
function add_tags(tags, cb) {
  storage.merge_tags(name, tags, cb);
}
});
});
// unpublishing an entire package
app.delete('/:package/-rev/*', can('publish'), function(req, res, next) {
storage.remove_package(req.params.package, function(err) {
if (err) return next(err);
if (err) {
return next(err);
}
res.status(201);
return next({ok: 'package removed'});
});
@ -425,7 +432,9 @@ module.exports = function(config, auth, storage) {
// removing a tarball
app.delete('/:package/-/:filename/-rev/:revision', can('publish'), function(req, res, next) {
storage.remove_tarball(req.params.package, req.params.filename, req.params.revision, function(err) {
if (err) return next(err);
if (err) {
return next(err);
}
res.status(201);
return next({ok: 'tarball removed'});
});
@ -433,9 +442,8 @@ module.exports = function(config, auth, storage) {
// uploading package tarball
app.put('/:package/-/:filename/*', can('publish'), media('application/octet-stream'), function(req, res, next) {
let name = req.params.package;
let stream = storage.add_tarball(name, req.params.filename);
const name = req.params.package;
const stream = storage.add_tarball(name, req.params.filename);
req.pipe(stream);
// checking if end event came before closing

View file

@ -14,6 +14,7 @@ let validate_name = Middleware.validate_name;
let validate_pkg = Middleware.validate_package;
module.exports = function(config, auth, storage) {
/* eslint new-cap:off */
let app = express.Router();
let can = Middleware.allow(auth);

View file

@ -1,3 +1,5 @@
/* eslint prefer-spread: "off" */
'use strict';
const fs = require('fs');
@ -5,24 +7,25 @@ const Error = require('http-errors');
const mkdirp = require('mkdirp');
const Path = require('path');
const MyStreams = require('./streams');
const locker = require('./file-locking');
// Build an fs-style error carrying a `code` property (e.g. 'EEXISTS',
// 'ENOENT') so callers can branch on err.code as with native fs errors.
// Fix: the diff interleave had nested this definition inside a dead
// `FSError` wrapper, leaving `fSError` unreachable at module scope.
const fSError = function(code) {
  const err = Error(code);
  err.code = code;
  return err;
};
let locker = require('./file-locking');
// Derive a temp-file name from `str` with a random .tmpNNNN suffix so
// concurrent writers never collide.
const tempFile = function(str) {
  const suffix = String(Math.random()).substr(2);
  return str + '.tmp' + suffix;
};
// Derive a temp-file name from `str` with a random .tmpNNNN suffix so
// concurrent writers never collide.
function tempFile(str) {
  return `${str}.tmp${String(Math.random()).substr(2)}`;
}
function renameTmp(src, dst, _cb) {
function cb(err) {
if (err) fs.unlink(src, function() {});
const renameTmp = function(src, dst, _cb) {
const cb = function(err) {
if (err) {
fs.unlink(src, function() {});
}
_cb(err);
}
};
if (process.platform !== 'win32') {
return fs.rename(src, dst, cb);
@ -33,11 +36,13 @@ function renameTmp(src, dst, _cb) {
let tmp = tempFile(dst);
fs.rename(dst, tmp, function(err) {
fs.rename(src, dst, cb);
if (!err) fs.unlink(tmp, () => {});
if (!err) {
fs.unlink(tmp, () => {});
}
});
}
};
function write(dest, data, cb) {
const write = function(dest, data, cb) {
let safe_write = function(cb) {
let tmpname = tempFile(dest);
fs.writeFile(tmpname, data, function(err) {
@ -56,18 +61,19 @@ function write(dest, data, cb) {
cb(err);
}
});
}
function write_stream(name) {
let stream = MyStreams.uploadTarballStream();
};
const write_stream = function(name) {
const stream = MyStreams.uploadTarballStream();
let _ended = 0;
stream.on('end', function() {
_ended = 1;
});
fs.exists(name, function(exists) {
if (exists) return stream.emit('error', FSError('EEXISTS'));
if (exists) {
return stream.emit('error', fSError('EEXISTS'));
}
let tmpname = name + '.tmp-'+String(Math.random()).replace(/^0\./, '');
let file = fs.createWriteStream(tmpname);
@ -75,7 +81,7 @@ function write_stream(name) {
stream.pipe(file);
stream.done = function() {
function onend() {
const onend = function() {
file.on('close', function() {
renameTmp(tmpname, name, function(err) {
if (err) {
@ -86,7 +92,7 @@ function write_stream(name) {
});
});
file.destroySoon();
}
};
if (_ended) {
onend();
} else {
@ -112,9 +118,9 @@ function write_stream(name) {
});
});
return stream;
}
};
function read_stream(name, stream, callback) {
const read_stream = function(name, stream, callback) {
let rstream = fs.createReadStream(name);
rstream.on('error', function(err) {
stream.emit('error', err);
@ -133,31 +139,37 @@ function read_stream(name, stream, callback) {
rstream.close();
};
return stream;
}
};
// Write `contents` to `name` only when the file does not exist yet;
// otherwise fail the callback with an EEXISTS error.
// Fix: the diff interleave had wrapped the real implementation inside a
// dead outer `create` declaration, making the export a no-op.
// NOTE(review): fs.exists is deprecated and racy (check-then-act); kept
// here to preserve existing behavior — consider fs.access/open flags.
const create = function(name, contents, callback) {
  fs.exists(name, function(exists) {
    if (exists) {
      return callback( fSError('EEXISTS') );
    }
    write(name, contents, callback);
  });
};
// Overwrite `name` only when the file already exists; otherwise fail the
// callback with an ENOENT error.
// Fix: the diff interleave had wrapped the real implementation inside a
// dead outer `update` declaration, making the export a no-op.
// NOTE(review): fs.exists is deprecated and racy (check-then-act); kept
// here to preserve existing behavior — consider fs.access/open flags.
const update = function(name, contents, callback) {
  fs.exists(name, function(exists) {
    if (!exists) {
      return callback( fSError('ENOENT') );
    }
    write(name, contents, callback);
  });
};
// Read raw file contents; thin wrapper over fs.readFile kept for symmetry
// with write()/create()/update().
// Fix: the diff interleave had nested the real implementation inside a
// dead outer `read` declaration, making `read` a no-op.
const read = function(name, callback) {
  fs.readFile(name, callback);
};
module.exports.read = read;
module.exports.read_json = function(name, cb) {
read(name, function(err, res) {
if (err) return cb(err);
if (err) {
return cb(err);
}
let args = [];
try {
@ -171,14 +183,18 @@ module.exports.read_json = function(name, cb) {
// Read `name` while holding a file lock; yields the raw contents.
// Fix: the diff interleave left a duplicated `if (err)` guard; only one
// check is needed.
module.exports.lock_and_read = function(name, cb) {
  locker.readFile(name, {lock: true}, function(err, res) {
    if (err) {
      return cb(err);
    }
    return cb(null, res);
  });
};
// Read `name` while holding a file lock and have the locker JSON-parse the
// contents before yielding them.
// Fix: the diff interleave left a duplicated `if (err)` guard; only one
// check is needed.
module.exports.lock_and_read_json = function(name, cb) {
  locker.readFile(name, {lock: true, parse: true}, function(err, res) {
    if (err) {
      return cb(err);
    }
    return cb(null, res);
  });
};

View file

@ -81,7 +81,8 @@
"build-docker:rpi": "docker build -f Dockerfile.rpi -t verdaccio:rpi ."
},
"engines": {
"node": ">=4.6.1"
"node": ">=4.6.1",
"npm": ">=2.15.9"
},
"preferGlobal": true,
"publishConfig": {