Mirror of https://github.com/verdaccio/verdaccio.git, synced 2025-01-20 22:52:46 -05:00
Merge pull request #176 from verdaccio/clean-eslint
Clean eslint in a couple of files
This commit is contained in: commit b264e9f3df
8 changed files with 569 additions and 391 deletions
445 lib/config.js
@@ -1,209 +1,260 @@
/* eslint prefer-rest-params: "off" */
/* eslint prefer-spread: "off" */

'use strict';

let assert = require('assert');
let Crypto = require('crypto');
let Error = require('http-errors');
let minimatch = require('minimatch');
let Path = require('path');
let LocalData = require('./local-data');
var Utils = require('./utils');
let pkginfo = require('pkginfo')(module); // eslint-disable-line no-unused-vars
let pkgVersion = module.exports.version;
let pkgName = module.exports.name;
const assert = require('assert');
const Crypto = require('crypto');
const Error = require('http-errors');
const minimatch = require('minimatch');
const Path = require('path');
const LocalData = require('./local-data');
const Utils = require('./utils');
const pkginfo = require('pkginfo')(module); // eslint-disable-line no-unused-vars
const pkgVersion = module.exports.version;
const pkgName = module.exports.name;

// [[a, [b, c]], d] -> [a, b, c, d]
/**
 * [[a, [b, c]], d] -> [a, b, c, d]
 * @param {*} array
 * @return {Array}
 */
function flatten(array) {
  let result = [];
  for (let i=0; i<array.length; i++) {
    if (Array.isArray(array[i])) {
      result.push.apply(result, flatten(array[i]));
    } else {
      result.push(array[i]);
    }
  }
  return result;
}

function Config(config) {
  let self = Object.create(Config.prototype);
  for (var i in config) {
    if (self[i] == null) self[i] = config[i];
  }
const parse_interval_table = {
  '': 1000,
  'ms': 1,
  's': 1000,
  'm': 60*1000,
  'h': 60*60*1000,
  'd': 86400000,
  'w': 7*86400000,
  'M': 30*86400000,
  'y': 365*86400000,
};

  if (!self.user_agent) {
    self.user_agent = `${pkgName}/${pkgVersion}`;
  }

  // some weird shell scripts are valid yaml files parsed as string
  assert.equal(typeof(config), 'object', 'CONFIG: it doesn\'t look like a valid config file');

  assert(self.storage, 'CONFIG: storage path not defined');
  self.localList = new LocalData(
    Path.join(
      Path.resolve(Path.dirname(self.self_path || ''), self.storage),
      '.sinopia-db.json'
    )
  );
  if (!self.secret) {
    self.secret = self.localList.data.secret;

    if (!self.secret) {
      self.secret = Crypto.pseudoRandomBytes(32).toString('hex');
      self.localList.data.secret = self.secret;
      self.localList.sync();
    }
  }

  let users = {
    'all': true,
    'anonymous': true,
    'undefined': true,
    'owner': true,
    'none': true,
  };

  let check_user_or_uplink = function(arg) {
    assert(arg !== 'all' && arg !== 'owner' && arg !== 'anonymous' && arg !== 'undefined' && arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg);
    assert(!arg.match(/\s/), 'CONFIG: invalid user name: ' + arg);
    assert(users[arg] == null, 'CONFIG: duplicate user/uplink name: ' + arg);
    users[arg] = true;
  }

  ;['users', 'uplinks', 'packages'].forEach(function(x) {
    if (self[x] == null) self[x] = {};
    assert(Utils.is_object(self[x]), 'CONFIG: bad "'+x+'" value (object expected)');
  });

  for (var i in self.users) {
    check_user_or_uplink(i);
  }
  for (var i in self.uplinks) {
    check_user_or_uplink(i);
  }

  for (var i in self.users) {
    assert(self.users[i].password, 'CONFIG: no password for user: ' + i);
    assert(typeof(self.users[i].password) === 'string' &&
           self.users[i].password.match(/^[a-f0-9]{40}$/)
         , 'CONFIG: wrong password format for user: ' + i + ', sha1 expected');
  }

  for (var i in self.uplinks) {
    assert(self.uplinks[i].url, 'CONFIG: no url for uplink: ' + i);
    assert( typeof(self.uplinks[i].url) === 'string'
          , 'CONFIG: wrong url format for uplink: ' + i);
    self.uplinks[i].url = self.uplinks[i].url.replace(/\/$/, '');
  }

  function normalize_userlist() {
    let result = [];

    for (let i=0; i<arguments.length; i++) {
      if (arguments[i] == null) continue;

      // if it's a string, split it to array
      if (typeof(arguments[i]) === 'string') {
        result.push(arguments[i].split(/\s+/));
      } else if (Array.isArray(arguments[i])) {
        result.push(arguments[i]);
      } else {
        throw Error('CONFIG: bad package acl (array or string expected): ' + JSON.stringify(arguments[i]));
      }
    }
    return flatten(result);
  }

  // add a default rule for all packages to make writing plugins easier
  if (self.packages['**'] == null) {
    self.packages['**'] = {};
  }

  for (var i in self.packages) {
    assert(
      typeof(self.packages[i]) === 'object' &&
      !Array.isArray(self.packages[i])
    , 'CONFIG: bad "'+i+'" package description (object expected)');

    self.packages[i].access = normalize_userlist(
      self.packages[i].allow_access,
      self.packages[i].access
    );
    delete self.packages[i].allow_access;

    self.packages[i].publish = normalize_userlist(
      self.packages[i].allow_publish,
      self.packages[i].publish
    );
    delete self.packages[i].allow_publish;

    self.packages[i].proxy = normalize_userlist(
      self.packages[i].proxy_access,
      self.packages[i].proxy
    );
    delete self.packages[i].proxy_access;
  }

  // loading these from ENV if they aren't in config
  ['http_proxy', 'https_proxy', 'no_proxy'].forEach((function(v) {
    if (!(v in self)) {
      self[v] = process.env[v] || process.env[v.toUpperCase()];
    }
  }));

  // unique identifier of self server (or a cluster), used to avoid loops
  if (!self.server_id) {
    self.server_id = Crypto.pseudoRandomBytes(6).toString('hex');
  }

  return self;
/**
 * Parse an interval string to a number
 * @param {*} interval
 * @return {Number}
 */
function parse_interval(interval) {
  if (typeof(interval) === 'number') {
    return interval * 1000;
  }
  let result = 0;
  let last_suffix = Infinity;
  interval.split(/\s+/).forEach(function(x) {
    if (!x) return;
    let m = x.match(/^((0|[1-9][0-9]*)(\.[0-9]+)?)(ms|s|m|h|d|w|M|y|)$/);
    if (!m
        || parse_interval_table[m[4]] >= last_suffix
        || (m[4] === '' && last_suffix !== Infinity)) {
      throw Error('invalid interval: ' + interval);
    }
    last_suffix = parse_interval_table[m[4]];
    result += Number(m[1]) * parse_interval_table[m[4]];
  });
  return result;
}

Config.prototype.can_proxy_to = function(pkg, uplink) {
  return (this.get_package_spec(pkg).proxy || []).reduce(function(prev, curr) {
    if (uplink === curr) return true;
    return prev;
  }, false);
};
/**
 * Coordinates the application configuration
 */
class Config {

Config.prototype.get_package_spec = function(pkg) {
  for (let i in this.packages) {
    if (minimatch.makeRe(i).exec(pkg)) {
      return this.packages[i];
    }
  }
  return {};
};
  /**
   * Constructor
   * @param {*} config the config content
   */
  constructor(config) {
    const self = this;
    for (let i in config) {
      if (self[i] == null) {
        self[i] = config[i];
      }
    }

    if (!self.user_agent) {
      self.user_agent = `${pkgName}/${pkgVersion}`;
    }

    // some weird shell scripts are valid yaml files parsed as string
    assert.equal(typeof(config), 'object', 'CONFIG: it doesn\'t look like a valid config file');

    assert(self.storage, 'CONFIG: storage path not defined');
    // local data handler is linked with the configuration handler
    self.localList = new LocalData(
      Path.join(
        Path.resolve(Path.dirname(self.self_path || ''), self.storage),
        // FUTURE: the database might be parameterizable from config.yaml
        '.sinopia-db.json'
      )
    );
    // it generates a secret key
    // FUTURE: this might be an external secret key, perhaps within the config file?
    if (!self.secret) {
      self.secret = self.localList.data.secret;
      if (!self.secret) {
        self.secret = Crypto.pseudoRandomBytes(32).toString('hex');
        self.localList.data.secret = self.secret;
        self.localList.sync();
      }
    }

    const users = {
      'all': true,
      'anonymous': true,
      'undefined': true,
      'owner': true,
      'none': true,
    };

    const check_user_or_uplink = function(arg) {
      assert(arg !== 'all' && arg !== 'owner'
        && arg !== 'anonymous' && arg !== 'undefined' && arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg);
      assert(!arg.match(/\s/), 'CONFIG: invalid user name: ' + arg);
      assert(users[arg] == null, 'CONFIG: duplicate user/uplink name: ' + arg);
      users[arg] = true;
    }
    // sanity check for strategic config properties
    ;['users', 'uplinks', 'packages'].forEach(function(x) {
      if (self[x] == null) self[x] = {};
      assert(Utils.is_object(self[x]), `CONFIG: bad "${x}" value (object expected)`);
    });
    // sanity check for users
    for (let i in self.users) {
      if (Object.prototype.hasOwnProperty.call(self.users, i)) {
        check_user_or_uplink(i);
      }
    }
    // sanity check for uplinks
    for (let i in self.uplinks) {
      if (Object.prototype.hasOwnProperty.call(self.uplinks, i)) {
        check_user_or_uplink(i);
      }
    }
    for (let i in self.users) {
      if (Object.prototype.hasOwnProperty.call(self.users, i)) {
        assert(self.users[i].password, 'CONFIG: no password for user: ' + i);
        assert(typeof(self.users[i].password) === 'string' &&
               self.users[i].password.match(/^[a-f0-9]{40}$/)
             , 'CONFIG: wrong password format for user: ' + i + ', sha1 expected');
      }
    }
    for (let i in self.uplinks) {
      if (Object.prototype.hasOwnProperty.call(self.uplinks, i)) {
        assert(self.uplinks[i].url, 'CONFIG: no url for uplink: ' + i);
        assert( typeof(self.uplinks[i].url) === 'string'
              , 'CONFIG: wrong url format for uplink: ' + i);
        self.uplinks[i].url = self.uplinks[i].url.replace(/\/$/, '');
      }
    }

    /**
     * Normalise user list.
     * @return {Array}
     */
    function normalize_userlist() {
      let result = [];

      for (let i=0; i<arguments.length; i++) {
        if (arguments[i] == null) continue;

        // if it's a string, split it to array
        if (typeof(arguments[i]) === 'string') {
          result.push(arguments[i].split(/\s+/));
        } else if (Array.isArray(arguments[i])) {
          result.push(arguments[i]);
        } else {
          throw Error('CONFIG: bad package acl (array or string expected): ' + JSON.stringify(arguments[i]));
        }
      }
      return flatten(result);
    }

    // add a default rule for all packages to make writing plugins easier
    if (self.packages['**'] == null) {
      self.packages['**'] = {};
    }

    for (let i in self.packages) {
      if (Object.prototype.hasOwnProperty.call(self.packages, i)) {
        assert(
          typeof(self.packages[i]) === 'object' &&
          !Array.isArray(self.packages[i])
        , 'CONFIG: bad "'+i+'" package description (object expected)');

        self.packages[i].access = normalize_userlist(
          self.packages[i].allow_access,
          self.packages[i].access
        );
        delete self.packages[i].allow_access;

        self.packages[i].publish = normalize_userlist(
          self.packages[i].allow_publish,
          self.packages[i].publish
        );
        delete self.packages[i].allow_publish;

        self.packages[i].proxy = normalize_userlist(
          self.packages[i].proxy_access,
          self.packages[i].proxy
        );
        delete self.packages[i].proxy_access;
      }
    }

    // loading these from ENV if they aren't in config
    ['http_proxy', 'https_proxy', 'no_proxy'].forEach((function(v) {
      if (!(v in self)) {
        self[v] = process.env[v] || process.env[v.toUpperCase()];
      }
    }));

    // unique identifier of self server (or a cluster), used to avoid loops
    if (!self.server_id) {
      self.server_id = Crypto.pseudoRandomBytes(6).toString('hex');
    }
  }

  /**
   * Check whether an uplink can proxy
   * @param {*} pkg
   * @param {*} uplink
   * @return {Boolean}
   */
  can_proxy_to(pkg, uplink) {
    return (this.get_package_spec(pkg).proxy || []).reduce(function(prev, curr) {
      if (uplink === curr) return true;
      return prev;
    }, false);
  }

  /**
   * Check for package spec
   * @param {*} pkg
   * @return {Object}
   */
  get_package_spec(pkg) {
    for (let i in this.packages) {
      if (minimatch.makeRe(i).exec(pkg)) {
        return this.packages[i];
      }
    }
    return {};
  }
}

module.exports = Config;
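For reference, a minimal sketch (not part of this change) of how the glob lookup in get_package_spec() and the proxy check in can_proxy_to() behave; the package map, package names, and uplink name below are hypothetical:

// sketch only: emulate get_package_spec() over a hypothetical packages map
const minimatch = require('minimatch');
const packages = {
  '@scope/*': {proxy: ['npmjs']}, // scoped packages may be proxied to the 'npmjs' uplink
  '**': {proxy: []},              // catch-all rule added by the constructor above
};
function spec(pkg) {
  for (let glob in packages) {
    if (minimatch.makeRe(glob).exec(pkg)) return packages[glob];
  }
  return {};
}
spec('@scope/foo').proxy.indexOf('npmjs') !== -1; // true: can_proxy_to() would allow the uplink
spec('plain-pkg').proxy.indexOf('npmjs') !== -1;  // false: the catch-all rule lists no uplinks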
let parse_interval_table = {
  '': 1000,
  'ms': 1,
  's': 1000,
  'm': 60*1000,
  'h': 60*60*1000,
  'd': 86400000,
  'w': 7*86400000,
  'M': 30*86400000,
  'y': 365*86400000,
};

module.exports.parse_interval = function(interval) {
  if (typeof(interval) === 'number') return interval * 1000;

  let result = 0;
  let last_suffix = Infinity;
  interval.split(/\s+/).forEach(function(x) {
    if (!x) return;
    let m = x.match(/^((0|[1-9][0-9]*)(\.[0-9]+)?)(ms|s|m|h|d|w|M|y|)$/);
    if (!m
        || parse_interval_table[m[4]] >= last_suffix
        || (m[4] === '' && last_suffix !== Infinity)) {
      throw Error('invalid interval: ' + interval);
    }
    last_suffix = parse_interval_table[m[4]];
    result += Number(m[1]) * parse_interval_table[m[4]];
  });
  return result;
};
module.exports.parse_interval = parse_interval;
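A quick illustration (not part of the diff) of how parse_interval evaluates its input, following the suffix table above:

parse_interval(2);        // 2000: plain numbers are treated as seconds
parse_interval('2m 30s'); // 150000 = 2*60*1000 + 30*1000
parse_interval('1h 10m'); // 4200000
parse_interval('10s 1m'); // throws 'invalid interval': units must go from larger to smaller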
@@ -13,7 +13,7 @@ let Storage = require('./storage');
module.exports = function(config_hash) {
  Logger.setup(config_hash.logs);

  let config = Config(config_hash);
  let config = new Config(config_hash);
  let storage = new Storage(config);
  let auth = Auth(config);
  let app = express();
@@ -1,7 +1,12 @@
'use strict';

let Path = require('path');
const Path = require('path');

/**
 * Requires a module.
 * @param {*} path the module's path
 * @return {Object}
 */
function try_load(path) {
  try {
    return require(path);

@@ -13,6 +18,17 @@ function try_load(path) {
  }
}

/**
 * Load a plugin following the rules:
 * - First try to load from the internal plugins directory (which will disappear sooner or later).
 * - A second attempt from node_modules, in case there are multiple matches, for instance verdaccio-ldap
 *   and sinopia-ldap. All verdaccio prefixes take preference.
 * @param {*} config a reference of the configuration settings
 * @param {*} plugin_configs
 * @param {*} params a set of params to initialise the plugin
 * @param {*} sanity_check callback that checks the shape the plugin should fulfill
 * @return {Array} list of plugins
 */
function load_plugins(config, plugin_configs, params, sanity_check) {
  let plugins = Object.keys(plugin_configs || {}).map(function(p) {
    let plugin;
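The hunk is cut off here; as a hedged sketch only, the resolution order described by the JSDoc above could look roughly like this for a plugin entry named p (the exact paths and error message are assumptions, not taken from the diff):

let plugin = try_load(Path.resolve(__dirname, 'plugins', p)) // 1. bundled plugins directory (assumed path)
          || try_load(`verdaccio-${p}`)                      // 2. node_modules, verdaccio-* preferred
          || try_load(`sinopia-${p}`);                       // 3. legacy sinopia-* fallback
if (plugin === null) {
  throw Error('CONFIG: unable to load plugin: ' + p);        // illustrative error only
}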
@@ -1,8 +1,17 @@
/* eslint no-invalid-this: "off" */

'use strict';

const lunr = require('lunr');

/**
 * Handle the search Indexer.
 */
class Search {

  /**
   * Constructor.
   */
  constructor() {
    this.index = lunr(function() {
      this.field('name', {boost: 10});

@@ -12,6 +21,13 @@ class Search {
    });
  }

  /**
   * Performs a query to the indexer.
   * If the keyword is a * it returns all local elements,
   * otherwise it performs a search.
   * @param {*} q the keyword
   * @return {Array} list of results.
   */
  query(q) {
    return q === '*'
      ? this.storage.config.localList.get().map( function( pkg ) {

@@ -19,6 +35,10 @@ class Search {
      }) : this.index.search(q);
  }

  /**
   * Add a new element to index
   * @param {*} pkg the package
   */
  add(pkg) {
    this.index.add({
      id: pkg.name,

@@ -28,10 +48,17 @@ class Search {
    });
  }

  /**
   * Remove an element from the index.
   * @param {*} name the id element
   */
  remove(name) {
    this.index.remove({id: name});
  }

  /**
   * Force a reindex.
   */
  reindex() {
    let self = this;
    this.storage.get_local(function(err, packages) {

@@ -43,6 +70,10 @@ class Search {
    });
  }

  /**
   * Set up the {Storage}
   * @param {*} storage A storage reference.
   */
  configureStorage(storage) {
    this.storage = storage;
    this.reindex();
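Typical usage, as exercised by the test at the end of this diff (the test suggests the module exports a single Search instance; any add() fields beyond name are assumptions, since the indexed fields are partly elided above):

const Search = require('./search');
Search.configureStorage(storage);   // wires the storage handler and triggers reindex()
Search.add({name: 'verdaccio-ldap', version: '1.0.0'}); // indexes the package under id: pkg.name
Search.query('ldap');               // lunr search over the indexed fields
Search.query('*');                  // special case: every package in the local list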
@@ -1,3 +1,5 @@
/* eslint prefer-rest-params: "off" */

'use strict';

// see https://secure.flickr.com/photos/girliemac/sets/72157628409467125
@@ -1,16 +1,15 @@
'use strict';

let Stream = require('stream');
let Util = require('util');
const Stream = require('stream');
const Util = require('util');

module.exports.ReadTarballStream = ReadTarball;
module.exports.UploadTarballStream = UploadTarball;

//
// This stream is used to read tarballs from repository
//
/**
 * This stream is used to read tarballs from repository.
 * @param {*} options
 * @return {Object}
 */
function ReadTarball(options) {
  let self = new Stream.PassThrough(options);
  const self = new Stream.PassThrough(options);
  Object.setPrototypeOf(self, ReadTarball.prototype);

  // called when data is not needed anymore

@@ -21,11 +20,13 @@ function ReadTarball(options) {

Util.inherits(ReadTarball, Stream.PassThrough);

//
// This stream is used to upload tarballs to a repository
//
/**
 * This stream is used to upload tarballs to a repository.
 * @param {*} options
 * @return {Object}
 */
function UploadTarball(options) {
  let self = new Stream.PassThrough(options);
  const self = new Stream.PassThrough(options);
  Object.setPrototypeOf(self, UploadTarball.prototype);

  // called when user closes connection before upload finishes

@@ -39,10 +40,12 @@ function UploadTarball(options) {

Util.inherits(UploadTarball, Stream.PassThrough);

//
// This function intercepts abstract calls and replays them allowing
// us to attach those functions after we are ready to do so
//
/**
 * This function intercepts abstract calls and replays them, allowing
 * us to attach those functions after we are ready to do so.
 * @param {*} self
 * @param {*} name
 */
function add_abstract_method(self, name) {
  self._called_methods = self._called_methods || {};
  self.__defineGetter__(name, function() {

@@ -60,3 +63,5 @@ function add_abstract_method(self, name) {
  });
}

module.exports.ReadTarballStream = ReadTarball;
module.exports.UploadTarballStream = UploadTarball;
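A hedged illustration of the intercept-and-replay mechanism in add_abstract_method(); 'abort' is used as an example method name here, since the lines registering the concrete abstract methods are elided from the hunk:

const tarball = new UploadTarball();
tarball.abort();              // no real handler attached yet: the call is only recorded
tarball.abort = function() {  // once a handler is assigned...
  console.log('aborted');     // ...the recorded call is replayed against it
};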
421 lib/utils.js
@@ -1,197 +1,270 @@
'use strict';

let assert = require('assert');
let Semver = require('semver');
let URL = require('url');
let Logger = require('./logger');
const assert = require('assert');
const Semver = require('semver');
const URL = require('url');
const Logger = require('./logger');

module.exports.validate_package = function(name) {
  name = name.split('/', 2);
  if (name.length === 1) {
    // normal package
    return module.exports.validate_name(name[0]);
  } else {
    // scoped package
    return name[0][0] === '@'
      && module.exports.validate_name(name[0].slice(1))
      && module.exports.validate_name(name[1]);
  }
};
/**
 * Validate a package.
 * @param {*} name
 * @return {Boolean} whether the package is valid or not
 */
function validate_package(name) {
  name = name.split('/', 2);
  if (name.length === 1) {
    // normal package
    return module.exports.validate_name(name[0]);
  } else {
    // scoped package
    return name[0][0] === '@'
      && module.exports.validate_name(name[0].slice(1))
      && module.exports.validate_name(name[1]);
  }
}

// from normalize-package-data/lib/fixer.js
module.exports.validate_name = function(name) {
  if (typeof(name) !== 'string') return false;
  name = name.toLowerCase();
/**
 * From normalize-package-data/lib/fixer.js
 * @param {*} name the package name
 * @return {Boolean} whether it is valid or not
 */
function validate_name(name) {
  if (typeof(name) !== 'string') {
    return false;
  }
  name = name.toLowerCase();

  // all URL-safe characters and "@" for issue #75
  if (!name.match(/^[-a-zA-Z0-9_.!~*'()@]+$/)
      || name.charAt(0) === '.' // ".bin", etc.
      || name.charAt(0) === '-' // "-" is reserved by couchdb
      || name === 'node_modules'
      || name === '__proto__'
      || name === 'package.json'
      || name === 'favicon.ico'
  ) {
    return false;
  } else {
    return true;
  }
};
  // all URL-safe characters and "@" for issue #75
  if (!name.match(/^[-a-zA-Z0-9_.!~*'()@]+$/)
      || name.charAt(0) === '.' // ".bin", etc.
      || name.charAt(0) === '-' // "-" is reserved by couchdb
      || name === 'node_modules'
      || name === '__proto__'
      || name === 'package.json'
      || name === 'favicon.ico'
  ) {
    return false;
  } else {
    return true;
  }
}
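A few examples (illustration only) of what the validators above accept and reject:

validate_name('some-package');   // true
validate_name('.bin');           // false: names cannot start with '.'
validate_name('node_modules');   // false: reserved name
validate_package('@scope/pkg');  // true: the scope and the name are validated separately
validate_package('@scope/.bin'); // false: the name part fails validation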
module.exports.is_object = function(obj) {
  return typeof(obj) === 'object' && obj !== null && !Array.isArray(obj);
};
/**
 * Check whether an element is an Object
 * @param {*} obj the element
 * @return {Boolean}
 */
function is_object(obj) {
  return typeof(obj) === 'object' && obj !== null && !Array.isArray(obj);
}

module.exports.validate_metadata = function(object, name) {
  assert(module.exports.is_object(object), 'not a json object');
  assert.equal(object.name, name);
/**
 * Validate the package metadata, add additional properties whether they are missing within
 * the metadata properties.
 * @param {*} object
 * @param {*} name
 * @return {Object} the object with additional properties such as dist-tags and versions
 */
function validate_metadata(object, name) {
  assert(module.exports.is_object(object), 'not a json object');
  assert.equal(object.name, name);

  if (!module.exports.is_object(object['dist-tags'])) {
    object['dist-tags'] = {};
  }

  if (!module.exports.is_object(object['versions'])) {
    object['versions'] = {};
  }

  return object;
};
  return object;
}

module.exports.filter_tarball_urls = function(pkg, req, config) {
  function filter(_url) {
    if (!req.headers.host) return _url;
/**
 * Iterate a package's versions and filter each original tarball url.
 * @param {*} pkg
 * @param {*} req
 * @param {*} config
 * @return {String} a filtered package
 */
function filter_tarball_urls(pkg, req, config) {
  /**
   * Filter a tarball url.
   * @param {*} _url
   * @return {String} a parsed url
   */
  const filter = function(_url) {
    if (!req.headers.host) {
      return _url;
    }
    const filename = URL.parse(_url).pathname.replace(/^.*\//, '');
    let result;
    if (config.url_prefix != null) {
      result = config.url_prefix.replace(/\/$/, '');
    } else {
      result = `${req.protocol}://${req.headers.host}`;
    }
    return `${result}/${pkg.name.replace(/\//g, '%2f')}/-/${filename}`;
  };

  let filename = URL.parse(_url).pathname.replace(/^.*\//, '');
  for (let ver in pkg.versions) {
    if (Object.prototype.hasOwnProperty.call(pkg.versions, ver)) {
      const dist = pkg.versions[ver].dist;
      if (dist != null && dist.tarball != null) {
        // dist.__verdaccio_orig_tarball = dist.tarball
        dist.tarball = filter(dist.tarball);
      }
    }
  }
  return pkg;
}
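Illustration only: given a request that reached this registry, the rewritten tarball url points back at it (all values below are hypothetical):

const pkg = {
  name: '@scope/pkg',
  versions: {'1.0.0': {dist: {tarball: 'https://registry.npmjs.org/@scope/pkg/-/pkg-1.0.0.tgz'}}},
};
const req = {protocol: 'http', headers: {host: 'localhost:4873'}};
filter_tarball_urls(pkg, req, {});
// pkg.versions['1.0.0'].dist.tarball is now
// 'http://localhost:4873/@scope%2fpkg/-/pkg-1.0.0.tgz'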
  if (config.url_prefix != null) {
    var result = config.url_prefix.replace(/\/$/, '');
  } else {
    var result = req.protocol + '://' + req.headers.host;
  }
/**
 * Create a tag for a package
 * @param {*} data
 * @param {*} version
 * @param {*} tag
 * @return {Boolean} whether a package has been tagged
 */
function tag_version(data, version, tag) {
  if (tag) {
    if (data['dist-tags'][tag] !== version) {
      if (Semver.parse(version, true)) {
        // valid version - store
        data['dist-tags'][tag] = version;
        return true;
      }
    }
    Logger.logger.warn({ver: version, tag: tag}, 'ignoring bad version @{ver} in @{tag}');
    if (tag && data['dist-tags'][tag]) {
      delete data['dist-tags'][tag];
    }
  }
  return false;
}

  return `${result}/${pkg.name.replace(/\//g, '%2f')}/-/${filename}`;
}
/**
 * Gets version from a package object taking into account semver weirdness.
 * @param {*} object
 * @param {*} version
 * @return {String} return the semantic version of a package
 */
function get_version(object, version) {
  // this condition must allow cast
  if (object.versions[version] != null) {
    return object.versions[version];
  }
  try {
    version = Semver.parse(version, true);
    for (let k in object.versions) {
      if (version.compare(Semver.parse(k, true)) === 0) {
        return object.versions[k];
      }
    }
  } catch (err) {
    return undefined;
  }
}

  for (let ver in pkg.versions) {
    let dist = pkg.versions[ver].dist;
    if (dist != null && dist.tarball != null) {
      // dist.__verdaccio_orig_tarball = dist.tarball
      dist.tarball = filter(dist.tarball);
    }
  }
  return pkg;
};
/**
 * Parse an internet address
 * Allow:
 - https:localhost:1234 - protocol + host + port
 - localhost:1234 - host + port
 - 1234 - port
 - http::1234 - protocol + port
 - https://localhost:443/ - full url + https
 - http://[::1]:443/ - ipv6
 - unix:/tmp/http.sock - unix sockets
 - https://unix:/tmp/http.sock - unix sockets (https)
 * @param {*} addr the internet address definition
 * @return {Object|Null} literal object that represents the parsed address
 */
function parse_address(addr) {
  //
  // TODO: refactor it to something more reasonable?
  //
  // protocol : // ( host )|( ipv6 ): port /
  let m = /^((https?):(\/\/)?)?((([^\/:]*)|\[([^\[\]]+)\]):)?(\d+)\/?$/.exec(addr);

module.exports.tag_version = function(data, version, tag) {
  if (tag) {
    if (data['dist-tags'][tag] !== version) {
      if (Semver.parse(version, true)) {
        // valid version - store
        data['dist-tags'][tag] = version;
        return true;
      }
    }
    Logger.logger.warn({ver: version, tag: tag}, 'ignoring bad version @{ver} in @{tag}');
    if (tag && data['dist-tags'][tag]) {
      delete data['dist-tags'][tag];
    }
  }
  return false;
};
  if (m) return {
    proto: m[2] || 'http',
    host: m[6] || m[7] || 'localhost',
    port: m[8] || '4873',
  };

// gets version from a package object taking into account semver weirdness
module.exports.get_version = function(object, version) {
  if (object.versions[version] != null) return object.versions[version];
  m = /^((https?):(\/\/)?)?unix:(.*)$/.exec(addr);

  try {
    version = Semver.parse(version, true);
    for (let k in object.versions) {
      if (version.compare(Semver.parse(k, true)) === 0) {
        return object.versions[k];
      }
    }
  } catch (err) {
    return undefined;
  }
};
  if (m) {
    return {
      proto: m[2] || 'http',
      path: m[4],
    };
  }

module.exports.parse_address = function parse_address(addr) {
  //
  // Allow:
  //
  // - https:localhost:1234 - protocol + host + port
  // - localhost:1234 - host + port
  // - 1234 - port
  // - http::1234 - protocol + port
  // - https://localhost:443/ - full url + https
  // - http://[::1]:443/ - ipv6
  // - unix:/tmp/http.sock - unix sockets
  // - https://unix:/tmp/http.sock - unix sockets (https)
  //
  // TODO: refactor it to something more reasonable?
  //
  // protocol : // ( host )|( ipv6 ): port /
  var m = /^((https?):(\/\/)?)?((([^\/:]*)|\[([^\[\]]+)\]):)?(\d+)\/?$/.exec(addr);
  return null;
}

  if (m) return {
    proto: m[2] || 'http',
    host: m[6] || m[7] || 'localhost',
    port: m[8] || '4873',
  };
/**
 * Function filters out bad semver versions and sorts the array.
 * @param {*} array
 * @return {Array} sorted Array
 */
function semver_sort(array) {
  return array
    .filter(function(x) {
      if (!Semver.parse(x, true)) {
        Logger.logger.warn( {ver: x}, 'ignoring bad version @{ver}' );
        return false;
      }
      return true;
    })
    .sort(Semver.compareLoose)
    .map(String);
}

  var m = /^((https?):(\/\/)?)?unix:(.*)$/.exec(addr);
/**
 * Flatten arrays of tags.
 * @param {*} data
 */
function normalize_dist_tags(data) {
  let sorted;
  if (!data['dist-tags'].latest) {
    // overwrite latest with highest known version based on semver sort
    sorted = module.exports.semver_sort(Object.keys(data.versions));
    if (sorted && sorted.length) {
      data['dist-tags'].latest = sorted.pop();
    }
  }

  if (m) return {
    proto: m[2] || 'http',
    path: m[4],
  };
  for (let tag in data['dist-tags']) {
    if (Array.isArray(data['dist-tags'][tag])) {
      if (data['dist-tags'][tag].length) {
        // sort array
        sorted = module.exports.semver_sort(data['dist-tags'][tag]);
        if (sorted.length) {
          // use highest version based on semver sort
          data['dist-tags'][tag] = sorted.pop();
        }
      } else {
        delete data['dist-tags'][tag];
      }
    } else if (typeof data['dist-tags'][tag] === 'string') {
      if (!Semver.parse(data['dist-tags'][tag], true)) {
        // if the version is invalid, delete the dist-tag entry
        delete data['dist-tags'][tag];
      }
    }
  }
}

  return null;
};
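Illustration only of the address formats accepted above and the literal objects they produce:

parse_address('4873');                   // {proto: 'http', host: 'localhost', port: '4873'}
parse_address('https://localhost:443/'); // {proto: 'https', host: 'localhost', port: '443'}
parse_address('http://[::1]:443/');      // {proto: 'http', host: '::1', port: '443'}
parse_address('unix:/tmp/http.sock');    // {proto: 'http', path: '/tmp/http.sock'}
parse_address('not an address');         // null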
// function filters out bad semver versions and sorts the array
module.exports.semver_sort = function semver_sort(array) {
  return array
    .filter(function(x) {
      if (!Semver.parse(x, true)) {
        Logger.logger.warn( {ver: x}, 'ignoring bad version @{ver}' );
        return false;
      }
      return true;
    })
    .sort(Semver.compareLoose)
    .map(String);
};

// flatten arrays of tags
module.exports.normalize_dist_tags = function(data) {
  let sorted;

  if (!data['dist-tags'].latest) {
    // overwrite latest with highest known version based on semver sort
    sorted = module.exports.semver_sort(Object.keys(data.versions));
    if (sorted && sorted.length) {
      data['dist-tags'].latest = sorted.pop();
    }
  }

  for (let tag in data['dist-tags']) {
    if (Array.isArray(data['dist-tags'][tag])) {
      if (data['dist-tags'][tag].length) {
        // sort array
        sorted = module.exports.semver_sort(data['dist-tags'][tag]);
        if (sorted.length) {
          // use highest version based on semver sort
          data['dist-tags'][tag] = sorted.pop();
        }
      } else {
        delete data['dist-tags'][tag];
      }
    } else if (typeof data['dist-tags'][tag] === 'string') {
      if (!Semver.parse(data['dist-tags'][tag], true)) {
        // if the version is invalid, delete the dist-tag entry
        delete data['dist-tags'][tag];
      }
    }
  }
};
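Illustration only of the two helpers above working together on a hypothetical metadata object:

semver_sort(['1.0.1', 'not-a-version', '0.9.0']); // ['0.9.0', '1.0.1'] (the bad version is logged and dropped)

const data = {versions: {'0.9.0': {}, '1.0.1': {}}, 'dist-tags': {beta: ['0.9.0', '1.0.1'], bad: 'oops'}};
normalize_dist_tags(data);
// data['dist-tags'] is now {latest: '1.0.1', beta: '1.0.1'}:
// latest was filled from the highest known version, the array tag was collapsed
// to its highest entry, and the invalid 'bad' tag was deleted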
module.exports.semver_sort = semver_sort;
module.exports.parse_address = parse_address;
module.exports.get_version = get_version;
module.exports.normalize_dist_tags = normalize_dist_tags;
module.exports.tag_version = tag_version;
module.exports.filter_tarball_urls = filter_tarball_urls;
module.exports.validate_metadata = validate_metadata;
module.exports.is_object = is_object;
module.exports.validate_name = validate_name;
module.exports.validate_package = validate_package;
@@ -34,7 +34,7 @@ let packages = [

describe('search', function() {
  before(function() {
    let config = Config(config_hash);
    let config = new Config(config_hash);
    this.storage = new Storage(config);
    Search.configureStorage(this.storage);
    packages.map(function(item) {