Mirror of https://github.com/verdaccio/verdaccio.git (synced 2025-01-06 22:40:26 -05:00)
clean up phase, moving es6 local storage
parent d66aa7097a
commit d484bb4f2f
8 changed files with 96 additions and 86 deletions
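The changes are a mechanical ES6 clean-up: module-level `var` declarations become `const`, string concatenation becomes template literals, single-line `if` statements gain braces, `var` loop counters become `let`, and callbacks that needed a `var self = this` alias become arrow functions. A minimal before/after sketch of the pattern (values and variable names here are illustrative, not taken from the diff):

    "use strict";

    // before
    var pkgNameOld = 'example'
    var pkgVersionOld = '1.0.0'
    var userAgentOld = pkgNameOld + '/' + pkgVersionOld

    // after
    const pkgName = 'example';
    const pkgVersion = '1.0.0';
    const userAgent = `${pkgName}/${pkgVersion}`;

    console.log(userAgentOld === userAgent)   // true: same result, ES6 syntax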
lib/auth.js (10 lines changed)

@@ -1,8 +1,8 @@
-var Crypto = require('crypto')
-var jju = require('jju')
-var Error = require('http-errors')
-var Logger = require('./logger')
-var load_plugins = require('./plugin-loader').load_plugins
+const Crypto = require('crypto')
+const jju = require('jju')
+const Error = require('http-errors')
+const Logger = require('./logger')
+const load_plugins = require('./plugin-loader').load_plugins;
 
 module.exports = Auth
 
@@ -28,7 +28,10 @@ function Config(config) {
   for (var i in config) {
     if (self[i] == null) self[i] = config[i]
   }
-  if (!self.user_agent) self.user_agent = pkgName + '/' + pkgVersion
+
+  if (!self.user_agent) {
+    self.user_agent = `${pkgName}/${pkgVersion}`;
+  }
 
   // some weird shell scripts are valid yaml files parsed as string
   assert.equal(typeof(config), 'object', 'CONFIG: it doesn\'t look like a valid config file')

@@ -50,7 +53,13 @@ function Config(config) {
     }
   }
 
-  var users = {all:true, anonymous:true, 'undefined':true, owner:true, none:true}
+  var users = {
+    all: true,
+    anonymous: true,
+    'undefined': true,
+    owner: true,
+    none: true
+  };
 
   var check_user_or_uplink = function(arg) {
     assert(arg !== 'all' && arg !== 'owner' && arg !== 'anonymous' && arg !== 'undefined' && arg !== 'none', 'CONFIG: reserved user/uplink name: ' + arg)

@@ -64,13 +73,16 @@ function Config(config) {
     assert(Utils.is_object(self[x]), 'CONFIG: bad "'+x+'" value (object expected)')
   })
 
-  for (var i in self.users) check_user_or_uplink(i)
-  for (var i in self.uplinks) check_user_or_uplink(i)
+  for (var i in self.users) {
+    check_user_or_uplink(i);
+  }
+  for (var i in self.uplinks) {
+    check_user_or_uplink(i);
+  }
 
   for (var i in self.users) {
     assert(self.users[i].password, 'CONFIG: no password for user: ' + i)
-    assert(
-      typeof(self.users[i].password) === 'string' &&
+    assert(typeof(self.users[i].password) === 'string' &&
       self.users[i].password.match(/^[a-f0-9]{40}$/)
     , 'CONFIG: wrong password format for user: ' + i + ', sha1 expected')
   }

@@ -1,3 +1,5 @@
+"use strict";
+
 var assert = require('assert')
 var async = require('async')
 var Crypto = require('crypto')

@@ -63,14 +65,13 @@ Storage.prototype.add_package = function(name, info, callback) {
 }
 
 Storage.prototype.remove_package = function(name, callback) {
-  var self = this
-  self.logger.info( { name: name }
+  this.logger.info( { name: name }
   , 'unpublishing @{name} (all)')
 
-  var storage = self.storage(name)
+  var storage = this.storage(name)
   if (!storage) return callback( Error[404]('no such package available') )
 
-  storage.read_json(info_file, function(err, data) {
+  storage.read_json(info_file, (err, data) => {
     if (err) {
       if (err.code === 'ENOENT') {
         return callback( Error[404]('no such package available') )

@@ -78,7 +79,7 @@ Storage.prototype.remove_package = function(name, callback) {
         return callback(err)
       }
     }
-    self._normalize_package(data)
+    this._normalize_package(data)
 
     storage.unlink(info_file, function(err) {
       if (err) return callback(err)
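The hunks in this file drop `var self = this` and turn node-style callbacks into arrow functions. That works because an arrow function has no `this` of its own and uses the `this` of the enclosing method, so the alias is no longer needed. A standalone sketch of the difference (the `Registry` constructor is illustrative, not verdaccio's Storage):

    "use strict";

    function Registry(name) { this.name = name }     // illustrative constructor

    Registry.prototype.oldStyle = function (cb) {
      var self = this                                 // alias needed: a plain function gets its own `this`
      setTimeout(function () {
        cb(self.name)
      }, 0)
    }

    Registry.prototype.newStyle = function (cb) {
      setTimeout(() => {
        cb(this.name)                                 // arrow function keeps the instance `this`
      }, 0)
    }

    new Registry('demo').newStyle(console.log)        // prints "demo"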

@@ -108,24 +109,23 @@ Storage.prototype.remove_package = function(name, callback) {
 }
 
 Storage.prototype._read_create_package = function(name, callback) {
-  var self = this
-  var storage = self.storage(name)
+  var storage = this.storage(name)
   if (!storage) {
     var data = get_boilerplate(name)
-    self._normalize_package(data)
+    this._normalize_package(data)
     return callback(null, data)
   }
-  storage.read_json(info_file, function(err, data) {
+  storage.read_json(info_file, (err, data) => {
     // TODO: race condition
     if (err) {
       if (err.code === 'ENOENT') {
         // if package doesn't exist, we create it here
         data = get_boilerplate(name)
       } else {
-        return callback(self._internal_error(err, info_file, 'error reading'))
+        return callback(this._internal_error(err, info_file, 'error reading'))
       }
     }
-    self._normalize_package(data)
+    this._normalize_package(data)
     callback(null, data)
   })
 }

@@ -133,8 +133,7 @@ Storage.prototype._read_create_package = function(name, callback) {
 // synchronize remote package info with the local one
 // TODO: readfile called twice
 Storage.prototype.update_versions = function(name, newdata, callback) {
-  var self = this
-  self._read_create_package(name, function(err, data) {
+  this._read_create_package(name, (err, data) => {
     if (err) return callback(err)
 
     var change = false

@@ -164,7 +163,7 @@ Storage.prototype.update_versions = function(name, newdata, callback) {
         //
         // see https://github.com/rlidwka/sinopia/issues/166
         var tarball_url = URL.parse(hash.url)
-        var uplink_url = URL.parse(self.config.uplinks[verdata._verdaccio_uplink].url)
+        var uplink_url = URL.parse(this.config.uplinks[verdata._verdaccio_uplink].url)
         if (uplink_url.host === tarball_url.host) {
           tarball_url.protocol = uplink_url.protocol
           hash.registry = verdata._verdaccio_uplink

@@ -197,8 +196,8 @@ Storage.prototype.update_versions = function(name, newdata, callback) {
     }
 
     if (change) {
-      self.logger.debug('updating package info')
-      self._write_package(name, data, function(err) {
+      this.logger.debug('updating package info')
+      this._write_package(name, data, function(err) {
         callback(err, data)
       })
     } else {

@@ -208,8 +207,7 @@ Storage.prototype.update_versions = function(name, newdata, callback) {
 }
 
 Storage.prototype.add_version = function(name, version, metadata, tag, callback) {
-  var self = this
-  self.update_package(name, function updater(data, cb) {
+  this.update_package(name, (data, cb) => {
     // keep only one readme per package
     data.readme = metadata.readme
     delete metadata.readme
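Several of the storage methods below funnel their writes through `update_package(name, updater, callback)`, whose body is not part of this diff. The sketch below only illustrates the general read-modify-write shape such a helper has, with made-up file handling; it is not verdaccio's implementation:

    "use strict";
    const fs = require('fs');

    // Illustrative read-modify-write helper in the spirit of update_package.
    function updatePackage(file, updater, callback) {
      fs.readFile(file, 'utf8', (err, raw) => {
        if (err) return callback(err);
        const data = JSON.parse(raw);
        updater(data, (updateErr) => {              // updater mutates `data`, then signals completion
          if (updateErr) return callback(updateErr);
          fs.writeFile(file, JSON.stringify(data), callback);
        });
      });
    }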

@@ -236,16 +234,14 @@ Storage.prototype.add_version = function(name, version, metadata, tag, callback)
 
     data.versions[version] = metadata
     Utils.tag_version(data, version, tag)
-    self.config.localList.add(name)
+    this.config.localList.add(name)
     cb()
   }, callback)
 }
 
 Storage.prototype.merge_tags = function(name, tags, callback) {
-  var self = this
-
-  self.update_package(name, function updater(data, cb) {
-    for (var t in tags) {
+  this.update_package(name, function updater(data, cb) {
+    for (let t in tags) {
       if (tags[t] === null) {
         delete data['dist-tags'][t]
         continue

@@ -262,12 +258,10 @@ Storage.prototype.merge_tags = function(name, tags, callback) {
 }
 
 Storage.prototype.replace_tags = function(name, tags, callback) {
-  var self = this
-
-  self.update_package(name, function updater(data, cb) {
+  this.update_package(name, function updater(data, cb) {
     data['dist-tags'] = {}
 
-    for (var t in tags) {
+    for (let t in tags) {
       if (tags[t] === null) {
         delete data['dist-tags'][t]
         continue

@@ -285,16 +279,15 @@ Storage.prototype.replace_tags = function(name, tags, callback) {
 
 // currently supports unpublishing only
 Storage.prototype.change_package = function(name, metadata, revision, callback) {
-  var self = this
-
   if (!Utils.is_object(metadata.versions) || !Utils.is_object(metadata['dist-tags'])) {
     return callback( Error[422]('bad data') )
   }
 
-  self.update_package(name, function updater(data, cb) {
-    for (var ver in data.versions) {
+  this.update_package(name, (data, cb) => {
+    for (let ver in data.versions) {
      if (metadata.versions[ver] == null) {
-        self.logger.info( { name: name, version: ver }
+        this.logger.info( { name: name, version: ver }
         , 'unpublishing @{name}@@{version}')
         delete data.versions[ver]
 

@@ -315,9 +308,8 @@ Storage.prototype.change_package = function(name, metadata, revision, callback)
 
 Storage.prototype.remove_tarball = function(name, filename, revision, callback) {
   assert(Utils.validate_name(filename))
-  var self = this
 
-  self.update_package(name, function updater(data, cb) {
+  this.update_package(name, (data, cb) => {
     if (data._attachments[filename]) {
       delete data._attachments[filename]
       cb()

@@ -326,7 +318,7 @@ Storage.prototype.remove_tarball = function(name, filename, revision, callback)
     }
   }, function(err) {
     if (err) return callback(err)
-    var storage = self.storage(name)
+    var storage = this.storage(name)
     if (storage) storage.unlink(filename, callback)
   })
 }

@@ -347,7 +339,6 @@ Storage.prototype.add_tarball = function(name, filename) {
     _transform.apply(stream, arguments)
   }
 
-  var self = this
   if (name === info_file || name === '__proto__') {
     process.nextTick(function() {
       stream.emit('error', Error[403]("can't use this filename"))

@@ -355,7 +346,7 @@ Storage.prototype.add_tarball = function(name, filename) {
     return stream
   }
 
-  var storage = self.storage(name)
+  var storage = this.storage(name)
   if (!storage) {
     process.nextTick(function() {
       stream.emit('error', Error[404]("can't upload this package"))

@@ -365,12 +356,12 @@ Storage.prototype.add_tarball = function(name, filename) {
 
   var wstream = storage.write_stream(filename)
 
-  wstream.on('error', function(err) {
+  wstream.on('error', (err) => {
     if (err.code === 'EEXISTS') {
       stream.emit('error', Error[409]('this tarball is already present'))
     } else if (err.code === 'ENOENT') {
       // check if package exists to throw an appropriate message
-      self.get_package(name, function(_err, res) {
+      this.get_package(name, function(_err, res) {
         if (_err) {
           stream.emit('error', _err)
         } else {

@@ -386,8 +377,8 @@ Storage.prototype.add_tarball = function(name, filename) {
     // re-emitting open because it's handled in storage.js
     stream.emit('open')
   })
-  wstream.on('success', function() {
-    self.update_package(name, function updater(data, cb) {
+  wstream.on('success', () => {
+    this.update_package(name, function updater(data, cb) {
       data._attachments[filename] = {
         shasum: shasum.digest('hex'),
       }

@@ -453,41 +444,41 @@ Storage.prototype.get_tarball = function(name, filename, callback) {
 }
 
 Storage.prototype.get_package = function(name, options, callback) {
-  if (typeof(options) === 'function') callback = options, options = {}
+  if (typeof(options) === 'function') {
+    callback = options, options = {};
+  }
 
-  var self = this
-  var storage = self.storage(name)
+  var storage = this.storage(name)
   if (!storage) return callback( Error[404]('no such package available') )
 
-  storage.read_json(info_file, function(err, result) {
+  storage.read_json(info_file, (err, result) => {
     if (err) {
       if (err.code === 'ENOENT') {
         return callback( Error[404]('no such package available') )
       } else {
-        return callback(self._internal_error(err, info_file, 'error reading'))
+        return callback(this._internal_error(err, info_file, 'error reading'))
       }
     }
-    self._normalize_package(result)
+    this._normalize_package(result)
     callback(err, result)
   })
 }
 
 // walks through each package and calls `on_package` on them
 Storage.prototype._each_package = function (on_package, on_end) {
-  var self = this
   var storages = {}
 
-  storages[self.config.storage] = true
+  storages[this.config.storage] = true;
 
-  if (self.config.packages) {
-    Object.keys(self.packages || {}).map(function (pkg) {
-      if (self.config.packages[pkg].storage) {
-        storages[self.config.packages[pkg].storage] = true
+  if (this.config.packages) {
+    Object.keys(this.packages || {}).map( pkg => {
+      if (this.config.packages[pkg].storage) {
+        storages[this.config.packages[pkg].storage] = true
       }
     })
   }
 
-  var base = Path.dirname(self.config.self_path);
+  const base = Path.dirname(this.config.self_path);
 
   async.eachSeries(Object.keys(storages), function (storage, cb) {
     fs.readdir(Path.resolve(base, storage), function (err, files) {

@@ -502,7 +493,7 @@ Storage.prototype._each_package = function (on_package, on_end) {
       async.eachSeries(files, function (file2, cb) {
         if (Utils.validate_name(file2)) {
           on_package({
-            name: file + '/' + file2,
+            name: `${file}/${file2}`,
             path: Path.resolve(base, storage, file, file2),
           }, cb)
         } else {

@@ -583,16 +574,14 @@ Storage.prototype.update_package = function(name, updateFn, _callback) {
 }
 
 Storage.prototype.search = function(startkey, options) {
-  var self = this
-
   var stream = new Stream.PassThrough({ objectMode: true })
 
-  self._each_package(function on_package(item, cb) {
-    fs.stat(item.path, function(err, stats) {
+  this._each_package((item, cb) => {
+    fs.stat(item.path, (err, stats) => {
       if (err) return cb(err)
 
       if (stats.mtime > startkey) {
-        self.get_package(item.name, options, function(err, data) {
+        this.get_package(item.name, options, function(err, data) {
           if (err) return cb(err)
 
           var versions = Utils.semver_sort(Object.keys(data.versions))

@@ -635,8 +624,9 @@ Storage.prototype._normalize_package = function(pkg) {
   ;['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks'].forEach(function(key) {
     if (!Utils.is_object(pkg[key])) pkg[key] = {}
   })
-  if (typeof(pkg._rev) !== 'string') pkg._rev = '0-0000000000000000'
+
+  if (typeof(pkg._rev) !== 'string') {
+    pkg._rev = '0-0000000000000000';
+  }
   // normalize dist-tags
   Utils.normalize_dist_tags(pkg)
 }

@@ -644,7 +634,9 @@ Storage.prototype._normalize_package = function(pkg) {
 Storage.prototype._write_package = function(name, json, callback) {
 
   // calculate revision a la couchdb
-  if (typeof(json._rev) !== 'string') json._rev = '0-0000000000000000'
+  if (typeof(json._rev) !== 'string') {
+    json._rev = '0-0000000000000000';
+  }
   var rev = json._rev.split('-')
   json._rev = ((+rev[0] || 0) + 1) + '-' + Crypto.pseudoRandomBytes(8).toString('hex')
 

@@ -653,18 +645,18 @@ Storage.prototype._write_package = function(name, json, callback) {
   storage.write_json(info_file, json, callback)
 }
 
-Storage.prototype.storage = function(package) {
-  var path = this.config.get_package_spec(package).storage
+Storage.prototype.storage = function(pkg) {
+  var path = this.config.get_package_spec(pkg).storage
   if (path == null) path = this.config.storage
   if (path == null || path === false) {
-    this.logger.debug( { name: package }
+    this.logger.debug( { name: pkg }
     , 'this package has no storage defined: @{name}' )
     return null
  }
   return Path_Wrapper(
     Path.join(
       Path.resolve(Path.dirname(this.config.self_path || ''), path),
-      package
+      pkg
     )
   )
 }
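The final hunk above also renames the `storage(package)` parameter to `pkg`. Presumably this goes hand in hand with the new `"use strict";` header added to the same file: `package` is a future reserved word in strict mode, so keeping it as a parameter name would be a syntax error. A minimal illustration (the helper name is made up):

    "use strict";

    // function storageFor(package) {}   // SyntaxError: `package` is reserved in strict mode

    function storageFor(pkg) {           // renamed parameter parses fine
      return '/storage/' + pkg
    }

    console.log(storageFor('my-module'))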

@@ -20,10 +20,10 @@ function load_plugins(config, plugin_configs, params, sanity_check) {
 
     // npm package
     if (plugin === null && p.match(/^[^\.\/]/)) {
-      plugin = try_load('verdaccio-' + p)
+      plugin = try_load(`verdaccio-${p}`)
      // compatibility for old sinopia plugins
       if(!plugin) {
-        plugin = try_load('sinopia-' + p)
+        plugin = try_load(`sinopia-${p}`)
       }
     }
 

@@ -1,7 +1,7 @@
 
 // see https://secure.flickr.com/photos/girliemac/sets/72157628409467125
 
-var images = {
+const images = {
   100: 'aVvDhR', // '6512768893', // 100 - Continue
   101: 'aXXExP', // '6540479029', // 101 - Switching Protocols
   200: 'aVuVsF', // '6512628175', // 200 - OK

@@ -1,3 +1,5 @@
+"use strict";
+
 var assert = require('assert')
 var async = require('async')
 var Error = require('http-errors')

@@ -1,3 +1,5 @@
+"use strict";
+
 var JSONStream = require('JSONStream')
 var Error = require('http-errors')
 var request = require('request')

@@ -68,12 +70,14 @@ function _setupProxy(hostname, config, mainconfig, isHTTPS) {
   }
 
   // use wget-like algorithm to determine if proxy shouldn't be used
-  if (hostname[0] !== '.') hostname = '.' + hostname
+  if (hostname[0] !== '.') {
+    hostname = '.' + hostname;
+  }
   if (typeof(no_proxy) === 'string' && no_proxy.length) {
     no_proxy = no_proxy.split(',')
   }
   if (Array.isArray(no_proxy)) {
-    for (var i=0; i<no_proxy.length; i++) {
+    for (let i=0; i<no_proxy.length; i++) {
       var no_proxy_item = no_proxy[i]
       if (no_proxy_item[0] !== '.') no_proxy_item = '.' + no_proxy_item
       if (hostname.lastIndexOf(no_proxy_item) === hostname.length - no_proxy_item.length) {

@@ -82,7 +86,7 @@ function _setupProxy(hostname, config, mainconfig, isHTTPS) {
           'not using proxy for @{url}, excluded by @{rule} rule')
          this.proxy = false
        }
-        break
+        break;
      }
    }
  }

@@ -117,7 +121,7 @@ Storage.prototype.request = function(options, cb) {
   this._add_proxy_headers(options.req, headers)
 
   // add/override headers specified in the config
-  for (var key in this.config.headers) {
+  for (let key in this.config.headers) {
     headers[key] = this.config.headers[key]
   }
 
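The `_setupProxy` hunks above only add braces and `let`, but the no_proxy logic they touch is worth spelling out: both the host name and every `no_proxy` entry are prefixed with a dot, and an entry disables the proxy when it matches the end of the host name. A standalone sketch of that suffix test (`matchesNoProxy` is a made-up helper, not part of verdaccio):

    "use strict";

    function matchesNoProxy(hostname, no_proxy) {
      if (hostname[0] !== '.') hostname = '.' + hostname;
      const rules = typeof no_proxy === 'string' ? no_proxy.split(',') : no_proxy || [];
      return rules.some((rule) => {
        if (rule[0] !== '.') rule = '.' + rule;
        // wget-like check: the rule must match the end of the host name
        return hostname.lastIndexOf(rule) === hostname.length - rule.length;
      });
    }

    console.log(matchesNoProxy('registry.npmjs.org', 'npmjs.org,example.com'));  // true
    console.log(matchesNoProxy('example.org', 'npmjs.org'));                     // false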

@@ -52,7 +52,7 @@ module.exports.validate_metadata = function(object, name) {
     object['versions'] = {}
   }
 
-  return object
+  return object;
 }
 
 module.exports.filter_tarball_urls = function(pkg, req, config) {

@@ -67,7 +67,7 @@ module.exports.filter_tarball_urls = function(pkg, req, config) {
       var result = req.protocol + '://' + req.headers.host
     }
 
-    return result + '/' + pkg.name.replace(/\//g, '%2f') + '/-/' + filename
+    return `${result}/${pkg.name.replace(/\//g, '%2f')}/-/${filename}`;
   }
 
   for (var ver in pkg.versions) {