
fix(flow): fix flow definitions

Author: Juan Picado (@jotadeveloper), 2018-02-19 19:29:14 +01:00
parent 27efad9648
commit d086073069
GPG key ID: 18AC54485952D158 (no known key found for this signature)
19 changed files with 428 additions and 204 deletions


@ -75,6 +75,7 @@
"no-invalid-this": 2,
"new-cap": 2,
"one-var": 2,
"quote-props":["error", "as-needed"],
"no-console": [
2,
{
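The new "quote-props": ["error", "as-needed"] rule is what drives the many hunks below that drop quotes from plain object keys. A minimal sketch of what the rule accepts (the objects here are illustrative, not taken from the repository):

// Before: every key quoted; "as-needed" flags the ones that are plain identifiers.
const before = {
  'name': 'verdaccio-jest',
  'dist-tags': {},        // must stay quoted: not a valid identifier
};

// After: quotes kept only where the key is not a valid identifier.
const after = {
  name: 'verdaccio-jest',
  'dist-tags': {},
};

console.log(before.name === after.name); // true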


@ -7,13 +7,18 @@
.*/coverage/.*
.*/.vscode/.*
.*/build/.*
[include]
.*/docs/.*
.*/scripts/.*
.*/assets/.*
.*/bin/.*
.*/systemd/.*
.*/website/.*
.*/wiki/.*
.*/docs/.*
.*/tools/.*
[libs]
node_modules/@verdaccio/types/lib/
[lints]
[options]
suppress_comment= \\(.\\|\n\\)*\\$FlowFixMe
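The suppress_comment option added under [options] is what lets the $FlowFixMe comments scattered through the source diffs below silence individual Flow errors. A minimal sketch, assuming a hypothetical file checked by this .flowconfig:

// @flow
function double(n: number): number {
  return n * 2;
}

// $FlowFixMe: passing a string here would normally be a Flow error,
// but the suppress_comment regex matches this comment and skips the next line.
const answer: number = double('21');
console.log(answer); // 42 at runtime thanks to coercion; Flow simply stops complaining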

flow-typed/npm/node-mocks-http_vx.x.x.js (vendored, new file, 109 lines)

@ -0,0 +1,109 @@
// flow-typed signature: 0c37b93b28df38b46c7edb9bc9d278ad
// flow-typed version: <<STUB>>/node-mocks-http_v1.6.7/flow_v0.64.0
/**
* This is an autogenerated libdef stub for:
*
* 'node-mocks-http'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
declare module 'node-mocks-http' {
declare module.exports: any;
}
/**
* We include stubs for each file inside this npm package in case you need to
* require those files directly. Feel free to delete any files that aren't
* needed.
*/
declare module 'node-mocks-http/lib/express/mock-application' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/express/mock-express' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/express/mock-request' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/express/utils/define-getter' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/http-mock' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/mockEventEmitter' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/mockRequest' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/mockResponse' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/mockWritableStream' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/node/_http_incoming' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/node/_http_server' {
declare module.exports: any;
}
declare module 'node-mocks-http/lib/node/http' {
declare module.exports: any;
}
// Filename aliases
declare module 'node-mocks-http/lib/express/mock-application.js' {
declare module.exports: $Exports<'node-mocks-http/lib/express/mock-application'>;
}
declare module 'node-mocks-http/lib/express/mock-express.js' {
declare module.exports: $Exports<'node-mocks-http/lib/express/mock-express'>;
}
declare module 'node-mocks-http/lib/express/mock-request.js' {
declare module.exports: $Exports<'node-mocks-http/lib/express/mock-request'>;
}
declare module 'node-mocks-http/lib/express/utils/define-getter.js' {
declare module.exports: $Exports<'node-mocks-http/lib/express/utils/define-getter'>;
}
declare module 'node-mocks-http/lib/http-mock.js' {
declare module.exports: $Exports<'node-mocks-http/lib/http-mock'>;
}
declare module 'node-mocks-http/lib/mockEventEmitter.js' {
declare module.exports: $Exports<'node-mocks-http/lib/mockEventEmitter'>;
}
declare module 'node-mocks-http/lib/mockRequest.js' {
declare module.exports: $Exports<'node-mocks-http/lib/mockRequest'>;
}
declare module 'node-mocks-http/lib/mockResponse.js' {
declare module.exports: $Exports<'node-mocks-http/lib/mockResponse'>;
}
declare module 'node-mocks-http/lib/mockWritableStream.js' {
declare module.exports: $Exports<'node-mocks-http/lib/mockWritableStream'>;
}
declare module 'node-mocks-http/lib/node/_http_incoming.js' {
declare module.exports: $Exports<'node-mocks-http/lib/node/_http_incoming'>;
}
declare module 'node-mocks-http/lib/node/_http_server.js' {
declare module.exports: $Exports<'node-mocks-http/lib/node/_http_server'>;
}
declare module 'node-mocks-http/lib/node/http.js' {
declare module.exports: $Exports<'node-mocks-http/lib/node/http'>;
}
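The stub above types the whole node-mocks-http module as any, which is enough for the new unit test further below. A minimal usage sketch, assuming the standard node-mocks-http API:

// @flow
import httpMocks from 'node-mocks-http';

// createRequest/createResponse build fake request/response objects for unit tests.
const req = httpMocks.createRequest({method: 'GET', url: '/react', params: {}});
const res = httpMocks.createResponse();

console.log(req.method, req.url); // 'GET' '/react'
console.log(res.statusCode);      // 200 by default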


@ -1,31 +1,31 @@
/* eslint comma-dangle: 0 */
module.exports = {
'name': 'verdaccio-jest',
'verbose': true,
'collectCoverage': true,
'coveragePathIgnorePatterns': [
name: 'verdaccio-jest',
verbose: true,
collectCoverage: true,
coveragePathIgnorePatterns: [
'node_modules',
'fixtures'
],
'testEnvironment': 'jest-environment-jsdom-global',
'testRegex': '(/test/unit.*\\.spec|test/functional.*\\.func|/test/webui/.*\\.spec)\\.js',
testEnvironment: 'jest-environment-jsdom-global',
testRegex: '(/test/unit.*\\.spec|test/functional.*\\.func|/test/webui/.*\\.spec)\\.js',
// 'testRegex': '(test/functional.*\\.func)\\.js'
'setupFiles': [
setupFiles: [
'./test/webui/global.js'
],
'modulePathIgnorePatterns': [
modulePathIgnorePatterns: [
'global.js'
],
'testPathIgnorePatterns': [
testPathIgnorePatterns: [
'__snapshots__'
],
'moduleNameMapper': {
moduleNameMapper: {
'\\.(scss)$': '<rootDir>/node_modules/identity-obj-proxy',
'github-markdown-css': '<rootDir>/node_modules/identity-obj-proxy',
'\\.(png)$': '<rootDir>/node_modules/identity-obj-proxy'
},
'transformIgnorePatterns': [
transformIgnorePatterns: [
'<rootDir>/node_modules/(?!react-syntax-highlighter)'
]
};


@ -1,11 +1,11 @@
/* eslint comma-dangle: 0 */
module.exports = {
'name': 'verdaccio-e2e-jest',
'verbose': true,
'collectCoverage': false,
'globalSetup': './test/e2e/pre-setup.js',
'globalTeardown': './test/e2e/teardown.js',
'testEnvironment': './test/e2e/puppeteer_environment.js',
'testRegex': '(/test/e2e/e2e.*\\.spec)\\.js'
name: 'verdaccio-e2e-jest',
verbose: true,
collectCoverage: false,
globalSetup: './test/e2e/pre-setup.js',
globalTeardown: './test/e2e/teardown.js',
testEnvironment: './test/e2e/puppeteer_environment.js',
testRegex: '(/test/e2e/e2e.*\\.spec)\\.js'
};


@ -52,7 +52,7 @@
"@commitlint/cli": "6.1.0",
"@commitlint/config-conventional": "6.1.0",
"@commitlint/travis-cli": "6.1.0",
"@verdaccio/types": "0.3.1",
"@verdaccio/types": "1.0.0",
"axios": "0.17.1",
"babel-cli": "6.26.0",
"babel-core": "6.26.0",
@ -105,6 +105,7 @@
"jest-environment-jsdom-global": "1.0.3",
"jest-environment-node": "22.2.0",
"localstorage-memory": "1.0.2",
"node-mocks-http": "1.6.7",
"node-sass": "4.7.2",
"normalize.css": "7.0.0",
"ora": "1.4.0",


@ -57,11 +57,11 @@ class Config {
assert(self.storage, 'CONFIG: storage path not defined');
const users = {
'all': true,
'anonymous': true,
'undefined': true,
'owner': true,
'none': true,
all: true,
anonymous: true,
undefined: true,
owner: true,
none: true,
};
const check_user_or_uplink = function(arg) {


@ -6,7 +6,6 @@ import Crypto from 'crypto';
import assert from 'assert';
import fs from 'fs';
import Path from 'path';
import Stream from 'stream';
import UrlNode from 'url';
import _ from 'lodash';
// $FlowFixMe
@ -172,7 +171,7 @@ class LocalStorage implements IStorage {
sha: version.dist.shasum,
};
/* eslint spaced-comment: 0 */
//$FlowFixMe
// $FlowFixMe
const upLink: string = version[Symbol.for('__verdaccio_uplink')];
if (_.isNil(upLink) === false) {
@ -232,7 +231,9 @@ class LocalStorage implements IStorage {
* @param {*} tag
* @param {*} callback
*/
addVersion(name: string, version: string, metadata: Version,
addVersion(name: string,
version: string,
metadata: Version,
tag: string,
callback: Callback) {
this._updatePackage(name, (data, cb) => {
@ -533,7 +534,7 @@ class LocalStorage implements IStorage {
* @private
* @return {ReadTarball}
*/
_streamSuccessReadTarBall(storage: any, filename: string) {
_streamSuccessReadTarBall(storage: any, filename: string): IReadTarball {
const stream: IReadTarball = new ReadTarball();
const readTarballStream = storage.readTarball(filename);
const e404 = Utils.ErrorCode.get404;
@ -588,7 +589,7 @@ class LocalStorage implements IStorage {
* @return {Function}
*/
search(startKey: string, options: any) {
const stream = new Stream.PassThrough({objectMode: true});
const stream = new UploadTarball({objectMode: true});
this._eachPackage((item, cb) => {
fs.stat(item.path, (err, stats) => {
@ -609,21 +610,21 @@ class LocalStorage implements IStorage {
if (data.versions[latest]) {
const version: Version = data.versions[latest];
const pkg: any = {
'name': version.name,
'description': version.description,
name: version.name,
description: version.description,
'dist-tags': {latest},
'maintainers': version.maintainers || [version.author].filter(Boolean),
'author': version.author,
'repository': version.repository,
'readmeFilename': version.readmeFilename || '',
'homepage': version.homepage,
'keywords': version.keywords,
'bugs': version.bugs,
'license': version.license,
'time': {
maintainers: version.maintainers || [version.author].filter(Boolean),
author: version.author,
repository: version.repository,
readmeFilename: version.readmeFilename || '',
homepage: version.homepage,
keywords: version.keywords,
bugs: version.bugs,
license: version.license,
time: {
modified: item.time ? new Date(item.time).toISOString() : stats.mtime,
},
'versions': {[latest]: 'latest'},
versions: {[latest]: 'latest'},
};
stream.push(pkg);
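search() now pushes one plain summary object per package into an object-mode UploadTarball instead of a bare PassThrough. A minimal consumer sketch, assuming IStorage exposes search() and the returned stream behaves like a PassThrough in object mode:

// @flow
import type {IStorage} from '@verdaccio/types';

// `local` is an already constructed LocalStorage instance (it implements IStorage).
function logLocalSearch(local: IStorage) {
  const stream = local.search('0', {}); // '0' as a start key is a placeholder
  stream.on('data', (pkg) => {
    // each chunk is the summary built above: name, description, 'dist-tags',
    // maintainers, time, versions, ...
    console.log(pkg.name, pkg['dist-tags']);
  });
  stream.on('end', () => console.log('local search finished'));
}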


@ -17,13 +17,14 @@ const DEFAULT_REVISION: string = `0-0000000000000000`;
const generatePackageTemplate = function(name: string): Package {
return {
// standard things
'name': name,
'versions': {},
name,
versions: {},
'dist-tags': {},
'time': {},
'_distfiles': {},
'_attachments': {},
'_uplinks': {},
time: {},
_distfiles: {},
_attachments: {},
_uplinks: {},
_rev: '',
};
};
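generatePackageTemplate builds the empty Package document used when a package is first created locally; the only field it fills in is the name. A small usage sketch (it assumes the helper is in scope, as it is in the file above):

// @flow
import type {Package} from '@verdaccio/types';

const pkg: Package = generatePackageTemplate('my-new-package');
console.log(pkg.name);                          // 'my-new-package'
console.log(Object.keys(pkg.versions).length);  // 0
console.log(pkg['dist-tags']);                  // {}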


@ -17,6 +17,7 @@ import type {
IStorage,
IProxy,
IStorageHandler,
Versions,
ProxyList,
Package,
Config,
@ -27,17 +28,22 @@ import type {
Logger,
} from '@verdaccio/types';
import type {IReadTarball} from '@verdaccio/streams';
import type {IReadTarball, IUploadTarball} from '@verdaccio/streams';
const LoggerApi = require('../lib/logger');
const WHITELIST = ['_rev', 'name', 'versions', 'dist-tags', 'readme', 'time'];
const getDefaultMetadata = (name) => {
return {
'name': name,
'versions': {},
const getDefaultMetadata = function(name): Package {
const pkgMetadata: Package = {
name,
versions: {},
'dist-tags': {},
'_uplinks': {},
_uplinks: {},
_distfiles: {},
_attachments: {},
_rev: '',
};
return pkgMetadata;
};
/**
@ -63,9 +69,9 @@ class Storage implements IStorageHandler {
/**
* Add a {name} package to a system
Function checks if package with the same name is available from uplinks.
If it isn't, we create package locally
Used storages: local (write) && uplinks
Function checks if package with the same name is available from uplinks.
If it isn't, we create package locally
Used storages: local (write) && uplinks
* @param {*} name
* @param {*} metadata
* @param {*} callback
@ -97,6 +103,7 @@ class Storage implements IStorageHandler {
*/
const checkPackageRemote = () => {
return new Promise((resolve, reject) => {
// $FlowFixMe
self._syncUplinksMetadata(name, null, {}, (err, results, err_results) => {
// something weird
if (err && err.status !== 404) {
@ -111,7 +118,7 @@ class Storage implements IStorageHandler {
// if uplink fails with a status other than 404, we report failure
if (_.isNil(err_results[i][0]) === false) {
if (err_results[i][0].status !== 404) {
if (_.isNil(this.config.publish) === false &&
if (this.config.publish &&
_.isBoolean(this.config.publish.allow_offline) &&
this.config.publish.allow_offline) {
return resolve();
@ -159,20 +166,20 @@ class Storage implements IStorageHandler {
/**
* Add a new version of package {name} to a system
Used storages: local (write)
Used storages: local (write)
* @param {*} name
* @param {*} version
* @param {*} metadata
* @param {*} tag
* @param {*} callback
*/
addVersion(name: string, version: Version, metadata: Package, tag: string, callback: Callback) {
addVersion(name: string, version: string, metadata: Version, tag: string, callback: Callback) {
this.localStorage.addVersion(name, version, metadata, tag, callback);
}
/**
* Tags a package version with a provided tag
Used storages: local (write)
Used storages: local (write)
* @param {*} name
* @param {*} tag_hash
* @param {*} callback
@ -183,7 +190,7 @@ class Storage implements IStorageHandler {
/**
* Tags a package version with a provided tag
Used storages: local (write)
Used storages: local (write)
* @param {*} name
* @param {*} tag_hash
* @param {*} callback
@ -195,8 +202,8 @@ class Storage implements IStorageHandler {
/**
* Change an existing package (i.e. unpublish one version)
Function changes a package info from local storage and all uplinks with write access./
Used storages: local (write)
Function changes a package info from local storage and all uplinks with write access./
Used storages: local (write)
* @param {*} name
* @param {*} metadata
* @param {*} revision
@ -208,8 +215,8 @@ class Storage implements IStorageHandler {
/**
* Remove a package from a system
Function removes a package from local storage
Used storages: local (write)
Function removes a package from local storage
Used storages: local (write)
* @param {*} name
* @param {*} callback
*/
@ -220,11 +227,11 @@ class Storage implements IStorageHandler {
}
/**
Remove a tarball from a system
Function removes a tarball from local storage.
Tarball in question should not be linked to in any existing
versions, i.e. package version should be unpublished first.
Used storage: local (write)
Remove a tarball from a system
Function removes a tarball from local storage.
Tarball in question should not be linked to in any existing
versions, i.e. package version should be unpublished first.
Used storage: local (write)
* @param {*} name
* @param {*} filename
* @param {*} revision
@ -236,22 +243,22 @@ class Storage implements IStorageHandler {
/**
* Upload a tarball for {name} package
Function is syncronous and returns a WritableStream
Used storages: local (write)
Function is syncronous and returns a WritableStream
Used storages: local (write)
* @param {*} name
* @param {*} filename
* @return {Stream}
*/
add_tarball(name: string, filename: string) {
add_tarball(name: string, filename: string): IUploadTarball {
return this.localStorage.addTarball(name, filename);
}
/**
Get a tarball from a storage for {name} package
Function is syncronous and returns a ReadableStream
Function tries to read tarball locally, if it fails then it reads package
information in order to figure out where we can get this tarball from
Used storages: local || uplink (just one)
Get a tarball from a storage for {name} package
Function is syncronous and returns a ReadableStream
Function tries to read tarball locally, if it fails then it reads package
information in order to figure out where we can get this tarball from
Used storages: local || uplink (just one)
* @param {*} name
* @param {*} filename
* @return {Stream}
@ -266,7 +273,8 @@ class Storage implements IStorageHandler {
// information about it, so fetching package info is unnecessary
// trying local first
let localStream: IReadTarball = self.localStorage.getTarball(name, filename);
// flow: should be IReadTarball
let localStream: any = self.localStorage.getTarball(name, filename);
let is_open = false;
localStream.on('error', (err) => {
if (is_open || err.status !== 404) {
@ -276,7 +284,8 @@ class Storage implements IStorageHandler {
// local reported 404
let err404 = err;
localStream.abort();
localStream = null; // gc
// $FlowFixMe
localStream = null; // we force for garbage collector
self.localStorage.getPackageMetadata(name, (err, info: Package) => {
if (_.isNil(err) && info._distfiles && _.isNil(info._distfiles[filename]) === false) {
// information about this file exists locally
@ -285,7 +294,7 @@ class Storage implements IStorageHandler {
// we know nothing about this file, trying to get information elsewhere
self._syncUplinksMetadata(name, info, {}, (err, info: Package) => {
if (_.isNil(err) === false) {
return readStream.emit('error', err);
return readStream.emit('error', err);
}
if (_.isNil(info._distfiles) || _.isNil(info._distfiles[filename])) {
return readStream.emit('error', err404);
@ -367,7 +376,7 @@ class Storage implements IStorageHandler {
savestream.on('error', function(err) {
self.logger.warn( {err: err}
, 'error saving file: @{err.message}\n@{err.stack}' );
, 'error saving file: @{err.message}\n@{err.stack}' );
if (savestream) {
savestream.abort();
}
@ -381,11 +390,11 @@ class Storage implements IStorageHandler {
}
/**
Retrieve a package metadata for {name} package
Function invokes localStorage.getPackage and uplink.get_package for every
uplink with proxy_access rights against {name} and combines results
into one json object
Used storages: local && uplink (proxy_access)
Retrieve a package metadata for {name} package
Function invokes localStorage.getPackage and uplink.get_package for every
uplink with proxy_access rights against {name} and combines results
into one json object
Used storages: local && uplink (proxy_access)
* @param {object} options
* @property {string} options.name Package Name
@ -423,18 +432,18 @@ class Storage implements IStorageHandler {
result._attachments = {};
options.callback(null, result, uplink_errors);
});
});
});
}
/**
Retrieve remote and local packages more recent than {startkey}
Function streams all packages from all uplinks first, and then
local packages.
Note that local packages could override registry ones just because
they appear in JSON last. That's a trade-off we make to avoid
memory issues.
Used storages: local && uplink (proxy_access)
Retrieve remote and local packages more recent than {startkey}
Function streams all packages from all uplinks first, and then
local packages.
Note that local packages could override registry ones just because
they appear in JSON last. That's a trade-off we make to avoid
memory issues.
Used storages: local && uplink (proxy_access)
* @param {*} startkey
* @param {*} options
* @return {Stream}
@ -445,42 +454,42 @@ class Storage implements IStorageHandler {
let stream: any = new Stream.PassThrough({objectMode: true});
async.eachSeries(Object.keys(this.uplinks), function(up_name, cb) {
// shortcut: if `local=1` is supplied, don't call uplinks
if (options.req.query.local !== undefined) {
return cb();
}
// search by keyword for each uplink
let lstream: IUploadTarball = self.uplinks[up_name].search(options);
// join streams
lstream.pipe(stream, {end: false});
lstream.on('error', function(err) {
self.logger.error({err: err}, 'uplink error: @{err.message}');
cb(), cb = function() {};
});
lstream.on('end', function() {
cb(), cb = function() {};
});
stream.abort = function() {
if (lstream.abort) {
lstream.abort();
// shortcut: if `local=1` is supplied, don't call uplinks
if (options.req.query.local !== undefined) {
return cb();
}
cb(), cb = function() {};
};
},
// executed after all series
function() {
// attach a local search results
let lstream: IReadTarball = self.localStorage.search(startkey, options);
stream.abort = function() {
lstream.abort();
};
lstream.pipe(stream, {end: true});
lstream.on('error', function(err) {
self.logger.error({err: err}, 'search error: @{err.message}');
stream.end();
// search by keyword for each uplink
let lstream: IUploadTarball = self.uplinks[up_name].search(options);
// join streams
lstream.pipe(stream, {end: false});
lstream.on('error', function(err) {
self.logger.error({err: err}, 'uplink error: @{err.message}');
cb(), cb = function() {};
});
lstream.on('end', function() {
cb(), cb = function() {};
});
stream.abort = function() {
if (lstream.abort) {
lstream.abort();
}
cb(), cb = function() {};
};
},
// executed after all series
function() {
// attach a local search results
let lstream: IReadTarball = self.localStorage.search(startkey, options);
stream.abort = function() {
lstream.abort();
};
lstream.pipe(stream, {end: true});
lstream.on('error', function(err) {
self.logger.error({err: err}, 'search error: @{err.message}');
stream.end();
});
});
});
return stream;
}
@ -523,25 +532,18 @@ class Storage implements IStorageHandler {
/**
* Function fetches package metadata from uplinks and synchronizes it with local data
if package is available locally, it MUST be provided in pkginfo
returns callback(err, result, uplink_errors)
* @param {*} name
* @param {*} packageInfo
* @param {*} options
* @param {*} callback
if package is available locally, it MUST be provided in pkginfo
returns callback(err, result, uplink_errors)
*/
_syncUplinksMetadata(name: string, packageInfo: Package, options: any, callback: Callback) {
let exists = false;
_syncUplinksMetadata(name: string, packageInfo: Package, options: any, callback: Callback): void {
let exists = true;
const self = this;
const upLinks = [];
if (_.isNil(packageInfo)) {
if (!packageInfo || packageInfo === null) {
exists = false;
packageInfo = getDefaultMetadata(name);
} else {
exists = true;
}
for (let up in this.uplinks) {
if (this.config.hasProxyTo(name, up)) {
upLinks.push(this.uplinks[up]);
@ -591,7 +593,7 @@ class Storage implements IStorageHandler {
// added to fix verdaccio#73
if ('time' in upLinkResponse) {
packageInfo['time'] = upLinkResponse.time;
packageInfo.time = upLinkResponse.time;
}
this._updateVersionsHiddenUpLink(upLinkResponse.versions, upLink);
@ -616,8 +618,8 @@ class Storage implements IStorageHandler {
assert(!err && Array.isArray(upLinksErrors));
if (!exists) {
return callback( Utils.ErrorCode.get404('no such package available')
, null
, upLinksErrors );
, null
, upLinksErrors );
}
self.localStorage.updateVersions(name, packageInfo, function(err, packageJsonLocal: Package) {
@ -666,7 +668,7 @@ class Storage implements IStorageHandler {
/**
* Function gets a local info and an info from uplinks and tries to merge it
exported for unit tests only.
exported for unit tests only.
* @param {*} local
* @param {*} up
* @param {*} config
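The reworked _syncUplinksMetadata is exercised directly by the new test/unit/store.spec.js shown further below. A minimal call sketch that mirrors that test; the config fixture, import paths, and storage path are the test's, everything else is an assumption:

// @flow
import _ from 'lodash';
import AppConfig from '../../src/lib/config';
import Storage from '../../src/lib/storage';
import configExample from './partials/config';
import type {IStorageHandler, Config} from '@verdaccio/types';

const storageConfig = _.clone(configExample);
storageConfig.self_path = __dirname;
storageConfig.storage = './unit/partials/store/test-storage-store.spec';
const config: Config = new AppConfig(storageConfig);
const storage: IStorageHandler = new Storage(config);

// Ask the configured uplinks for metadata the local storage does not have.
storage._syncUplinksMetadata('react', null, {}, (err, metadata, uplinkErrors) => {
  if (err) {
    // e.g. a 404 'no such package available' when no uplink knows the name
    return console.error(err.message);
  }
  console.log(Object.keys(metadata.versions).length, uplinkErrors.length);
});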


@ -13,11 +13,13 @@ import {ReadTarball} from '@verdaccio/streams';
import type {
IProxy,
Config,
UpLinkConf,
Callback,
Headers,
Logger,
} from '@verdaccio/types';
import type {IUploadTarball} from '@verdaccio/streams';
// import type {IUploadTarball, IReadTarball} from '@verdaccio/streams';
const LoggerApi = require('./logger');
const encode = function(thing) {
@ -42,7 +44,7 @@ const setConfig = (config, key, def) => {
* (same for storage.js, local-storage.js, up-storage.js)
*/
class ProxyStorage implements IProxy {
config: Config;
config: UpLinkConf;
failed_requests: number;
userAgent: string;
ca: string | void;
@ -76,11 +78,11 @@ class ProxyStorage implements IProxy {
this.config.url = this.config.url.replace(/\/$/, '');
if (Number(this.config.timeout) >= 1000) {
if (this.config.timeout && Number(this.config.timeout) >= 1000) {
this.logger.warn(['Too big timeout value: ' + this.config.timeout,
'We changed time format to nginx-like one',
'(see http://nginx.org/en/docs/syntax.html)',
'so please update your config accordingly'].join('\n'));
'We changed time format to nginx-like one',
'(see http://nginx.org/en/docs/syntax.html)',
'so please update your config accordingly'].join('\n'));
}
// a bunch of different configurable timers
@ -96,14 +98,14 @@ class ProxyStorage implements IProxy {
* @param {*} cb
* @return {Request}
*/
request(options: any, cb: Callback) {
request(options: any, cb?: Callback) {
let json;
if (this._statusCheck() === false) {
let streamRead = new Stream.Readable();
process.nextTick(function() {
if (_.isFunction(cb)) {
if (cb) {
cb(ErrorCode.get500('uplink is offline'));
}
// $FlowFixMe
@ -142,6 +144,7 @@ class ProxyStorage implements IProxy {
// $FlowFixMe
processBody(err, body);
logActivity();
// $FlowFixMe
cb(err, res, body);
/**
@ -176,8 +179,8 @@ class ProxyStorage implements IProxy {
function logActivity() {
let message = '@{!status}, req: \'@{request.method} @{request.url}\'';
message += error
? ', error: @{!error}'
: ', bytes: @{bytes.in}/@{bytes.out}';
? ', error: @{!error}'
: ', bytes: @{bytes.in}/@{bytes.out}';
self.logger.warn({
err: err,
request: {method: method, url: uri},
@ -261,8 +264,9 @@ class ProxyStorage implements IProxy {
* @private
*/
_setAuth(headers: any) {
const auth = this.config.auth;
if (_.isNil(this.config.auth) || headers['authorization']) {
if (typeof auth === 'undefined' || headers['authorization']) {
return headers;
}
@ -273,10 +277,12 @@ class ProxyStorage implements IProxy {
// get NPM_TOKEN http://blog.npmjs.org/post/118393368555/deploying-with-npm-private-modules
// or get other variable export in env
let token: any = process.env.NPM_TOKEN;
if (this.config.auth.token) {
token = this.config.auth.token;
} else if (this.config.auth.token_env) {
token = process.env[this.config.auth.token_env];
if (auth.token) {
token = auth.token;
} else if (auth.token_env ) {
// $FlowFixMe
token = process.env[auth.token_env];
}
if (_.isNil(token)) {
@ -284,8 +290,9 @@ class ProxyStorage implements IProxy {
}
// define type Auth allow basic and bearer
const type = this.config.auth.type;
const type = auth.type;
this._setHeaderAuthorization(headers, type, token);
return headers;
}
@ -321,25 +328,28 @@ class ProxyStorage implements IProxy {
* Eg:
*
* uplinks:
npmjs:
url: https://registry.npmjs.org/
headers:
Accept: "application/vnd.npm.install-v2+json; q=1.0"
verdaccio-staging:
url: https://mycompany.com/npm
headers:
Accept: "application/json"
authorization: "Basic YourBase64EncodedCredentials=="
npmjs:
url: https://registry.npmjs.org/
headers:
Accept: "application/vnd.npm.install-v2+json; q=1.0"
verdaccio-staging:
url: https://mycompany.com/npm
headers:
Accept: "application/json"
authorization: "Basic YourBase64EncodedCredentials=="
* @param {Object} headers
* @private
*/
_overrideWithUplinkConfigHeaders(headers: any) {
_overrideWithUplinkConfigHeaders(headers: Headers) {
if (!this.config.headers) {
return headers;
}
// add/override headers specified in the config
/* eslint guard-for-in: 0 */
for (let key in this.config.headers) {
if (Object.prototype.hasOwnProperty.call(this.config.headers, key)) {
headers[key] = this.config.headers[key];
}
}
}
@ -349,10 +359,10 @@ class ProxyStorage implements IProxy {
* @return {Boolean}
*/
isUplinkValid(url: string) {
// $FlowFixMe
url = URL.parse(url);
// $FlowFixMe
return url.protocol === this.url.protocol && url.host === this.url.host && url.path.indexOf(this.url.path) === 0;
// $FlowFixMe
url = URL.parse(url);
// $FlowFixMe
return url.protocol === this.url.protocol && url.host === this.url.host && url.path.indexOf(this.url.path) === 0;
}
/**
@ -447,8 +457,8 @@ class ProxyStorage implements IProxy {
* @return {Stream}
*/
search(options: any) {
const transformStream: IUploadTarball = new Stream.PassThrough({objectMode: true});
const requestStream: IUploadTarball = this.request({
const transformStream: any = new Stream.PassThrough({objectMode: true});
const requestStream: stream$Readable = this.request({
uri: options.req.url,
req: options.req,
headers: {
@ -486,6 +496,8 @@ class ProxyStorage implements IProxy {
});
transformStream.abort = () => {
// FIXME: this is clearly a potential issue
// $FlowFixMe
requestStream.abort();
transformStream.emit('end');
};
@ -509,8 +521,8 @@ class ProxyStorage implements IProxy {
if (this.proxy === false) {
headers['X-Forwarded-For'] = (
req && req.headers['x-forwarded-for']
? req.headers['x-forwarded-for'] + ', '
: ''
? req.headers['x-forwarded-for'] + ', '
: ''
) + req.connection.remoteAddress;
}
}
@ -518,8 +530,8 @@ class ProxyStorage implements IProxy {
// always attach Via header to avoid loops, even if we're not proxying
headers['Via'] =
req && req.headers['via']
? req.headers['via'] + ', '
: '';
? req.headers['via'] + ', '
: '';
headers['Via'] += '1.1 ' + this.server_id + ' (Verdaccio)';
}
@ -604,7 +616,7 @@ class ProxyStorage implements IProxy {
if (this.proxy) {
this.logger.debug({url: this.url.href, rule: noProxyItem},
'not using proxy for @{url}, excluded by @{rule} rule');
// $FlowFixMe
// $FlowFixMe
this.proxy = false;
}
break;
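_setAuth now reads auth.token or auth.token_env from the uplink configuration and falls back to NPM_TOKEN from the environment. A minimal sketch of an UpLinkConf-shaped object using that mechanism; the property names come from this diff, while the concrete values and the assumption that the remaining UpLinkConf fields are optional are mine:

// @flow
import type {UpLinkConf} from '@verdaccio/types';

const uplinkConf: UpLinkConf = {
  url: 'https://registry.npmjs.org/',
  timeout: '30s',
  auth: {
    type: 'bearer',               // the code above allows basic and bearer
    token_env: 'REGISTRY_TOKEN',  // hypothetical variable; its value becomes the token
  },
};

// Without `token` or `token_env`, _setAuth falls back to process.env.NPM_TOKEN.
console.log(uplinkConf.auth);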


@ -278,14 +278,14 @@ function normalize_dist_tags(data) {
const parseIntervalTable = {
'': 1000,
'ms': 1,
's': 1000,
'm': 60*1000,
'h': 60*60*1000,
'd': 86400000,
'w': 7*86400000,
'M': 30*86400000,
'y': 365*86400000,
ms: 1,
s: 1000,
m: 60*1000,
h: 60*60*1000,
d: 86400000,
w: 7*86400000,
M: 30*86400000,
y: 365*86400000,
};
/**
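parseIntervalTable maps an interval suffix to its length in milliseconds; the empty suffix means a bare number is interpreted as seconds. A sketch of how such a table converts strings like '2m' (the helper below is hypothetical, uses only a subset of the table, and is not the repository's parser):

// @flow
const table: {[string]: number} = {'': 1000, ms: 1, s: 1000, m: 60 * 1000, h: 60 * 60 * 1000, d: 86400000};

function toMilliseconds(interval: string): number {
  const match = interval.match(/^(\d+(?:\.\d+)?)\s*([a-zA-Z]*)$/);
  if (!match || !(match[2] in table)) {
    throw new Error(`invalid interval: ${interval}`);
  }
  return Number(match[1]) * table[match[2]];
}

console.log(toMilliseconds('2m')); // 120000
console.log(toMilliseconds('10')); // 10000 (no suffix means seconds)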


@ -10,7 +10,7 @@ import {readFile} from '../functional/lib/test.utils';
const readMetadata = (fileName: string = 'metadata') => readFile(`../../unit/partials/${fileName}`);
import type {IStorage} from '@verdaccio/types';
import type {IStorage, Config} from '@verdaccio/types';
setup([]);


@ -1,5 +1,5 @@
const config = {
storage: __dirname + '/store/test-storage',
storage: `${__dirname}/store/test-storage`,
uplinks: {
'npmjs': {
'url': 'https://registry.npmjs.org/'
@ -17,6 +17,12 @@ const config = {
allow_publish: 'nobody'
},
'react': {
allow_access: '$all',
allow_publish: '$all',
proxy: 'npmjs'
},
'jquery': {
allow_access: '$all',
allow_publish: '$all',

test/unit/store.spec.js (new file, 79 lines)

@ -0,0 +1,79 @@
// @flow
import _ from 'lodash';
import httpMocks from 'node-mocks-http';
// $FlowFixMe
import configExample from './partials/config';
import AppConfig from '../../src/lib/config';
import Storage from '../../src/lib/storage';
import {setup} from '../../src/lib/logger';
import type {IStorageHandler, Config} from '@verdaccio/types';
setup(configExample.logs);
const generateStorage = function(): IStorageHandler {
const storageConfig = _.clone(configExample);
const storage = `./unit/partials/store/test-storage-store.spec`;
storageConfig.self_path = __dirname;
storageConfig.storage = storage;
const config: Config = new AppConfig(storageConfig);
return new Storage(config);
}
describe('StorageTest', () => {
jest.setTimeout(1000000);
beforeAll((done)=> {
const storage: IStorageHandler = generateStorage();
var request = httpMocks.createRequest({
method: 'GET',
url: '/react',
params: {}
});
storage.getPackage({
name: 'react',
req: request,
callback: () => {
const stream = storage.get_tarball('react', 'react-16.1.0.tgz');
stream.on('content-length', function(content) {
if (content) {
expect(content).toBeTruthy();
done();
}
});
},
});
});
test('should be defined', () => {
const storage: IStorageHandler = generateStorage();
expect(storage).toBeDefined();
});
test('should fetch from uplink react metadata from nmpjs', (done) => {
const storage: IStorageHandler = generateStorage();
// $FlowFixMe
storage._syncUplinksMetadata('react', null, {}, (err, metadata, errors) => {
expect(metadata).toBeInstanceOf(Object);
done();
});
});
test('should fails on fetch from uplink metadata from nmpjs', (done) => {
const storage: IStorageHandler = generateStorage();
// $FlowFixMe
storage._syncUplinksMetadata('@verdaccio/404', null, {}, (err, metadata, errors) => {
expect(errors).toBeInstanceOf(Array);
expect(errors[0][0].statusCode).toBe(404);
expect(errors[0][0].message).toMatch(/package doesn't exist on uplink/);
done();
});
});
});


@ -6,13 +6,20 @@ import _ from 'lodash';
import configExample from './partials/config';
import {setup} from '../../src/lib/logger';
import type {UpLinkConf, Config} from '@verdaccio/types';
setup([]);
describe('UpStorge', () => {
const uplinkDefault = {
url: 'https://registry.npmjs.org/'
const uplinkDefault: UpLinkConf = {
url: 'https://registry.npmjs.org/',
fail_timeout: '5m',
max_fails: 2,
maxage: '2m',
timeout: '1m',
};
let generateProxy = (config: UpLinkConf = uplinkDefault) => {
const appConfig: Config = new AppConfig(configExample);


@ -24,9 +24,9 @@ export default {
plugins: [
new webpack.DefinePlugin({
'__DEBUG__': true,
__DEBUG__: true,
'process.env.NODE_ENV': '"development"',
'__APP_VERSION__': `"${getPackageVersion()}"`,
__APP_VERSION__: `"${getPackageVersion()}"`,
}),
new HTMLWebpackPlugin({
title: 'Verdaccio',


@ -18,9 +18,9 @@ const prodConf = {
plugins: [
new webpack.DefinePlugin({
'__DEBUG__': false,
__DEBUG__: false,
'process.env.NODE_ENV': '"production"',
'__APP_VERSION__': `"${getPackageVersion()}"`,
__APP_VERSION__: `"${getPackageVersion()}"`,
}),
new webpack.optimize.UglifyJsPlugin({
sourceMap: true,

yarn.lock (binary file not shown)