Mirror of https://github.com/verdaccio/verdaccio.git (synced 2024-12-30 22:34:10 -05:00)

feat: adds gravatar support for maintainers

This commit is contained in:
  parent 61e4e56a76
  commit 4df6b3b252

4 changed files with 357 additions and 133 deletions
src/lib/constants.js

@@ -63,7 +63,6 @@ export const API_MESSAGE = {
  TAG_UPDATED: 'tags updated',
  TAG_REMOVED: 'tag removed',
  TAG_ADDED: 'package tagged',
-
};

export const API_ERROR = {
@@ -86,6 +85,7 @@ export const API_ERROR = {
  WEB_DISABLED: 'Web interface is disabled in the config file',
  DEPRECATED_BASIC_HEADER: 'basic authentication is deprecated, please use JWT instead',
  BAD_FORMAT_USER_GROUP: 'user groups is different than an array',
+  RESOURCE_UNAVAILABLE: 'resource unavailable',
};

export const APP_ERROR = {
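The new RESOURCE_UNAVAILABLE message is picked up further down in this diff as the default
message for ErrorCode.getServiceUnavailable. A minimal sketch of what that call then returns,
assuming the http-errors createError() helper already used in src/lib/utils.js and assuming
HTTP_STATUS.SERVICE_UNAVAILABLE maps to 503 (illustrative only, not code from this patch):

  const err = ErrorCode.getServiceUnavailable();
  // err.status  === HTTP_STATUS.SERVICE_UNAVAILABLE   (503, assumed value)
  // err.message === API_ERROR.RESOURCE_UNAVAILABLE    ('resource unavailable')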
src/lib/utils.js (176 changed lines)
@@ -9,7 +9,12 @@ import asciidoctor from 'asciidoctor.js';
import createError from 'http-errors';
import marked from 'marked';

-import {HTTP_STATUS, API_ERROR, DEFAULT_PORT, DEFAULT_DOMAIN} from './constants';
+import {
+  HTTP_STATUS,
+  API_ERROR,
+  DEFAULT_PORT,
+  DEFAULT_DOMAIN,
+} from './constants';
import {generateGravatarUrl} from '../utils/user';

import type {Package} from '@verdaccio/types';
@@ -44,7 +49,11 @@ function validate_package(name: any): boolean {
    return validateName(name[0]);
  } else {
    // scoped package
-    return name[0][0] === '@' && validateName(name[0].slice(1)) && validateName(name[1]);
+    return (
+      name[0][0] === '@' &&
+      validateName(name[0].slice(1)) &&
+      validateName(name[1])
+    );
  }
}

@@ -60,13 +69,14 @@ function validateName(name: string): boolean {
  name = name.toLowerCase();

  // all URL-safe characters and "@" for issue #75
-  return !(!name.match(/^[-a-zA-Z0-9_.!~*'()@]+$/)
-    || name.charAt(0) === '.' // ".bin", etc.
-    || name.charAt(0) === '-' // "-" is reserved by couchdb
-    || name === 'node_modules'
-    || name === '__proto__'
-    || name === 'package.json'
-    || name === 'favicon.ico'
+  return !(
+    !name.match(/^[-a-zA-Z0-9_.!~*'()@]+$/) ||
+    name.charAt(0) === '.' || // ".bin", etc.
+    name.charAt(0) === '-' || // "-" is reserved by couchdb
+    name === 'node_modules' ||
+    name === '__proto__' ||
+    name === 'package.json' ||
+    name === 'favicon.ico'
  );
}

@@ -109,15 +119,17 @@ function validate_metadata(object: Package, name: string) {
 * Create base url for registry.
 * @return {String} base registry url
 */
-function combineBaseUrl(protocol: string, host: string, prefix?: string): string {
+function combineBaseUrl(
+  protocol: string,
+  host: string,
+  prefix?: string
+): string {
  let result = `${protocol}://${host}`;

  if (prefix) {
    prefix = prefix.replace(/\/$/, '');

-    result = (prefix.indexOf('/') === 0)
-      ? `${result}${prefix}`
-      : prefix;
+    result = prefix.indexOf('/') === 0 ? `${result}${prefix}` : prefix;
  }

  return result;
@@ -135,13 +147,25 @@ export function extractTarballFromUrl(url: string) {
 * @param {*} config
 * @return {String} a filtered package
 */
-export function convertDistRemoteToLocalTarballUrls(pkg: Package, req: $Request, urlPrefix: string | void) {
+export function convertDistRemoteToLocalTarballUrls(
+  pkg: Package,
+  req: $Request,
+  urlPrefix: string | void
+) {
  for (let ver in pkg.versions) {
    if (Object.prototype.hasOwnProperty.call(pkg.versions, ver)) {
      const distName = pkg.versions[ver].dist;

-      if (_.isNull(distName) === false && _.isNull(distName.tarball) === false) {
-        distName.tarball = getLocalRegistryTarballUri(distName.tarball, pkg.name, req, urlPrefix);
+      if (
+        _.isNull(distName) === false &&
+        _.isNull(distName.tarball) === false
+      ) {
+        distName.tarball = getLocalRegistryTarballUri(
+          distName.tarball,
+          pkg.name,
+          req,
+          urlPrefix
+        );
      }
    }
  }
@@ -153,14 +177,23 @@ export function convertDistRemoteToLocalTarballUrls(pkg: Package, req: $Request,
 * @param {*} uri
 * @return {String} a parsed url
 */
-export function getLocalRegistryTarballUri(uri: string, pkgName: string, req: $Request, urlPrefix: string | void) {
+export function getLocalRegistryTarballUri(
+  uri: string,
+  pkgName: string,
+  req: $Request,
+  urlPrefix: string | void
+) {
  const currentHost = req.headers.host;

  if (!currentHost) {
    return uri;
  }
  const tarballName = extractTarballFromUrl(uri);
-  const domainRegistry = combineBaseUrl(getWebProtocol(req), req.headers.host, urlPrefix);
+  const domainRegistry = combineBaseUrl(
+    getWebProtocol(req),
+    req.headers.host,
+    urlPrefix
+  );

  return `${domainRegistry}/${pkgName.replace(/\//g, '%2f')}/-/${tarballName}`;
}
@@ -227,7 +260,9 @@ function parse_address(urlAddress: any) {
  // TODO: refactor it to something more reasonable?
  //
  // protocol : // ( host )|( ipv6 ): port /
-  let urlPattern = /^((https?):(\/\/)?)?((([^\/:]*)|\[([^\[\]]+)\]):)?(\d+)\/?$/.exec(urlAddress);
+  let urlPattern = /^((https?):(\/\/)?)?((([^\/:]*)|\[([^\[\]]+)\]):)?(\d+)\/?$/.exec(
+    urlAddress
+  );

  if (urlPattern) {
    return {
@@ -254,9 +289,10 @@ function parse_address(urlAddress: any) {
 * @return {Array} sorted Array
 */
function semverSort(listVersions: Array<string>): string[] {
-  return listVersions.filter(function(x) {
+  return listVersions
+    .filter(function(x) {
      if (!semver.parse(x, true)) {
-        Logger.logger.warn( {ver: x}, 'ignoring bad version @{ver}' );
+        Logger.logger.warn({ver: x}, 'ignoring bad version @{ver}');
        return false;
      }
      return true;
@@ -275,7 +311,7 @@ export function normalizeDistTags(pkg: Package) {
    // overwrite latest with highest known version based on semver sort
    sorted = semverSort(Object.keys(pkg.versions));
    if (sorted && sorted.length) {
-       pkg[DIST_TAGS].latest = sorted.pop();
+      pkg[DIST_TAGS].latest = sorted.pop();
    }
  }

@@ -286,13 +322,13 @@ export function normalizeDistTags(pkg: Package) {
      // $FlowFixMe
      sorted = semverSort(pkg[DIST_TAGS][tag]);
      if (sorted.length) {
-          // use highest version based on semver sort
-          pkg[DIST_TAGS][tag] = sorted.pop();
+        // use highest version based on semver sort
+        pkg[DIST_TAGS][tag] = sorted.pop();
      }
    } else {
      delete pkg[DIST_TAGS][tag];
    }
-  } else if (_.isString(pkg[DIST_TAGS][tag] )) {
+  } else if (_.isString(pkg[DIST_TAGS][tag])) {
    if (!semver.parse(pkg[DIST_TAGS][tag], true)) {
      // if the version is invalid, delete the dist-tag entry
      delete pkg[DIST_TAGS][tag];
@@ -305,12 +341,12 @@ const parseIntervalTable = {
  '': 1000,
  ms: 1,
  s: 1000,
-  m: 60*1000,
-  h: 60*60*1000,
+  m: 60 * 1000,
+  h: 60 * 60 * 1000,
  d: 86400000,
-  w: 7*86400000,
-  M: 30*86400000,
-  y: 365*86400000,
+  w: 7 * 86400000,
+  M: 30 * 86400000,
+  y: 365 * 86400000,
};

/**
@@ -319,7 +355,7 @@ const parseIntervalTable = {
 * @return {Number}
 */
function parseInterval(interval: any) {
-  if (typeof(interval) === 'number') {
+  if (typeof interval === 'number') {
    return interval * 1000;
  }
  let result = 0;
@@ -327,9 +363,11 @@ function parseInterval(interval: any) {
  interval.split(/\s+/).forEach(function(x) {
    if (!x) return;
    let m = x.match(/^((0|[1-9][0-9]*)(\.[0-9]+)?)(ms|s|m|h|d|w|M|y|)$/);
-    if (!m
-        || parseIntervalTable[m[4]] >= last_suffix
-        || (m[4] === '' && last_suffix !== Infinity)) {
+    if (
+      !m ||
+      parseIntervalTable[m[4]] >= last_suffix ||
+      (m[4] === '' && last_suffix !== Infinity)
+    ) {
      throw Error('invalid interval: ' + interval);
    }
    last_suffix = parseIntervalTable[m[4]];
@@ -362,24 +400,31 @@ const ErrorCode = {
    return createError(HTTP_STATUS.BAD_REQUEST, customMessage);
  },
  getInternalError: (customMessage?: string) => {
-    return customMessage ? createError(HTTP_STATUS.INTERNAL_ERROR, customMessage)
+    return customMessage
+      ? createError(HTTP_STATUS.INTERNAL_ERROR, customMessage)
      : createError(HTTP_STATUS.INTERNAL_ERROR);
  },
  getForbidden: (message: string = 'can\'t use this filename') => {
    return createError(HTTP_STATUS.FORBIDDEN, message);
  },
-  getServiceUnavailable: (message: string = 'resource temporarily unavailable') => {
+  getServiceUnavailable: (
+    message: string = API_ERROR.RESOURCE_UNAVAILABLE
+  ) => {
    return createError(HTTP_STATUS.SERVICE_UNAVAILABLE, message);
  },
  getNotFound: (customMessage?: string) => {
-    return createError(HTTP_STATUS.NOT_FOUND, customMessage || API_ERROR.NO_PACKAGE);
+    return createError(
+      HTTP_STATUS.NOT_FOUND,
+      customMessage || API_ERROR.NO_PACKAGE
+    );
  },
  getCode: (statusCode: number, customMessage: string) => {
    return createError(statusCode, customMessage);
  },
};

-const parseConfigFile = (configPath: string) => YAML.safeLoad(fs.readFileSync(configPath, 'utf8'));
+const parseConfigFile = (configPath: string) =>
+  YAML.safeLoad(fs.readFileSync(configPath, 'utf8'));

/**
 * Check whether the path already exist.
@@ -431,28 +476,42 @@ function deleteProperties(propertiesToDelete: Array<string>, objectItem: any) {
  return objectItem;
}

-function addGravatarSupport(pkgInfo: any) {
-  if (_.isString(_.get(pkgInfo, 'latest.author.email'))) {
-    pkgInfo.latest.author.avatar = generateGravatarUrl(pkgInfo.latest.author.email);
-  } else {
-    // _.get can't guarantee author property exist
-    _.set(pkgInfo, 'latest.author.avatar', generateGravatarUrl());
+function addGravatarSupport(pkgInfo: Object): Object {
+  const pkgInfoCopy = {...pkgInfo};
+  const author = _.get(pkgInfo, 'latest.author', null);
+  const contributors = _.get(pkgInfo, 'latest.contributors', []);
+  const maintainers = _.get(pkgInfo, 'latest.maintainers', []);
+
+  // for author.
+  if (author && _.isObject(author)) {
+    pkgInfoCopy.latest.author.avatar = generateGravatarUrl(author.email);
  }

-  if (_.get(pkgInfo, 'latest.contributors.length', 0) > 0) {
-    pkgInfo.latest.contributors = _.map(pkgInfo.latest.contributors, (contributor) => {
-      if (_.isString(contributor.email)) {
-        contributor.avatar = generateGravatarUrl(contributor.email);
-      } else {
-        contributor.avatar = generateGravatarUrl();
-      }
-
-      return contributor;
-      }
-    );
+  if (author && _.isString(author)) {
+    pkgInfoCopy.latest.author = {
+      avatar: generateGravatarUrl(),
+      email: '',
+      author,
+    };
+  }

-  return pkgInfo;
+  // for contributors
+  if (_.isEmpty(contributors) === false) {
+    pkgInfoCopy.latest.contributors = contributors.map((contributor) => {
+      contributor.avatar = generateGravatarUrl(contributor.email);
+      return contributor;
+    });
+  }
+
+  // for maintainers
+  if (_.isEmpty(maintainers) === false) {
+    pkgInfoCopy.latest.maintainers = maintainers.map((maintainer) => {
+      maintainer.avatar = generateGravatarUrl(maintainer.email);
+      return maintainer;
+    });
+  }
+
+  return pkgInfoCopy;
}

/**
@@ -467,7 +526,10 @@ function parseReadme(packageName: string, readme: string): string {
  // asciidoc
  if (docTypeIdentifier.test(readme)) {
    const ascii = asciidoctor();
-    return ascii.convert(readme, {safe: 'safe', attributes: {showtitle: true, icons: 'font'}});
+    return ascii.convert(readme, {
+      safe: 'safe',
+      attributes: {showtitle: true, icons: 'font'},
+    });
  }

  if (readme) {
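Taken together, the utils.js changes above mean addGravatarSupport now decorates the author,
contributors and maintainers entries of a package's latest version with a gravatar URL, instead
of only the author. A rough usage sketch, mirroring the unit tests further down in this diff
(illustrative only, not code from the patch; the import path is the one the tests use):

  import { addGravatarSupport } from '../../../src/lib/utils';

  const pkgInfo = {
    latest: {
      author: { name: 'verdaccio', email: 'user@verdccio.org' },
      maintainers: [{ name: 'user', email: 'user@verdccio.org' }],
    },
  };

  const decorated = addGravatarSupport(pkgInfo);
  // decorated.latest.author.avatar         -> 'https://www.gravatar.com/avatar/<md5 of the email>'
  // decorated.latest.maintainers[0].avatar -> 'https://www.gravatar.com/avatar/<md5 of the email>'
  // a string author, or a missing email, falls back to the generic GRAVATAR_DEFAULT image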
src/utils/user.js

@@ -1,18 +1,18 @@
// @flow
import {stringToMD5} from '../lib/crypto-utils';
+import _ from 'lodash';

-export const GRAVATAR_DEFAULT = 'https://www.gravatar.com/avatar/00000000000000000000000000000000?d=mm';
+export const GRAVATAR_DEFAULT =
+  'https://www.gravatar.com/avatar/00000000000000000000000000000000?d=mm';
/**
 * Generate gravatar url from email address
 */
-export function generateGravatarUrl(email?: string): string {
-  if (typeof email === 'string') {
-    email = email.trim().toLocaleLowerCase();
-    const emailMD5 = stringToMD5(email);
+export function generateGravatarUrl(email: string = ''): string {
+  let emailCopy = email;
+  if (_.isString(email) && _.size(email) > 0) {
+    emailCopy = email.trim().toLocaleLowerCase();
+    const emailMD5 = stringToMD5(emailCopy);
    return `https://www.gravatar.com/avatar/${emailMD5}`;
-  } else {
-    return GRAVATAR_DEFAULT;
  }
+  return GRAVATAR_DEFAULT;
}
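For reference, what the rewritten helper boils down to, sketched with Node's built-in crypto
module standing in for the stringToMD5 wrapper (an assumption about that helper; illustrative
only, not code from this patch):

  const crypto = require('crypto');

  // generateGravatarUrl('User@Verdaccio.ORG ') normalises the address first ...
  const email = 'User@Verdaccio.ORG '.trim().toLocaleLowerCase();
  // ... then hashes it and builds the avatar URL
  const hash = crypto.createHash('md5').update(email).digest('hex');
  const avatar = `https://www.gravatar.com/avatar/${hash}`;

  // with no email (or an empty string) the function now returns the generic image:
  // GRAVATAR_DEFAULT === 'https://www.gravatar.com/avatar/00000000000000000000000000000000?d=mm'

The remaining hunks update the unit tests that exercise generateGravatarUrl, addGravatarSupport
and the reformatted helpers in src/lib/utils.js.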
@@ -1,18 +1,23 @@
// @flow
import assert from 'assert';
-import {generateGravatarUrl, GRAVATAR_DEFAULT} from '../../../src/utils/user';
-import {spliceURL} from '../../../src/utils/string';
-import Package from "../../../src/webui/components/Package/index";
-import {validateName as validate, convertDistRemoteToLocalTarballUrls, parseReadme} from '../../../src/lib/utils';
-import Logger, {setup} from '../../../src/lib/logger';
+import { generateGravatarUrl, GRAVATAR_DEFAULT } from '../../../src/utils/user';
+import { spliceURL } from '../../../src/utils/string';
+import Package from '../../../src/webui/components/Package/index';
+import {
+  validateName as validate,
+  convertDistRemoteToLocalTarballUrls,
+  parseReadme,
+  addGravatarSupport
+} from '../../../src/lib/utils';
+import Logger, { setup } from '../../../src/lib/logger';
import { readFile } from '../../functional/lib/test.utils';

-const readmeFile = (fileName: string = 'markdown.md') => readFile(`../../unit/partials/readme/${fileName}`);
+const readmeFile = (fileName: string = 'markdown.md') =>
+  readFile(`../../unit/partials/readme/${fileName}`);

setup([]);

describe('Utilities', () => {

  describe('String utilities', () => {
    test('should splice two strings and generate a url', () => {
      const url: string = spliceURL('http://domain.com', '/-/static/logo.png');
@@ -28,107 +33,122 @@ describe('Utilities', () => {
  });

  describe('User utilities', () => {
    test('should generate gravatar url with email', () => {
      const gravatarUrl: string = generateGravatarUrl('user@verdaccio.org');

      expect(gravatarUrl).toMatch('https://www.gravatar.com/avatar/');
      expect(gravatarUrl).not.toMatch('000000000');
    });

    test('should generate generic gravatar url', () => {
      const gravatarUrl: string = generateGravatarUrl();

      expect(gravatarUrl).toMatch(GRAVATAR_DEFAULT);
    });
  });

  describe('Validations', () => {
    test('good ones', () => {
-      assert( validate('verdaccio') );
-      assert( validate('some.weird.package-zzz') );
-      assert( validate('old-package@0.1.2.tgz') );
+      assert(validate('verdaccio'));
+      assert(validate('some.weird.package-zzz'));
+      assert(validate('old-package@0.1.2.tgz'));
    });

    test('uppercase', () => {
-      assert( validate('EVE') );
-      assert( validate('JSONStream') );
+      assert(validate('EVE'));
+      assert(validate('JSONStream'));
    });

    test('no package.json', () => {
-      assert( !validate('package.json') );
+      assert(!validate('package.json'));
    });

    test('no path seps', () => {
-      assert( !validate('some/thing') );
-      assert( !validate('some\\thing') );
+      assert(!validate('some/thing'));
+      assert(!validate('some\\thing'));
    });

    test('no hidden', () => {
-      assert( !validate('.bin') );
+      assert(!validate('.bin'));
    });

    test('no reserved', () => {
-      assert( !validate('favicon.ico') );
-      assert( !validate('node_modules') );
-      assert( !validate('__proto__') );
+      assert(!validate('favicon.ico'));
+      assert(!validate('node_modules'));
+      assert(!validate('__proto__'));
    });

    test('other', () => {
-      assert( !validate('pk g') );
-      assert( !validate('pk\tg') );
-      assert( !validate('pk%20g') );
-      assert( !validate('pk+g') );
-      assert( !validate('pk:g') );
+      assert(!validate('pk g'));
+      assert(!validate('pk\tg'));
+      assert(!validate('pk%20g'));
+      assert(!validate('pk+g'));
+      assert(!validate('pk:g'));
    });
  });

  describe('Packages utilities', () => {
    const metadata: Package = {
-      "name": "npm_test",
-      "versions": {
-        "1.0.0": {
-          "dist": {
-            "tarball": "http:\/\/registry.org\/npm_test\/-\/npm_test-1.0.0.tgz"
+      name: 'npm_test',
+      versions: {
+        '1.0.0': {
+          dist: {
+            tarball: 'http://registry.org/npm_test/-/npm_test-1.0.0.tgz'
          }
        },
-        "1.0.1": {
-          "dist": {
-            "tarball": "http:\/\/registry.org\/npm_test\/-\/npm_test-1.0.1.tgz"
+        '1.0.1': {
+          dist: {
+            tarball: 'http://registry.org/npm_test/-/npm_test-1.0.1.tgz'
          }
        }
      }
    };

-    const buildURI = (host, version) => `http://${host}/npm_test/-/npm_test-${version}.tgz`;
+    const buildURI = (host, version) =>
+      `http://${host}/npm_test/-/npm_test-${version}.tgz`;
    const host = 'fake.com';

    test('convertDistRemoteToLocalTarballUrls', () => {
-      // $FlowFixMe
-      const convertDist = convertDistRemoteToLocalTarballUrls(Object.assign({}, metadata), {
-        headers: {
-          host,
-        },
-        get: ()=> 'http',
-        protocol: 'http'
-      }, '');
+      const convertDist = convertDistRemoteToLocalTarballUrls(
+        Object.assign({}, metadata),
+        // $FlowFixMe
+        {
+          headers: {
+            host
+          },
+          get: () => 'http',
+          protocol: 'http'
+        },
+        ''
+      );

-      expect(convertDist.versions['1.0.0'].dist.tarball).toEqual(buildURI(host, '1.0.0'));
-      expect(convertDist.versions['1.0.1'].dist.tarball).toEqual(buildURI(host, '1.0.1'));
+      expect(convertDist.versions['1.0.0'].dist.tarball).toEqual(
+        buildURI(host, '1.0.0')
+      );
+      expect(convertDist.versions['1.0.1'].dist.tarball).toEqual(
+        buildURI(host, '1.0.1')
+      );
    });
  });

  describe('parseReadme', () => {
    test('should pass for ascii text to html template', () => {
-      const ascii = "= AsciiDoc";
+      const ascii = '= AsciiDoc';

      expect(parseReadme('testPackage', ascii)).toEqual('<h1>AsciiDoc</h1>\n');
-      expect(parseReadme('testPackage', String(readmeFile('ascii.adoc')))).toMatchSnapshot();
+      expect(
+        parseReadme('testPackage', String(readmeFile('ascii.adoc')))
+      ).toMatchSnapshot();
    });

    test('should pass for makrdown text to html template', () => {
      const markdown = '# markdown';
-      expect(parseReadme('testPackage', markdown)).toEqual('<h1 id="markdown">markdown</h1>\n');
-      expect(parseReadme('testPackage', String(readmeFile('markdown.md')))).toMatchSnapshot();
+      expect(parseReadme('testPackage', markdown)).toEqual(
+        '<h1 id="markdown">markdown</h1>\n'
+      );
+      expect(
+        parseReadme('testPackage', String(readmeFile('markdown.md')))
+      ).toMatchSnapshot();
    });

    test('should pass for conversion of non-ascii to markdown text', () => {
@@ -137,20 +157,162 @@ describe('Utilities', () => {
      const randomTextNonAscii = 'simple text \n = ascii';
      const randomTextMarkdown = 'simple text \n # markdown';

-      expect(parseReadme('testPackage', randomText)).toEqual('<p>%%%%%**##==</p>\n');
-      expect(parseReadme('testPackage', simpleText)).toEqual('<p>simple text</p>\n');
-      expect(parseReadme('testPackage', randomTextNonAscii))
-        .toEqual('<p>simple text \n = ascii</p>\n');
-      expect(parseReadme('testPackage', randomTextMarkdown))
-        .toEqual('<p>simple text </p>\n<h1 id="markdown">markdown</h1>\n');
+      expect(parseReadme('testPackage', randomText)).toEqual(
+        '<p>%%%%%**##==</p>\n'
+      );
+      expect(parseReadme('testPackage', simpleText)).toEqual(
+        '<p>simple text</p>\n'
+      );
+      expect(parseReadme('testPackage', randomTextNonAscii)).toEqual(
+        '<p>simple text \n = ascii</p>\n'
+      );
+      expect(parseReadme('testPackage', randomTextMarkdown)).toEqual(
+        '<p>simple text </p>\n<h1 id="markdown">markdown</h1>\n'
+      );
    });

    test('should show error for no readme data', () => {
      const noData = '';
-      const spy = jest.spyOn(Logger.logger, 'error')
-      expect(parseReadme('testPackage', noData))
-        .toEqual('<p>ERROR: No README data found!</p>\n');
-      expect(spy).toHaveBeenCalledWith({'packageName': 'testPackage'}, '@{packageName}: No readme found');
+      const spy = jest.spyOn(Logger.logger, 'error');
+      expect(parseReadme('testPackage', noData)).toEqual(
+        '<p>ERROR: No README data found!</p>\n'
+      );
+      expect(spy).toHaveBeenCalledWith(
+        { packageName: 'testPackage' },
+        '@{packageName}: No readme found'
+      );
    });
  });

+  describe('addGravatarSupport', () => {
+    test('check for blank object', () => {
+      expect(addGravatarSupport({})).toEqual({});
+    });
+
+    test('author, contributors and maintainers fields are not present', () => {
+      const packageInfo = {
+        latest: {}
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(packageInfo);
+    });
+
+    test('author field is a blank object', () => {
+      const packageInfo = { latest: { author: {} } };
+      expect(addGravatarSupport(packageInfo)).toEqual(packageInfo);
+    });
+
+    test('author field is a string type', () => {
+      const packageInfo = {
+        latest: { author: 'user@verdccio.org' }
+      };
+      const result = {
+        latest: {
+          author: {
+            author: 'user@verdccio.org',
+            avatar:
+              'https://www.gravatar.com/avatar/00000000000000000000000000000000?d=mm',
+            email: ''
+          }
+        }
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(result);
+    });
+
+    test('author field is an object type with author information', () => {
+      const packageInfo = {
+        latest: { author: { name: 'verdaccio', email: 'user@verdccio.org' } }
+      };
+      const result = {
+        latest: {
+          author: {
+            avatar:
+              'https://www.gravatar.com/avatar/794d7f6ef93d0689437de3c3e48fadc7',
+            email: 'user@verdccio.org',
+            name: 'verdaccio'
+          }
+        }
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(result);
+    });
+
+    test('contributor field is a blank array', () => {
+      const packageInfo = {
+        latest: {
+          contributors: []
+        }
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(packageInfo);
+    });
+
+    test('contributors field has contributors', () => {
+      const packageInfo = {
+        latest: {
+          contributors: [
+            { name: 'user', email: 'user@verdccio.org' },
+            { name: 'user1', email: 'user1@verdccio.org' }
+          ]
+        }
+      };
+
+      const result = {
+        latest: {
+          contributors: [
+            {
+              avatar:
+                'https://www.gravatar.com/avatar/794d7f6ef93d0689437de3c3e48fadc7',
+              email: 'user@verdccio.org',
+              name: 'user'
+            },
+            {
+              avatar:
+                'https://www.gravatar.com/avatar/51105a49ce4a9c2bfabf0f6a2cba3762',
+              email: 'user1@verdccio.org',
+              name: 'user1'
+            }
+          ]
+        }
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(result);
+    });
+
+    test('maintainers field is a blank array', () => {
+      const packageInfo = {
+        latest: {
+          maintainers: []
+        }
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(packageInfo);
+    });
+
+    test('maintainers field has maintainers', () => {
+      const packageInfo = {
+        latest: {
+          maintainers: [
+            { name: 'user', email: 'user@verdccio.org' },
+            { name: 'user1', email: 'user1@verdccio.org' }
+          ]
+        }
+      };
+
+      const result = {
+        latest: {
+          maintainers: [
+            {
+              avatar:
+                'https://www.gravatar.com/avatar/794d7f6ef93d0689437de3c3e48fadc7',
+              email: 'user@verdccio.org',
+              name: 'user'
+            },
+            {
+              avatar:
+                'https://www.gravatar.com/avatar/51105a49ce4a9c2bfabf0f6a2cba3762',
+              email: 'user1@verdccio.org',
+              name: 'user1'
+            }
+          ]
+        }
+      };
+      expect(addGravatarSupport(packageInfo)).toEqual(result);
+    });
+  });
});