Merge pull request #376 from verdaccio/3.x
The first merge for Release 3.x

Commit 09606fd4f4: 179 changed files with 9864 additions and 4654 deletions
.babelrc (63 changes)

@@ -1,28 +1,45 @@
{
  "presets": [
    "react",
    ["env",{
      "targets": {
        "browsers": [
          "last 5 versions",
          "FireFox >= 44",
          "Safari >= 7",
          "Explorer 11",
          "last 4 Edge versions"
        ],
        "loose": true
      }
    }],
    "stage-2",
    "stage-3"
  ],
  "plugins": [
    "react-hot-loader/babel",
    "transform-runtime",
    "transform-object-rest-spread",
    "transform-decorators-legacy"
  ],
  "env": {
    "ui": {
      "presets": [
        "react",
        ["env",{
          "targets": {
            "browsers": [
              "last 5 versions",
              "FireFox >= 44",
              "Safari >= 7",
              "Explorer 11",
              "last 4 Edge versions"
            ],
            "loose": true
          }
        }],
        "stage-2",
        "stage-3"
      ],
      "plugins": [
        "react-hot-loader/babel",
        "transform-runtime",
        "transform-object-rest-spread",
        "transform-decorators-legacy"
      ]
    },
    "test": {
      "presets": [ "es2015-node4", "stage-2", "stage-3", "flow", "react"],
      "plugins": [
        "transform-class-properties",
        "transform-object-rest-spread"
      ]
    },
    "registry": {
      "presets": [
        "es2015-node4", "flow"
      ],
      "plugins": [
        "transform-object-rest-spread"
      ]
    },
    "development": {
      "presets": ["flow"],
      "plugins": [
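The `env` blocks above are what Babel 6 switches between at build time: it merges the block whose name matches `BABEL_ENV` (falling back to `NODE_ENV`, then `development`) on top of the base config. A minimal sketch of targeting one of them with babel-cli; the project's real build scripts live in package.json, which this excerpt does not show, so the command is an illustration only:

    # select the "registry" env block while compiling server-side sources (hypothetical invocation)
    BABEL_ENV=registry babel src/ --out-dir build/
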
@@ -12,9 +12,9 @@
!.eslintrc
!.eslintignore
!.stylelintrc

# not going to run tests inside the docker container
test/
!.flowconfig
!.jest.config.js
!.jestEnvironment.js

# do not copy over node_modules we will run `npm install` anyway
node_modules

@@ -7,6 +7,6 @@ end_of_line = lf
insert_final_newline = true

# 2 space indentation
[{.,}*.{js,yml,yaml}]
[{.,}*.{js,jsx,yml,yaml}]
indent_style = space
indent_size = 2

@@ -2,4 +2,6 @@ node_modules
coverage/
wiki/
static/
flow-typed/
website/
build/

.eslintrc (14 changes)

@@ -1,14 +1,17 @@
{
  "plugins": [
    "react",
    "flowtype"
    "flowtype",
    "jest"
  ],
  "extends": [
    "eslint:recommended",
    "google",
    "plugin:react/recommended",
    "plugin:flowtype/recommended"
    "plugin:flowtype/recommended",
    "plugin:jest/recommended"
  ],
  "parser": "babel-eslint",
  "parserOptions": {
    "sourceType": "module",
    "ecmaVersion": 7,
@@ -20,7 +23,8 @@
  "env": {
    "browser": true,
    "node": true,
    "es6": true
    "es6": true,
    "jest": true
  },
  "rules": {
    "no-tabs": 0,
@@ -57,8 +61,8 @@
      "always"
    ],
    "camelcase": 0,
    "require-jsdoc": 2,
    "valid-jsdoc": 2,
    "require-jsdoc": 0,
    "valid-jsdoc": 0,
    "prefer-spread": 1,
    "prefer-rest-params": 1,
    "no-var": 2,

.flowconfig (new file, 23 lines)

@@ -0,0 +1,23 @@
[ignore]
.*/node_modules/.*
.*/test/**/*.json
.*/static/.*
.*/test/unit/partials/.*
.*/.nyc_output/.*
.*/coverage/.*
.*/.vscode/.*
.*/build/.*

[include]

[libs]
node_modules/@verdaccio/types/lib/

[lints]

[options]
suppress_comment= \\(.\\|\n\\)*\\$FlowFixMe
unsafe.enable_getters_and_setters=true

[version]
^0.52.0

.gitignore (vendored, 4 changes)

@@ -1,6 +1,7 @@
npm-debug.log
verdaccio-*.tgz
.DS_Store
build/

###
!bin/verdaccio
@@ -8,6 +9,8 @@ test-storage*
.verdaccio_test_env
node_modules
package-lock.json
build/
npm_test-fails-add-tarball*


# Istanbul
@@ -24,7 +27,6 @@ coverage/
bundle.js
bundle.js.map
__tests__
__snapshots__

# Compiled script
static/

@@ -1,5 +1,5 @@
{
  "extends": "stylelint-config-standard",
  "extends": "stylelint-config-recommended-scss",
  "rules": {
    "selector-pseudo-class-no-unknown": [true, {
      "ignorePseudoClasses": ["/global/"]

.travis.yml (deleted, 11 lines)

@@ -1,11 +0,0 @@
language: node_js
node_js:
  - '4'
  - '6'
  - '7'
  - '8'
sudo: false
before_script:
  - npm install -g npm
script: npm install && npm run build:webui && npm run test-travis
after_success: npm run coverage:codecov

Dockerfile (13 changes)

@@ -1,10 +1,14 @@
FROM node:8.9.0-alpine
FROM node:9.2.0-alpine
LABEL maintainer="https://github.com/verdaccio/verdaccio"

RUN apk --no-cache add openssl && \
    wget -O /usr/local/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64 && \
    chmod +x /usr/local/bin/dumb-init && \
    apk del openssl
    apk del openssl && \
    apk --no-cache add ca-certificates wget && \
    wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub && \
    wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/2.25-r0/glibc-2.25-r0.apk && \
    apk add glibc-2.25-r0.apk

ENV APPDIR /usr/local/app

@@ -15,9 +19,12 @@ ADD . $APPDIR
ENV NODE_ENV=production

RUN npm config set registry http://registry.npmjs.org/ && \
    npm install -g -s --no-progress yarn@0.28.4 --pure-lockfile && \
    yarn global add -s flow-bin@0.52.0 && \
    yarn install --production=false && \
    yarn run lint && \
    yarn run code:build && \
    yarn run build:webui && \
    yarn run test:unit -- --silent true --coverage false --bail && \
    yarn cache clean && \
    yarn install --production=true --pure-lockfile

LICENSE (new file, 21 lines)

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 Verdaccio community

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

LICENSE-docs (new file, 384 lines)

@@ -0,0 +1,384 @@
Attribution 4.0 International
|
||||
|
||||
=======================================================================
|
||||
|
||||
Creative Commons Corporation ("Creative Commons") is not a law firm and
|
||||
does not provide legal services or legal advice. Distribution of
|
||||
Creative Commons public licenses does not create a lawyer-client or
|
||||
other relationship. Creative Commons makes its licenses and related
|
||||
information available on an "as-is" basis. Creative Commons gives no
|
||||
warranties regarding its licenses, any material licensed under their
|
||||
terms and conditions, or any related information. Creative Commons
|
||||
disclaims all liability for damages resulting from their use to the
|
||||
fullest extent possible.
|
||||
|
||||
Using Creative Commons Public Licenses
|
||||
|
||||
Creative Commons public licenses provide a standard set of terms and
|
||||
conditions that creators and other rights holders may use to share
|
||||
original works of authorship and other material subject to copyright
|
||||
and certain other rights specified in the public license below. The
|
||||
following considerations are for informational purposes only, are not
|
||||
exhaustive, and do not form part of our licenses.
|
||||
|
||||
Considerations for licensors: Our public licenses are
|
||||
intended for use by those authorized to give the public
|
||||
permission to use material in ways otherwise restricted by
|
||||
copyright and certain other rights. Our licenses are
|
||||
irrevocable. Licensors should read and understand the terms
|
||||
and conditions of the license they choose before applying it.
|
||||
Licensors should also secure all rights necessary before
|
||||
applying our licenses so that the public can reuse the
|
||||
material as expected. Licensors should clearly mark any
|
||||
material not subject to the license. This includes other CC-
|
||||
licensed material, or material used under an exception or
|
||||
limitation to copyright. More considerations for licensors:
|
||||
wiki.creativecommons.org/Considerations_for_licensors
|
||||
|
||||
Considerations for the public: By using one of our public
|
||||
licenses, a licensor grants the public permission to use the
|
||||
licensed material under specified terms and conditions. If
|
||||
the licensor's permission is not necessary for any reason--for
|
||||
example, because of any applicable exception or limitation to
|
||||
copyright--then that use is not regulated by the license. Our
|
||||
licenses grant only permissions under copyright and certain
|
||||
other rights that a licensor has authority to grant. Use of
|
||||
the licensed material may still be restricted for other
|
||||
reasons, including because others have copyright or other
|
||||
rights in the material. A licensor may make special requests,
|
||||
such as asking that all changes be marked or described.
|
||||
Although not required by our licenses, you are encouraged to
|
||||
respect those requests where reasonable. More_considerations
|
||||
for the public:
|
||||
wiki.creativecommons.org/Considerations_for_licensees
|
||||
|
||||
=======================================================================
|
||||
|
||||
Creative Commons Attribution 4.0 International Public License
|
||||
|
||||
By exercising the Licensed Rights (defined below), You accept and agree
|
||||
to be bound by the terms and conditions of this Creative Commons
|
||||
Attribution 4.0 International Public License ("Public License"). To the
|
||||
extent this Public License may be interpreted as a contract, You are
|
||||
granted the Licensed Rights in consideration of Your acceptance of
|
||||
these terms and conditions, and the Licensor grants You such rights in
|
||||
consideration of benefits the Licensor receives from making the
|
||||
Licensed Material available under these terms and conditions.
|
||||
|
||||
Section 1 -- Definitions.
|
||||
|
||||
a. Adapted Material means material subject to Copyright and Similar
|
||||
Rights that is derived from or based upon the Licensed Material
|
||||
and in which the Licensed Material is translated, altered,
|
||||
arranged, transformed, or otherwise modified in a manner requiring
|
||||
permission under the Copyright and Similar Rights held by the
|
||||
Licensor. For purposes of this Public License, where the Licensed
|
||||
Material is a musical work, performance, or sound recording,
|
||||
Adapted Material is always produced where the Licensed Material is
|
||||
synched in timed relation with a moving image.
|
||||
|
||||
b. Adapter's License means the license You apply to Your Copyright
|
||||
and Similar Rights in Your contributions to Adapted Material in
|
||||
accordance with the terms and conditions of this Public License.
|
||||
|
||||
c. Copyright and Similar Rights means copyright and/or similar rights
|
||||
closely related to copyright including, without limitation,
|
||||
performance, broadcast, sound recording, and Sui Generis Database
|
||||
Rights, without regard to how the rights are labeled or
|
||||
categorized. For purposes of this Public License, the rights
|
||||
specified in Section 2(b)(1)-(2) are not Copyright and Similar
|
||||
Rights.
|
||||
|
||||
d. Effective Technological Measures means those measures that, in the
|
||||
absence of proper authority, may not be circumvented under laws
|
||||
fulfilling obligations under Article 11 of the WIPO Copyright
|
||||
Treaty adopted on December 20, 1996, and/or similar international
|
||||
agreements.
|
||||
|
||||
e. Exceptions and Limitations means fair use, fair dealing, and/or
|
||||
any other exception or limitation to Copyright and Similar Rights
|
||||
that applies to Your use of the Licensed Material.
|
||||
|
||||
f. Licensed Material means the artistic or literary work, database,
|
||||
or other material to which the Licensor applied this Public
|
||||
License.
|
||||
|
||||
g. Licensed Rights means the rights granted to You subject to the
|
||||
terms and conditions of this Public License, which are limited to
|
||||
all Copyright and Similar Rights that apply to Your use of the
|
||||
Licensed Material and that the Licensor has authority to license.
|
||||
|
||||
h. Licensor means the individual(s) or entity(ies) granting rights
|
||||
under this Public License.
|
||||
|
||||
i. Share means to provide material to the public by any means or
|
||||
process that requires permission under the Licensed Rights, such
|
||||
as reproduction, public display, public performance, distribution,
|
||||
dissemination, communication, or importation, and to make material
|
||||
available to the public including in ways that members of the
|
||||
public may access the material from a place and at a time
|
||||
individually chosen by them.
|
||||
|
||||
j. Sui Generis Database Rights means rights other than copyright
|
||||
resulting from Directive 96/9/EC of the European Parliament and of
|
||||
the Council of 11 March 1996 on the legal protection of databases,
|
||||
as amended and/or succeeded, as well as other essentially
|
||||
equivalent rights anywhere in the world.
|
||||
|
||||
k. You means the individual or entity exercising the Licensed Rights
|
||||
under this Public License. Your has a corresponding meaning.
|
||||
|
||||
Section 2 -- Scope.
|
||||
|
||||
a. License grant.
|
||||
|
||||
1. Subject to the terms and conditions of this Public License,
|
||||
the Licensor hereby grants You a worldwide, royalty-free,
|
||||
non-sublicensable, non-exclusive, irrevocable license to
|
||||
exercise the Licensed Rights in the Licensed Material to:
|
||||
|
||||
a. reproduce and Share the Licensed Material, in whole or
|
||||
in part; and
|
||||
|
||||
b. produce, reproduce, and Share Adapted Material.
|
||||
|
||||
2. Exceptions and Limitations. For the avoidance of doubt, where
|
||||
Exceptions and Limitations apply to Your use, this Public
|
||||
License does not apply, and You do not need to comply with
|
||||
its terms and conditions.
|
||||
|
||||
3. Term. The term of this Public License is specified in Section
|
||||
6(a).
|
||||
|
||||
4. Media and formats; technical modifications allowed. The
|
||||
Licensor authorizes You to exercise the Licensed Rights in
|
||||
all media and formats whether now known or hereafter created,
|
||||
and to make technical modifications necessary to do so. The
|
||||
Licensor waives and/or agrees not to assert any right or
|
||||
authority to forbid You from making technical modifications
|
||||
necessary to exercise the Licensed Rights, including
|
||||
technical modifications necessary to circumvent Effective
|
||||
Technological Measures. For purposes of this Public License,
|
||||
simply making modifications authorized by this Section 2(a)
|
||||
(4) never produces Adapted Material.
|
||||
|
||||
5. Downstream recipients.
|
||||
|
||||
a. Offer from the Licensor -- Licensed Material. Every
|
||||
recipient of the Licensed Material automatically
|
||||
receives an offer from the Licensor to exercise the
|
||||
Licensed Rights under the terms and conditions of this
|
||||
Public License.
|
||||
|
||||
b. No downstream restrictions. You may not offer or impose
|
||||
any additional or different terms or conditions on, or
|
||||
apply any Effective Technological Measures to, the
|
||||
Licensed Material if doing so restricts exercise of the
|
||||
Licensed Rights by any recipient of the Licensed
|
||||
Material.
|
||||
|
||||
6. No endorsement. Nothing in this Public License constitutes or
|
||||
may be construed as permission to assert or imply that You
|
||||
are, or that Your use of the Licensed Material is, connected
|
||||
with, or sponsored, endorsed, or granted official status by,
|
||||
the Licensor or others designated to receive attribution as
|
||||
provided in Section 3(a)(1)(A)(i).
|
||||
|
||||
b. Other rights.
|
||||
|
||||
1. Moral rights, such as the right of integrity, are not
|
||||
licensed under this Public License, nor are publicity,
|
||||
privacy, and/or other similar personality rights; however, to
|
||||
the extent possible, the Licensor waives and/or agrees not to
|
||||
assert any such rights held by the Licensor to the limited
|
||||
extent necessary to allow You to exercise the Licensed
|
||||
Rights, but not otherwise.
|
||||
|
||||
2. Patent and trademark rights are not licensed under this
|
||||
Public License.
|
||||
|
||||
3. To the extent possible, the Licensor waives any right to
|
||||
collect royalties from You for the exercise of the Licensed
|
||||
Rights, whether directly or through a collecting society
|
||||
under any voluntary or waivable statutory or compulsory
|
||||
licensing scheme. In all other cases the Licensor expressly
|
||||
reserves any right to collect such royalties.
|
||||
|
||||
Section 3 -- License Conditions.
|
||||
|
||||
Your exercise of the Licensed Rights is expressly made subject to the
|
||||
following conditions.
|
||||
|
||||
a. Attribution.
|
||||
|
||||
1. If You Share the Licensed Material (including in modified
|
||||
form), You must:
|
||||
|
||||
a. retain the following if it is supplied by the Licensor
|
||||
with the Licensed Material:
|
||||
|
||||
i. identification of the creator(s) of the Licensed
|
||||
Material and any others designated to receive
|
||||
attribution, in any reasonable manner requested by
|
||||
the Licensor (including by pseudonym if
|
||||
designated);
|
||||
|
||||
ii. a copyright notice;
|
||||
|
||||
iii. a notice that refers to this Public License;
|
||||
|
||||
iv. a notice that refers to the disclaimer of
|
||||
warranties;
|
||||
|
||||
v. a URI or hyperlink to the Licensed Material to the
|
||||
extent reasonably practicable;
|
||||
|
||||
b. indicate if You modified the Licensed Material and
|
||||
retain an indication of any previous modifications; and
|
||||
|
||||
c. indicate the Licensed Material is licensed under this
|
||||
Public License, and include the text of, or the URI or
|
||||
hyperlink to, this Public License.
|
||||
|
||||
2. You may satisfy the conditions in Section 3(a)(1) in any
|
||||
reasonable manner based on the medium, means, and context in
|
||||
which You Share the Licensed Material. For example, it may be
|
||||
reasonable to satisfy the conditions by providing a URI or
|
||||
hyperlink to a resource that includes the required
|
||||
information.
|
||||
|
||||
3. If requested by the Licensor, You must remove any of the
|
||||
information required by Section 3(a)(1)(A) to the extent
|
||||
reasonably practicable.
|
||||
|
||||
4. If You Share Adapted Material You produce, the Adapter's
|
||||
License You apply must not prevent recipients of the Adapted
|
||||
Material from complying with this Public License.
|
||||
|
||||
Section 4 -- Sui Generis Database Rights.
|
||||
|
||||
Where the Licensed Rights include Sui Generis Database Rights that
|
||||
apply to Your use of the Licensed Material:
|
||||
|
||||
a. for the avoidance of doubt, Section 2(a)(1) grants You the right
|
||||
to extract, reuse, reproduce, and Share all or a substantial
|
||||
portion of the contents of the database;
|
||||
|
||||
b. if You include all or a substantial portion of the database
|
||||
contents in a database in which You have Sui Generis Database
|
||||
Rights, then the database in which You have Sui Generis Database
|
||||
Rights (but not its individual contents) is Adapted Material; and
|
||||
|
||||
c. You must comply with the conditions in Section 3(a) if You Share
|
||||
all or a substantial portion of the contents of the database.
|
||||
|
||||
For the avoidance of doubt, this Section 4 supplements and does not
|
||||
replace Your obligations under this Public License where the Licensed
|
||||
Rights include other Copyright and Similar Rights.
|
||||
|
||||
Section 5 -- Disclaimer of Warranties and Limitation of Liability.
|
||||
|
||||
a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
|
||||
EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
|
||||
AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
|
||||
ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
|
||||
IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
|
||||
WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
|
||||
PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
|
||||
ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
|
||||
KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
|
||||
ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
|
||||
|
||||
b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
|
||||
TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
|
||||
NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
|
||||
INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
|
||||
COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
|
||||
USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
|
||||
ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
|
||||
DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
|
||||
IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
|
||||
|
||||
c. The disclaimer of warranties and limitation of liability provided
|
||||
above shall be interpreted in a manner that, to the extent
|
||||
possible, most closely approximates an absolute disclaimer and
|
||||
waiver of all liability.
|
||||
|
||||
Section 6 -- Term and Termination.
|
||||
|
||||
a. This Public License applies for the term of the Copyright and
|
||||
Similar Rights licensed here. However, if You fail to comply with
|
||||
this Public License, then Your rights under this Public License
|
||||
terminate automatically.
|
||||
|
||||
b. Where Your right to use the Licensed Material has terminated under
|
||||
Section 6(a), it reinstates:
|
||||
|
||||
1. automatically as of the date the violation is cured, provided
|
||||
it is cured within 30 days of Your discovery of the
|
||||
violation; or
|
||||
|
||||
2. upon express reinstatement by the Licensor.
|
||||
|
||||
For the avoidance of doubt, this Section 6(b) does not affect any
|
||||
right the Licensor may have to seek remedies for Your violations
|
||||
of this Public License.
|
||||
|
||||
c. For the avoidance of doubt, the Licensor may also offer the
|
||||
Licensed Material under separate terms or conditions or stop
|
||||
distributing the Licensed Material at any time; however, doing so
|
||||
will not terminate this Public License.
|
||||
|
||||
d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
|
||||
License.
|
||||
|
||||
Section 7 -- Other Terms and Conditions.
|
||||
|
||||
a. The Licensor shall not be bound by any additional or different
|
||||
terms or conditions communicated by You unless expressly agreed.
|
||||
|
||||
b. Any arrangements, understandings, or agreements regarding the
|
||||
Licensed Material not stated herein are separate from and
|
||||
independent of the terms and conditions of this Public License.
|
||||
|
||||
Section 8 -- Interpretation.
|
||||
|
||||
a. For the avoidance of doubt, this Public License does not, and
|
||||
shall not be interpreted to, reduce, limit, restrict, or impose
|
||||
conditions on any use of the Licensed Material that could lawfully
|
||||
be made without permission under this Public License.
|
||||
|
||||
b. To the extent possible, if any provision of this Public License is
|
||||
deemed unenforceable, it shall be automatically reformed to the
|
||||
minimum extent necessary to make it enforceable. If the provision
|
||||
cannot be reformed, it shall be severed from this Public License
|
||||
without affecting the enforceability of the remaining terms and
|
||||
conditions.
|
||||
|
||||
c. No term or condition of this Public License will be waived and no
|
||||
failure to comply consented to unless expressly agreed to by the
|
||||
Licensor.
|
||||
|
||||
d. Nothing in this Public License constitutes or may be interpreted
|
||||
as a limitation upon, or waiver of, any privileges and immunities
|
||||
that apply to the Licensor or You, including from the legal
|
||||
processes of any jurisdiction or authority.
|
||||
|
||||
=======================================================================
|
||||
|
||||
Creative Commons is not a party to its public licenses.
|
||||
Notwithstanding, Creative Commons may elect to apply one of its public
|
||||
licenses to material it publishes and in those instances will be
|
||||
considered the "Licensor." Except for the limited purpose of indicating
|
||||
that material is shared under a Creative Commons public license or as
|
||||
otherwise permitted by the Creative Commons policies published at
|
||||
creativecommons.org/policies, Creative Commons does not authorize the
|
||||
use of the trademark "Creative Commons" or any other trademark or logo
|
||||
of Creative Commons without its prior written consent including,
|
||||
without limitation, in connection with any unauthorized modifications
|
||||
to any of its public licenses or any other arrangements,
|
||||
understandings, or agreements concerning use of licensed material. For
|
||||
the avoidance of doubt, this paragraph does not form part of the public
|
||||
licenses.
|
||||
|
||||
Creative Commons may be contacted at creativecommons.org.
|
@@ -40,7 +40,7 @@ See in detail each of these [use cases](https://github.com/verdaccio/verdaccio/t

### Prerequisites

* Node.js >= 4.6.1
* Node.js >= `2.x` (4.6.1) | `3.x` (6.12.0)
* `npm` or `yarn`

Installation and starting (the application will create a default config in config.yaml that you can edit later)
@@ -225,3 +225,9 @@ If you have any issue you can try the following options, do not desist to ask or
* [Running discussions](https://github.com/verdaccio/verdaccio/issues?q=is%3Aissue+is%3Aopen+label%3Adiscuss)
* [Chat Room](https://gitter.im/verdaccio/)
* [Logos](https://github.com/verdaccio/verdaccio/tree/master/assets)


## License
Verdaccio is [MIT licensed](https://github.com/verdaccio/verdaccio/blob/master/LICENSE).

The Verdaccio documentation and logos (e.g., .md, .png, .sketch files in the /wiki and /assets folders) are [Creative Commons licensed](https://github.com/verdaccio/verdaccio/blob/master/LICENSE).

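For context, the installation step referenced in the README hunk above is the usual global install; a minimal sketch (the exact wording and commands live in the full README, not in this excerpt):

    # install Verdaccio globally and start it; a default config.yaml is created on first run
    npm install -g verdaccio
    verdaccio
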
@@ -1,3 +1,3 @@
#!/usr/bin/env node

require('../src/lib/cli');
require('../build/lib/cli');

circle.yml (16 changes)

@@ -1,9 +1,9 @@
machine:
  environment:
    YARN_VERSION: 0.27.5
    YARN_VERSION: 1.3.2
    PATH: "${PATH}:${HOME}/.yarn/bin:${HOME}/${CIRCLE_PROJECT_REPONAME}/node_modules/.bin"
  node:
    version: 8
    version: 9
dependencies:
  pre:
    - 'echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > ~/.npmrc'
@@ -11,8 +11,8 @@ dependencies:
      if [[ ! -e ~/.yarn/bin/yarn || $(yarn --version) != "${YARN_VERSION}" ]]; then
        curl -o- -L https://yarnpkg.com/install.sh | bash -s -- --version $YARN_VERSION
      fi
    - nvm install 4
    - nvm install 6
    - nvm install 8

  cache_directories:
    - ~/.yarn
@@ -24,12 +24,11 @@ test:
  override:
    - yarn run pre:ci
    - nvm alias default 6
    - yarn run test:ci
    - nvm alias default 4
    - yarn run test:ci
    - yarn run test
    - nvm alias default 8
    - yarn run test:ci
    - yarn run coverage:publish
    - yarn run test
    - nvm alias default 9
    - yarn run test
deployment:
  production:
    tag: /(v)?[0-9]+(\.[0-9]+)*/
@@ -39,4 +38,5 @@ general:
  branches:
    ignore:
      - gh-pages # list of branches to ignore
      - l10n_website
      - /release\/.*/ # or ignore regexes

flow-typed/npm/body-parser_v1.x.x.js (vendored, new file, 43 lines)

@@ -0,0 +1,43 @@
// flow-typed signature: 18dadbe162b608c79b9b31c3d2f1c822
// flow-typed version: b43dff3e0e/body-parser_v1.x.x/flow_>=v0.17.x

import type { Middleware, $Request, $Response } from 'express';

declare type bodyParser$Options = {
  inflate?: boolean;
  limit?: number | string;
  type?: string | string[] | ((req: $Request) => any);
  verify?: (req: $Request, res: $Response, buf: Buffer, encoding: string) => void;
};

declare type bodyParser$OptionsText = bodyParser$Options & {
  reviver?: (key: string, value: any) => any;
  strict?: boolean;
};

declare type bodyParser$OptionsJson = bodyParser$Options & {
  reviver?: (key: string, value: any) => any;
  strict?: boolean;
};

declare type bodyParser$OptionsUrlencoded = bodyParser$Options & {
  extended?: boolean;
  parameterLimit?: number;
};

declare module "body-parser" {

  declare type Options = bodyParser$Options;
  declare type OptionsText = bodyParser$OptionsText;
  declare type OptionsJson = bodyParser$OptionsJson;
  declare type OptionsUrlencoded = bodyParser$OptionsUrlencoded;

  declare function json(options?: OptionsJson): Middleware;

  declare function raw(options?: Options): Middleware;

  declare function text(options?: OptionsText): Middleware;

  declare function urlencoded(options?: OptionsUrlencoded): Middleware;

}

flow-typed/npm/express_v4.x.x.js (vendored, new file, 207 lines)

@@ -0,0 +1,207 @@
// flow-typed signature: f0e399a136d6e8dc8b1fbdc078e2850c
|
||||
// flow-typed version: ed397013d1/express_v4.x.x/flow_>=v0.32.x
|
||||
|
||||
import type { Server } from 'http';
|
||||
import type { Socket } from 'net';
|
||||
|
||||
declare type express$RouterOptions = {
|
||||
caseSensitive?: boolean,
|
||||
mergeParams?: boolean,
|
||||
strict?: boolean
|
||||
};
|
||||
|
||||
declare class express$RequestResponseBase {
|
||||
app: express$Application;
|
||||
get(field: string): string | void;
|
||||
}
|
||||
|
||||
declare type express$RequestParams = {
|
||||
[param: string]: string
|
||||
}
|
||||
|
||||
declare class express$Request extends http$IncomingMessage mixins express$RequestResponseBase {
|
||||
baseUrl: string;
|
||||
body: any;
|
||||
cookies: {[cookie: string]: string};
|
||||
connection: Socket;
|
||||
fresh: boolean;
|
||||
hostname: string;
|
||||
ip: string;
|
||||
ips: Array<string>;
|
||||
method: string;
|
||||
originalUrl: string;
|
||||
params: express$RequestParams;
|
||||
path: string;
|
||||
protocol: 'https' | 'http';
|
||||
query: {[name: string]: string | Array<string>};
|
||||
route: string;
|
||||
secure: boolean;
|
||||
signedCookies: {[signedCookie: string]: string};
|
||||
stale: boolean;
|
||||
subdomains: Array<string>;
|
||||
xhr: boolean;
|
||||
accepts(types: string): string | false;
|
||||
accepts(types: Array<string>): string | false;
|
||||
acceptsCharsets(...charsets: Array<string>): string | false;
|
||||
acceptsEncodings(...encoding: Array<string>): string | false;
|
||||
acceptsLanguages(...lang: Array<string>): string | false;
|
||||
header(field: string): string | void;
|
||||
is(type: string): boolean;
|
||||
param(name: string, defaultValue?: string): string | void;
|
||||
}
|
||||
|
||||
declare type express$CookieOptions = {
|
||||
domain?: string,
|
||||
encode?: (value: string) => string,
|
||||
expires?: Date,
|
||||
httpOnly?: boolean,
|
||||
maxAge?: number,
|
||||
path?: string,
|
||||
secure?: boolean,
|
||||
signed?: boolean
|
||||
};
|
||||
|
||||
declare type express$Path = string | RegExp;
|
||||
|
||||
declare type express$RenderCallback = (err: Error | null, html?: string) => mixed;
|
||||
|
||||
declare type express$SendFileOptions = {
|
||||
maxAge?: number,
|
||||
root?: string,
|
||||
lastModified?: boolean,
|
||||
headers?: {[name: string]: string},
|
||||
dotfiles?: 'allow' | 'deny' | 'ignore'
|
||||
};
|
||||
|
||||
declare class express$Response extends http$ServerResponse mixins express$RequestResponseBase {
|
||||
headersSent: boolean;
|
||||
locals: {[name: string]: mixed};
|
||||
append(field: string, value?: string): this;
|
||||
attachment(filename?: string): this;
|
||||
cookie(name: string, value: string, options?: express$CookieOptions): this;
|
||||
clearCookie(name: string, options?: express$CookieOptions): this;
|
||||
download(path: string, filename?: string, callback?: (err?: ?Error) => void): this;
|
||||
format(typesObject: {[type: string]: Function}): this;
|
||||
json(body?: mixed): this;
|
||||
jsonp(body?: mixed): this;
|
||||
links(links: {[name: string]: string}): this;
|
||||
location(path: string): this;
|
||||
redirect(url: string, ...args: Array<void>): this;
|
||||
redirect(status: number, url: string, ...args: Array<void>): this;
|
||||
render(view: string, locals?: {[name: string]: mixed}, callback?: express$RenderCallback): this;
|
||||
send(body?: mixed): this;
|
||||
sendFile(path: string, options?: express$SendFileOptions, callback?: (err?: ?Error) => mixed): this;
|
||||
sendStatus(statusCode: number): this;
|
||||
header(field: string, value?: string): this;
|
||||
header(headers: {[name: string]: string}): this;
|
||||
set(field: string, value?: string|string[]): this;
|
||||
set(headers: {[name: string]: string}): this;
|
||||
status(statusCode: number): this;
|
||||
type(type: string): this;
|
||||
vary(field: string): this;
|
||||
req: express$Request;
|
||||
}
|
||||
|
||||
declare type express$NextFunction = (err?: ?Error | 'route') => mixed;
|
||||
declare type express$Middleware =
|
||||
((req: $Subtype<express$Request>, res: express$Response, next: express$NextFunction) => mixed) |
|
||||
((error: Error, req: $Subtype<express$Request>, res: express$Response, next: express$NextFunction) => mixed);
|
||||
declare interface express$RouteMethodType<T> {
|
||||
(middleware: express$Middleware): T;
|
||||
(...middleware: Array<express$Middleware>): T;
|
||||
(path: express$Path|express$Path[], ...middleware: Array<express$Middleware>): T;
|
||||
}
|
||||
declare class express$Route {
|
||||
all: express$RouteMethodType<this>;
|
||||
get: express$RouteMethodType<this>;
|
||||
post: express$RouteMethodType<this>;
|
||||
put: express$RouteMethodType<this>;
|
||||
head: express$RouteMethodType<this>;
|
||||
delete: express$RouteMethodType<this>;
|
||||
options: express$RouteMethodType<this>;
|
||||
trace: express$RouteMethodType<this>;
|
||||
copy: express$RouteMethodType<this>;
|
||||
lock: express$RouteMethodType<this>;
|
||||
mkcol: express$RouteMethodType<this>;
|
||||
move: express$RouteMethodType<this>;
|
||||
purge: express$RouteMethodType<this>;
|
||||
propfind: express$RouteMethodType<this>;
|
||||
proppatch: express$RouteMethodType<this>;
|
||||
unlock: express$RouteMethodType<this>;
|
||||
report: express$RouteMethodType<this>;
|
||||
mkactivity: express$RouteMethodType<this>;
|
||||
checkout: express$RouteMethodType<this>;
|
||||
merge: express$RouteMethodType<this>;
|
||||
|
||||
// @TODO Missing 'm-search' but get flow illegal name error.
|
||||
|
||||
notify: express$RouteMethodType<this>;
|
||||
subscribe: express$RouteMethodType<this>;
|
||||
unsubscribe: express$RouteMethodType<this>;
|
||||
patch: express$RouteMethodType<this>;
|
||||
search: express$RouteMethodType<this>;
|
||||
connect: express$RouteMethodType<this>;
|
||||
}
|
||||
|
||||
declare class express$Router extends express$Route {
|
||||
constructor(options?: express$RouterOptions): void;
|
||||
route(path: string): express$Route;
|
||||
static (options?: express$RouterOptions): express$Router;
|
||||
use(middleware: express$Middleware): this;
|
||||
use(...middleware: Array<express$Middleware>): this;
|
||||
use(path: express$Path|express$Path[], ...middleware: Array<express$Middleware>): this;
|
||||
use(path: string, router: express$Router): this;
|
||||
handle(req: http$IncomingMessage, res: http$ServerResponse, next: express$NextFunction): void;
|
||||
param(
|
||||
param: string,
|
||||
callback: (
|
||||
req: $Subtype<express$Request>,
|
||||
res: express$Response,
|
||||
next: express$NextFunction,
|
||||
id: string
|
||||
) => mixed
|
||||
): void;
|
||||
|
||||
// Can't use regular callable signature syntax due to https://github.com/facebook/flow/issues/3084
|
||||
$call: (req: http$IncomingMessage, res: http$ServerResponse, next?: ?express$NextFunction) => void;
|
||||
}
|
||||
|
||||
declare class express$Application extends express$Router mixins events$EventEmitter {
|
||||
constructor(): void;
|
||||
locals: {[name: string]: mixed};
|
||||
mountpath: string;
|
||||
listen(port: number, hostname?: string, backlog?: number, callback?: (err?: ?Error) => mixed): Server;
|
||||
listen(port: number, hostname?: string, callback?: (err?: ?Error) => mixed): Server;
|
||||
listen(port: number, callback?: (err?: ?Error) => mixed): Server;
|
||||
listen(path: string, callback?: (err?: ?Error) => mixed): Server;
|
||||
listen(handle: Object, callback?: (err?: ?Error) => mixed): Server;
|
||||
disable(name: string): void;
|
||||
disabled(name: string): boolean;
|
||||
enable(name: string): express$Application;
|
||||
enabled(name: string): boolean;
|
||||
engine(name: string, callback: Function): void;
|
||||
/**
|
||||
* Mixed will not be taken as a value option. Issue around using the GET http method name and the get for settings.
|
||||
*/
|
||||
// get(name: string): mixed;
|
||||
set(name: string, value: mixed): mixed;
|
||||
render(name: string, optionsOrFunction: {[name: string]: mixed}, callback: express$RenderCallback): void;
|
||||
handle(req: http$IncomingMessage, res: http$ServerResponse, next?: ?express$NextFunction): void;
|
||||
}
|
||||
|
||||
declare module 'express' {
|
||||
declare export type RouterOptions = express$RouterOptions;
|
||||
declare export type CookieOptions = express$CookieOptions;
|
||||
declare export type Middleware = express$Middleware;
|
||||
declare export type NextFunction = express$NextFunction;
|
||||
declare export type RequestParams = express$RequestParams;
|
||||
declare export type $Response = express$Response;
|
||||
declare export type $Request = express$Request;
|
||||
declare export type $Application = express$Application;
|
||||
|
||||
declare module.exports: {
|
||||
(): express$Application, // If you try to call like a function, it will use this signature
|
||||
static: (root: string, options?: Object) => express$Middleware, // `static` property on the function
|
||||
Router: typeof express$Router, // `Router` property on the function
|
||||
};
|
||||
}
|
flow-typed/npm/jest_v19.x.x.js (vendored, new file, 459 lines)

@@ -0,0 +1,459 @@
// flow-typed signature: bdff15032a92c1b6daf0ab0067861cb1
|
||||
// flow-typed version: b43dff3e0e/jest_v19.x.x/flow_>=v0.16.x
|
||||
|
||||
type JestMockFn = {
|
||||
(...args: Array<any>): any,
|
||||
/**
|
||||
* An object for introspecting mock calls
|
||||
*/
|
||||
mock: {
|
||||
/**
|
||||
* An array that represents all calls that have been made into this mock
|
||||
* function. Each call is represented by an array of arguments that were
|
||||
* passed during the call.
|
||||
*/
|
||||
calls: Array<Array<any>>,
|
||||
/**
|
||||
* An array that contains all the object instances that have been
|
||||
* instantiated from this mock function.
|
||||
*/
|
||||
instances: mixed,
|
||||
},
|
||||
/**
|
||||
* Resets all information stored in the mockFn.mock.calls and
|
||||
* mockFn.mock.instances arrays. Often this is useful when you want to clean
|
||||
* up a mock's usage data between two assertions.
|
||||
*/
|
||||
mockClear(): Function,
|
||||
/**
|
||||
* Resets all information stored in the mock. This is useful when you want to
|
||||
* completely restore a mock back to its initial state.
|
||||
*/
|
||||
mockReset(): Function,
|
||||
/**
|
||||
* Accepts a function that should be used as the implementation of the mock.
|
||||
* The mock itself will still record all calls that go into and instances
|
||||
* that come from itself -- the only difference is that the implementation
|
||||
* will also be executed when the mock is called.
|
||||
*/
|
||||
mockImplementation(fn: Function): JestMockFn,
|
||||
/**
|
||||
* Accepts a function that will be used as an implementation of the mock for
|
||||
* one call to the mocked function. Can be chained so that multiple function
|
||||
* calls produce different results.
|
||||
*/
|
||||
mockImplementationOnce(fn: Function): JestMockFn,
|
||||
/**
|
||||
* Just a simple sugar function for returning `this`
|
||||
*/
|
||||
mockReturnThis(): void,
|
||||
/**
|
||||
* Deprecated: use jest.fn(() => value) instead
|
||||
*/
|
||||
mockReturnValue(value: any): JestMockFn,
|
||||
/**
|
||||
* Sugar for only returning a value once inside your mock
|
||||
*/
|
||||
mockReturnValueOnce(value: any): JestMockFn,
|
||||
}
|
||||
|
||||
type JestAsymmetricEqualityType = {
|
||||
/**
|
||||
* A custom Jasmine equality tester
|
||||
*/
|
||||
asymmetricMatch(value: mixed): boolean,
|
||||
}
|
||||
|
||||
type JestCallsType = {
|
||||
allArgs(): mixed,
|
||||
all(): mixed,
|
||||
any(): boolean,
|
||||
count(): number,
|
||||
first(): mixed,
|
||||
mostRecent(): mixed,
|
||||
reset(): void,
|
||||
}
|
||||
|
||||
type JestClockType = {
|
||||
install(): void,
|
||||
mockDate(date: Date): void,
|
||||
tick(milliseconds?:number): void,
|
||||
uninstall(): void,
|
||||
}
|
||||
|
||||
type JestMatcherResult = {
|
||||
message?: string | ()=>string,
|
||||
pass: boolean,
|
||||
}
|
||||
|
||||
type JestMatcher = (actual: any, expected: any) => JestMatcherResult;
|
||||
|
||||
type JestExpectType = {
|
||||
not: JestExpectType,
|
||||
/**
|
||||
* If you have a mock function, you can use .lastCalledWith to test what
|
||||
* arguments it was last called with.
|
||||
*/
|
||||
lastCalledWith(...args: Array<any>): void,
|
||||
/**
|
||||
* toBe just checks that a value is what you expect. It uses === to check
|
||||
* strict equality.
|
||||
*/
|
||||
toBe(value: any): void,
|
||||
/**
|
||||
* Use .toHaveBeenCalled to ensure that a mock function got called.
|
||||
*/
|
||||
toBeCalled(): void,
|
||||
/**
|
||||
* Use .toBeCalledWith to ensure that a mock function was called with
|
||||
* specific arguments.
|
||||
*/
|
||||
toBeCalledWith(...args: Array<any>): void,
|
||||
/**
|
||||
* Using exact equality with floating point numbers is a bad idea. Rounding
|
||||
* means that intuitive things fail.
|
||||
*/
|
||||
toBeCloseTo(num: number, delta: any): void,
|
||||
/**
|
||||
* Use .toBeDefined to check that a variable is not undefined.
|
||||
*/
|
||||
toBeDefined(): void,
|
||||
/**
|
||||
* Use .toBeFalsy when you don't care what a value is, you just want to
|
||||
* ensure a value is false in a boolean context.
|
||||
*/
|
||||
toBeFalsy(): void,
|
||||
/**
|
||||
* To compare floating point numbers, you can use toBeGreaterThan.
|
||||
*/
|
||||
toBeGreaterThan(number: number): void,
|
||||
/**
|
||||
* To compare floating point numbers, you can use toBeGreaterThanOrEqual.
|
||||
*/
|
||||
toBeGreaterThanOrEqual(number: number): void,
|
||||
/**
|
||||
* To compare floating point numbers, you can use toBeLessThan.
|
||||
*/
|
||||
toBeLessThan(number: number): void,
|
||||
/**
|
||||
* To compare floating point numbers, you can use toBeLessThanOrEqual.
|
||||
*/
|
||||
toBeLessThanOrEqual(number: number): void,
|
||||
/**
|
||||
* Use .toBeInstanceOf(Class) to check that an object is an instance of a
|
||||
* class.
|
||||
*/
|
||||
toBeInstanceOf(cls: Class<*>): void,
|
||||
/**
|
||||
* .toBeNull() is the same as .toBe(null) but the error messages are a bit
|
||||
* nicer.
|
||||
*/
|
||||
toBeNull(): void,
|
||||
/**
|
||||
* Use .toBeTruthy when you don't care what a value is, you just want to
|
||||
* ensure a value is true in a boolean context.
|
||||
*/
|
||||
toBeTruthy(): void,
|
||||
/**
|
||||
* Use .toBeUndefined to check that a variable is undefined.
|
||||
*/
|
||||
toBeUndefined(): void,
|
||||
/**
|
||||
* Use .toContain when you want to check that an item is in a list. For
|
||||
* testing the items in the list, this uses ===, a strict equality check.
|
||||
*/
|
||||
toContain(item: any): void,
|
||||
/**
|
||||
* Use .toContainEqual when you want to check that an item is in a list. For
|
||||
* testing the items in the list, this matcher recursively checks the
|
||||
* equality of all fields, rather than checking for object identity.
|
||||
*/
|
||||
toContainEqual(item: any): void,
|
||||
/**
|
||||
* Use .toEqual when you want to check that two objects have the same value.
|
||||
* This matcher recursively checks the equality of all fields, rather than
|
||||
* checking for object identity.
|
||||
*/
|
||||
toEqual(value: any): void,
|
||||
/**
|
||||
* Use .toHaveBeenCalled to ensure that a mock function got called.
|
||||
*/
|
||||
toHaveBeenCalled(): void,
|
||||
/**
|
||||
* Use .toHaveBeenCalledTimes to ensure that a mock function got called exact
|
||||
* number of times.
|
||||
*/
|
||||
toHaveBeenCalledTimes(number: number): void,
|
||||
/**
|
||||
* Use .toHaveBeenCalledWith to ensure that a mock function was called with
|
||||
* specific arguments.
|
||||
*/
|
||||
toHaveBeenCalledWith(...args: Array<any>): void,
|
||||
/**
|
||||
* If you have a mock function, you can use .toHaveBeenLastCalledWith to test what
|
||||
* arguments it was last called with.
|
||||
*/
|
||||
toHaveBeenLastCalledWith(...args: Array<any>): void,
|
||||
/**
|
||||
* Check that an object has a .length property and it is set to a certain
|
||||
* numeric value.
|
||||
*/
|
||||
toHaveLength(number: number): void,
|
||||
/**
|
||||
*
|
||||
*/
|
||||
toHaveProperty(propPath: string, value?: any): void,
|
||||
/**
|
||||
* Use .toMatch to check that a string matches a regular expression.
|
||||
*/
|
||||
toMatch(regexp: RegExp): void,
|
||||
/**
|
||||
* Use .toMatchObject to check that a javascript object matches a subset of the properties of an object.
|
||||
*/
|
||||
toMatchObject(object: Object): void,
|
||||
/**
|
||||
* This ensures that a React component matches the most recent snapshot.
|
||||
*/
|
||||
toMatchSnapshot(name?: string): void,
|
||||
/**
|
||||
* Use .toThrow to test that a function throws when it is called.
|
||||
*/
|
||||
toThrow(message?: string | Error): void,
|
||||
/**
|
||||
* Use .toThrowError to test that a function throws a specific error when it
|
||||
* is called. The argument can be a string for the error message, a class for
|
||||
* the error, or a regex that should match the error.
|
||||
*/
|
||||
toThrowError(message?: string | Error | RegExp): void,
|
||||
/**
|
||||
* Use .toThrowErrorMatchingSnapshot to test that a function throws a error
|
||||
* matching the most recent snapshot when it is called.
|
||||
*/
|
||||
toThrowErrorMatchingSnapshot(): void,
|
||||
}
|
||||
|
||||
type JestObjectType = {
|
||||
/**
|
||||
* Disables automatic mocking in the module loader.
|
||||
*
|
||||
* After this method is called, all `require()`s will return the real
|
||||
* versions of each module (rather than a mocked version).
|
||||
*/
|
||||
disableAutomock(): JestObjectType,
|
||||
/**
|
||||
* An un-hoisted version of disableAutomock
|
||||
*/
|
||||
autoMockOff(): JestObjectType,
|
||||
/**
|
||||
* Enables automatic mocking in the module loader.
|
||||
*/
|
||||
enableAutomock(): JestObjectType,
|
||||
/**
|
||||
* An un-hoisted version of enableAutomock
|
||||
*/
|
||||
autoMockOn(): JestObjectType,
|
||||
/**
|
||||
* Clears the mock.calls and mock.instances properties of all mocks.
|
||||
* Equivalent to calling .mockClear() on every mocked function.
|
||||
*/
|
||||
clearAllMocks(): JestObjectType,
|
||||
/**
|
||||
* Resets the state of all mocks. Equivalent to calling .mockReset() on every
|
||||
* mocked function.
|
||||
*/
|
||||
resetAllMocks(): JestObjectType,
|
||||
/**
|
||||
* Removes any pending timers from the timer system.
|
||||
*/
|
||||
clearAllTimers(): void,
|
||||
/**
|
||||
* The same as `mock` but not moved to the top of the expectation by
|
||||
* babel-jest.
|
||||
*/
|
||||
doMock(moduleName: string, moduleFactory?: any): JestObjectType,
|
||||
/**
|
||||
* The same as `unmock` but not moved to the top of the expectation by
|
||||
* babel-jest.
|
||||
*/
|
||||
dontMock(moduleName: string): JestObjectType,
|
||||
/**
|
||||
* Returns a new, unused mock function. Optionally takes a mock
|
||||
* implementation.
|
||||
*/
|
||||
fn(implementation?: Function): JestMockFn,
|
||||
/**
|
||||
* Determines if the given function is a mocked function.
|
||||
*/
|
||||
isMockFunction(fn: Function): boolean,
|
||||
/**
|
||||
* Given the name of a module, use the automatic mocking system to generate a
|
||||
* mocked version of the module for you.
|
||||
*/
|
||||
genMockFromModule(moduleName: string): any,
|
||||
/**
|
||||
* Mocks a module with an auto-mocked version when it is being required.
|
||||
*
|
||||
* The second argument can be used to specify an explicit module factory that
|
||||
* is being run instead of using Jest's automocking feature.
|
||||
*
|
||||
* The third argument can be used to create virtual mocks -- mocks of modules
|
||||
* that don't exist anywhere in the system.
|
||||
*/
|
||||
mock(moduleName: string, moduleFactory?: any, options?: Object): JestObjectType,
|
||||
/**
|
||||
* Resets the module registry - the cache of all required modules. This is
|
||||
* useful to isolate modules where local state might conflict between tests.
|
||||
*/
|
||||
resetModules(): JestObjectType,
|
||||
/**
|
||||
* Exhausts the micro-task queue (usually interfaced in node via
|
||||
* process.nextTick).
|
||||
*/
|
||||
runAllTicks(): void,
|
||||
/**
|
||||
* Exhausts the macro-task queue (i.e., all tasks queued by setTimeout(),
|
||||
* setInterval(), and setImmediate()).
|
||||
*/
|
||||
runAllTimers(): void,
|
||||
/**
|
||||
* Exhausts all tasks queued by setImmediate().
|
||||
*/
|
||||
runAllImmediates(): void,
|
||||
/**
|
||||
* Executes only the macro task queue (i.e. all tasks queued by setTimeout()
|
||||
* or setInterval() and setImmediate()).
|
||||
*/
|
||||
runTimersToTime(msToRun: number): void,
|
||||
/**
|
||||
* Executes only the macro-tasks that are currently pending (i.e., only the
|
||||
* tasks that have been queued by setTimeout() or setInterval() up to this
|
||||
* point)
|
||||
*/
|
||||
runOnlyPendingTimers(): void,
|
||||
/**
|
||||
* Explicitly supplies the mock object that the module system should return
|
||||
* for the specified module. Note: It is recommended to use jest.mock()
|
||||
* instead.
|
||||
*/
|
||||
setMock(moduleName: string, moduleExports: any): JestObjectType,
|
||||
/**
|
||||
* Indicates that the module system should never return a mocked version of
|
||||
* the specified module from require() (e.g. that it should always return the
|
||||
* real module).
|
||||
*/
|
||||
unmock(moduleName: string): JestObjectType,
|
||||
/**
|
||||
* Instructs Jest to use fake versions of the standard timer functions
|
||||
* (setTimeout, setInterval, clearTimeout, clearInterval, nextTick,
|
||||
* setImmediate and clearImmediate).
|
||||
*/
|
||||
useFakeTimers(): JestObjectType,
|
||||
/**
|
||||
* Instructs Jest to use the real versions of the standard timer functions.
|
||||
*/
|
||||
useRealTimers(): JestObjectType,
|
||||
/**
|
||||
* Creates a mock function similar to jest.fn but also tracks calls to
|
||||
* object[methodName].
|
||||
*/
|
||||
spyOn(object: Object, methodName: string): JestMockFn,
|
||||
}
|
||||
|
||||
type JestSpyType = {
|
||||
calls: JestCallsType,
|
||||
}
|
||||
|
||||
/** Runs this function after every test inside this context */
|
||||
declare function afterEach(fn: Function): void;
|
||||
/** Runs this function before every test inside this context */
|
||||
declare function beforeEach(fn: Function): void;
|
||||
/** Runs this function after all tests have finished inside this context */
|
||||
declare function afterAll(fn: Function): void;
|
||||
/** Runs this function before any tests have started inside this context */
|
||||
declare function beforeAll(fn: Function): void;
|
||||
/** A context for grouping tests together */
|
||||
declare function describe(name: string, fn: Function): void;
|
||||
|
||||
/** An individual test unit */
|
||||
declare var it: {
|
||||
/**
|
||||
* An individual test unit
|
||||
*
|
||||
* @param {string} Name of Test
|
||||
* @param {Function} Test
|
||||
*/
|
||||
(name: string, fn?: Function): ?Promise<void>,
|
||||
/**
|
||||
* Only run this test
|
||||
*
|
||||
* @param {string} Name of Test
|
||||
* @param {Function} Test
|
||||
*/
|
||||
only(name: string, fn?: Function): ?Promise<void>,
|
||||
/**
|
||||
* Skip running this test
|
||||
*
|
||||
* @param {string} Name of Test
|
||||
* @param {Function} Test
|
||||
*/
|
||||
skip(name: string, fn?: Function): ?Promise<void>,
|
||||
/**
|
||||
* Run the test concurrently
|
||||
*
|
||||
* @param {string} Name of Test
|
||||
* @param {Function} Test
|
||||
*/
|
||||
concurrent(name: string, fn?: Function): ?Promise<void>,
|
||||
};
|
||||
declare function fit(name: string, fn: Function): ?Promise<void>;
|
||||
/** An individual test unit */
|
||||
declare var test: typeof it;
|
||||
/** A disabled group of tests */
|
||||
declare var xdescribe: typeof describe;
|
||||
/** A focused group of tests */
|
||||
declare var fdescribe: typeof describe;
|
||||
/** A disabled individual test */
|
||||
declare var xit: typeof it;
|
||||
/** A disabled individual test */
|
||||
declare var xtest: typeof it;
|
||||
|
||||
/** The expect function is used every time you want to test a value */
|
||||
declare var expect: {
|
||||
/** The object that you want to make assertions against */
|
||||
(value: any): JestExpectType,
|
||||
/** Add additional Jasmine matchers to Jest's roster */
|
||||
extend(matchers: {[name:string]: JestMatcher}): void,
|
||||
/** Add a module that formats application-specific data structures. */
|
||||
addSnapshotSerializer(serializer: (input: Object) => string): void,
|
||||
assertions(expectedAssertions: number): void,
|
||||
any(value: mixed): JestAsymmetricEqualityType,
|
||||
anything(): void,
|
||||
arrayContaining(value: Array<mixed>): void,
|
||||
objectContaining(value: Object): void,
|
||||
/** Matches any received string that contains the exact expected string. */
|
||||
stringContaining(value: string): void,
|
||||
stringMatching(value: string | RegExp): void,
|
||||
};
|
||||
|
||||
// TODO handle return type
|
||||
// http://jasmine.github.io/2.4/introduction.html#section-Spies
|
||||
declare function spyOn(value: mixed, method: string): Object;
|
||||
|
||||
/** Holds all functions related to manipulating test runner */
|
||||
declare var jest: JestObjectType
|
||||
|
||||
/**
|
||||
 * The global Jasmine object. This is generally not exposed as the public API;
|
||||
* using features inside here could break in later versions of Jest.
|
||||
*/
|
||||
declare var jasmine: {
|
||||
DEFAULT_TIMEOUT_INTERVAL: number,
|
||||
any(value: mixed): JestAsymmetricEqualityType,
|
||||
anything(): void,
|
||||
arrayContaining(value: Array<mixed>): void,
|
||||
clock(): JestClockType,
|
||||
createSpy(name: string): JestSpyType,
|
||||
createSpyObj(baseName: string, methodNames: Array<string>): {[methodName: string]: JestSpyType},
|
||||
objectContaining(value: Object): void,
|
||||
stringMatching(value: string): void,
|
||||
}
|
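Taken together, these declarations type the Jest globals a spec file leans on (describe, it, expect, jest, and the lifecycle hooks). A minimal sketch of how they combine in practice; the module, file path, and message below are made up for illustration:

// test/unit/example.spec.js (hypothetical), exercising the globals declared above
describe('spies and timers', () => {
  afterEach(() => {
    // switch back to the real timer functions after each test
    jest.useRealTimers();
  });

  it('tracks calls made through jest.spyOn', () => {
    const logger = {warn: (msg) => msg};
    const spy = jest.spyOn(logger, 'warn');

    logger.warn('uplink not reachable');

    expect(spy).toHaveBeenCalledWith('uplink not reachable');
  });
});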
514
flow-typed/npm/lodash_v4.x.x.js
vendored
Normal file
|
@ -0,0 +1,514 @@
|
|||
// flow-typed signature: 495348fe7e36289229ca4b9b8cbad572
|
||||
// flow-typed version: 9821eaaefe/lodash_v4.x.x/flow_>=v0.47.x
|
||||
|
||||
declare module 'lodash' {
|
||||
declare type TemplateSettings = {
|
||||
escape?: RegExp,
|
||||
evaluate?: RegExp,
|
||||
imports?: Object,
|
||||
interpolate?: RegExp,
|
||||
variable?: string,
|
||||
};
|
||||
|
||||
declare type TruncateOptions = {
|
||||
length?: number,
|
||||
omission?: string,
|
||||
separator?: RegExp|string,
|
||||
};
|
||||
|
||||
declare type DebounceOptions = {
|
||||
leading?: bool,
|
||||
maxWait?: number,
|
||||
trailing?: bool,
|
||||
};
|
||||
|
||||
declare type ThrottleOptions = {
|
||||
leading?: bool,
|
||||
trailing?: bool,
|
||||
};
|
||||
|
||||
declare type NestedArray<T> = Array<Array<T>>;
|
||||
|
||||
declare type matchesIterateeShorthand = Object;
|
||||
declare type matchesPropertyIterateeShorthand = [string, any];
|
||||
declare type propertyIterateeShorthand = string;
|
||||
|
||||
declare type OPredicate<A, O> =
|
||||
| ((value: A, key: string, object: O) => any)
|
||||
| matchesIterateeShorthand
|
||||
| matchesPropertyIterateeShorthand
|
||||
| propertyIterateeShorthand;
|
||||
|
||||
declare type OIterateeWithResult<V, O, R> = Object|string|((value: V, key: string, object: O) => R);
|
||||
declare type OIteratee<O> = OIterateeWithResult<any, O, any>;
|
||||
declare type OFlatMapIteratee<T, U> = OIterateeWithResult<any, T, Array<U>>;
|
||||
|
||||
declare type Predicate<T> =
|
||||
| ((value: T, index: number, array: Array<T>) => any)
|
||||
| matchesIterateeShorthand
|
||||
| matchesPropertyIterateeShorthand
|
||||
| propertyIterateeShorthand;
|
||||
|
||||
declare type _ValueOnlyIteratee<T> = (value: T) => mixed;
|
||||
declare type ValueOnlyIteratee<T> = _ValueOnlyIteratee<T>|string;
|
||||
declare type _Iteratee<T> = (item: T, index: number, array: ?Array<T>) => mixed;
|
||||
declare type Iteratee<T> = _Iteratee<T>|Object|string;
|
||||
declare type FlatMapIteratee<T, U> = ((item: T, index: number, array: ?Array<T>) => Array<U>)|Object|string;
|
||||
declare type Comparator<T> = (item: T, item2: T) => bool;
|
||||
|
||||
declare type MapIterator<T,U> =
|
||||
| ((item: T, index: number, array: Array<T>) => U)
|
||||
| propertyIterateeShorthand;
|
||||
|
||||
declare type OMapIterator<T,O,U> =
|
||||
| ((item: T, key: string, object: O) => U)
|
||||
| propertyIterateeShorthand;
|
||||
|
||||
declare class Lodash {
|
||||
// Array
|
||||
chunk<T>(array: ?Array<T>, size?: number): Array<Array<T>>;
|
||||
compact<T,N:?T>(array: Array<N>): Array<T>;
|
||||
concat<T>(base: Array<T>, ...elements: Array<any>): Array<T|any>;
|
||||
difference<T>(array: ?Array<T>, values?: Array<T>): Array<T>;
|
||||
differenceBy<T>(array: ?Array<T>, values: Array<T>, iteratee: ValueOnlyIteratee<T>): T[];
|
||||
differenceWith<T>(array: T[], values: T[], comparator?: Comparator<T>): T[];
|
||||
drop<T>(array: ?Array<T>, n?: number): Array<T>;
|
||||
dropRight<T>(array: ?Array<T>, n?: number): Array<T>;
|
||||
dropRightWhile<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
dropWhile<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
fill<T, U>(array: ?Array<T>, value: U, start?: number, end?: number): Array<T|U>;
|
||||
findIndex<T>(array: ?Array<T>, predicate?: Predicate<T>, fromIndex?: number): number;
|
||||
findLastIndex<T>(array: ?Array<T>, predicate?: Predicate<T>, fromIndex?: number): number;
|
||||
// alias of _.head
|
||||
first<T>(array: ?Array<T>): T;
|
||||
flatten<T,X>(array: Array<Array<T>|X>): Array<T|X>;
|
||||
flattenDeep<T>(array: any[]): Array<T>;
|
||||
flattenDepth(array: any[], depth?: number): any[];
|
||||
fromPairs<T>(pairs: Array<T>): Object;
|
||||
head<T>(array: ?Array<T>): T;
|
||||
indexOf<T>(array: ?Array<T>, value: T, fromIndex?: number): number;
|
||||
initial<T>(array: ?Array<T>): Array<T>;
|
||||
intersection<T>(...arrays: Array<Array<T>>): Array<T>;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
intersectionBy<T>(a1: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
intersectionBy<T>(a1: Array<T>, a2: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
intersectionBy<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
intersectionBy<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, a4: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
intersectionWith<T>(a1: Array<T>, comparator: Comparator<T>): Array<T>;
|
||||
intersectionWith<T>(a1: Array<T>, a2: Array<T>, comparator: Comparator<T>): Array<T>;
|
||||
intersectionWith<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, comparator: Comparator<T>): Array<T>;
|
||||
intersectionWith<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, a4: Array<T>, comparator: Comparator<T>): Array<T>;
|
||||
join<T>(array: ?Array<T>, separator?: string): string;
|
||||
last<T>(array: ?Array<T>): T;
|
||||
lastIndexOf<T>(array: ?Array<T>, value: T, fromIndex?: number): number;
|
||||
nth<T>(array: T[], n?: number): T;
|
||||
pull<T>(array: ?Array<T>, ...values?: Array<T>): Array<T>;
|
||||
pullAll<T>(array: ?Array<T>, values: Array<T>): Array<T>;
|
||||
pullAllBy<T>(array: ?Array<T>, values: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
pullAllWith<T>(array?: T[], values: T[], comparator?: Function): T[];
|
||||
pullAt<T>(array: ?Array<T>, ...indexed?: Array<number>): Array<T>;
|
||||
pullAt<T>(array: ?Array<T>, indexed?: Array<number>): Array<T>;
|
||||
remove<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
reverse<T>(array: ?Array<T>): Array<T>;
|
||||
slice<T>(array: ?Array<T>, start?: number, end?: number): Array<T>;
|
||||
sortedIndex<T>(array: ?Array<T>, value: T): number;
|
||||
sortedIndexBy<T>(array: ?Array<T>, value: T, iteratee?: ValueOnlyIteratee<T>): number;
|
||||
sortedIndexOf<T>(array: ?Array<T>, value: T): number;
|
||||
sortedLastIndex<T>(array: ?Array<T>, value: T): number;
|
||||
sortedLastIndexBy<T>(array: ?Array<T>, value: T, iteratee?: ValueOnlyIteratee<T>): number;
|
||||
sortedLastIndexOf<T>(array: ?Array<T>, value: T): number;
|
||||
sortedUniq<T>(array: ?Array<T>): Array<T>;
|
||||
sortedUniqBy<T>(array: ?Array<T>, iteratee?: (value: T) => mixed): Array<T>;
|
||||
tail<T>(array: ?Array<T>): Array<T>;
|
||||
take<T>(array: ?Array<T>, n?: number): Array<T>;
|
||||
takeRight<T>(array: ?Array<T>, n?: number): Array<T>;
|
||||
takeRightWhile<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
takeWhile<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
union<T>(...arrays?: Array<Array<T>>): Array<T>;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
unionBy<T>(a1: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
unionBy<T>(a1: Array<T>, a2: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
unionBy<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
unionBy<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, a4: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
unionWith<T>(a1: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
unionWith<T>(a1: Array<T>, a2: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
unionWith<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
unionWith<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, a4: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
uniq<T>(array: ?Array<T>): Array<T>;
|
||||
uniqBy<T>(array: ?Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
uniqWith<T>(array: ?Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
unzip<T>(array: ?Array<T>): Array<T>;
|
||||
unzipWith<T>(array: ?Array<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
without<T>(array: ?Array<T>, ...values?: Array<T>): Array<T>;
|
||||
xor<T>(...array: Array<Array<T>>): Array<T>;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
xorBy<T>(a1: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
xorBy<T>(a1: Array<T>, a2: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
xorBy<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
xorBy<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, a4: Array<T>, iteratee?: ValueOnlyIteratee<T>): Array<T>;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
xorWith<T>(a1: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
xorWith<T>(a1: Array<T>, a2: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
xorWith<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
xorWith<T>(a1: Array<T>, a2: Array<T>, a3: Array<T>, a4: Array<T>, comparator?: Comparator<T>): Array<T>;
|
||||
zip<A, B>(a1: A[], a2: B[]): Array<[A, B]>;
|
||||
zip<A, B, C>(a1: A[], a2: B[], a3: C[]): Array<[A, B, C]>;
|
||||
zip<A, B, C, D>(a1: A[], a2: B[], a3: C[], a4: D[]): Array<[A, B, C, D]>;
|
||||
zip<A, B, C, D, E>(a1: A[], a2: B[], a3: C[], a4: D[], a5: E[]): Array<[A, B, C, D, E]>;
|
||||
|
||||
zipObject(props?: Array<any>, values?: Array<any>): Object;
|
||||
zipObjectDeep(props?: any[], values?: any): Object;
|
||||
//Workaround until (...parameter: T, parameter2: U) works
|
||||
zipWith<T>(a1: NestedArray<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
zipWith<T>(a1: NestedArray<T>, a2: NestedArray<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
zipWith<T>(a1: NestedArray<T>, a2: NestedArray<T>, a3: NestedArray<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
zipWith<T>(a1: NestedArray<T>, a2: NestedArray<T>, a3: NestedArray<T>, a4: NestedArray<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
|
||||
// Collection
|
||||
countBy<T>(array: ?Array<T>, iteratee?: ValueOnlyIteratee<T>): Object;
|
||||
countBy<T: Object>(object: T, iteratee?: ValueOnlyIteratee<T>): Object;
|
||||
// alias of _.forEach
|
||||
each<T>(array: ?Array<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
each<T: Object>(object: T, iteratee?: OIteratee<T>): T;
|
||||
// alias of _.forEachRight
|
||||
eachRight<T>(array: ?Array<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
eachRight<T: Object>(object: T, iteratee?: OIteratee<T>): T;
|
||||
every<T>(array: ?Array<T>, iteratee?: Iteratee<T>): bool;
|
||||
every<T: Object>(object: T, iteratee?: OIteratee<T>): bool;
|
||||
filter<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
filter<A, T: {[id: string]: A}>(object: T, predicate?: OPredicate<A, T>): Array<A>;
|
||||
find<T>(array: ?Array<T>, predicate?: Predicate<T>, fromIndex?: number): T|void;
|
||||
find<V, A, T: {[id: string]: A}>(object: T, predicate?: OPredicate<A, T>, fromIndex?: number): V;
|
||||
findLast<T>(array: ?Array<T>, predicate?: Predicate<T>, fromIndex?: number): T|void;
|
||||
findLast<V, A, T: {[id: string]: A}>(object: T, predicate?: OPredicate<A, T>): V;
|
||||
flatMap<T, U>(array: ?Array<T>, iteratee?: FlatMapIteratee<T, U>): Array<U>;
|
||||
flatMap<T: Object, U>(object: T, iteratee?: OFlatMapIteratee<T, U>): Array<U>;
|
||||
flatMapDeep<T, U>(array: ?Array<T>, iteratee?: FlatMapIteratee<T, U>): Array<U>;
|
||||
flatMapDeep<T: Object, U>(object: T, iteratee?: OFlatMapIteratee<T, U>): Array<U>;
|
||||
flatMapDepth<T, U>(array: ?Array<T>, iteratee?: FlatMapIteratee<T, U>, depth?: number): Array<U>;
|
||||
flatMapDepth<T: Object, U>(object: T, iteratee?: OFlatMapIteratee<T, U>, depth?: number): Array<U>;
|
||||
forEach<T>(array: ?Array<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
forEach<T: Object>(object: T, iteratee?: OIteratee<T>): T;
|
||||
forEachRight<T>(array: ?Array<T>, iteratee?: Iteratee<T>): Array<T>;
|
||||
forEachRight<T: Object>(object: T, iteratee?: OIteratee<T>): T;
|
||||
groupBy<V, T>(array: ?Array<T>, iteratee?: ValueOnlyIteratee<T>): {[key: V]: Array<T>};
|
||||
groupBy<V, A, T: {[id: string]: A}>(object: T, iteratee?: ValueOnlyIteratee<A>): {[key: V]: Array<A>};
|
||||
includes<T>(array: ?Array<T>, value: T, fromIndex?: number): bool;
|
||||
includes<T: Object>(object: T, value: any, fromIndex?: number): bool;
|
||||
includes(str: string, value: string, fromIndex?: number): bool;
|
||||
invokeMap<T>(array: ?Array<T>, path: ((value: T) => Array<string>|string)|Array<string>|string, ...args?: Array<any>): Array<any>;
|
||||
invokeMap<T: Object>(object: T, path: ((value: any) => Array<string>|string)|Array<string>|string, ...args?: Array<any>): Array<any>;
|
||||
keyBy<T, V>(array: ?Array<T>, iteratee?: ValueOnlyIteratee<T>): {[key: V]: ?T};
|
||||
keyBy<V, A, I, T: {[id: I]: A}>(object: T, iteratee?: ValueOnlyIteratee<A>): {[key: V]: ?A};
|
||||
map<T, U>(array: ?Array<T>, iteratee?: MapIterator<T, U>): Array<U>;
|
||||
map<V, T: Object, U>(object: ?T, iteratee?: OMapIterator<V, T, U>): Array<U>;
|
||||
map(str: ?string, iteratee?: (char: string, index: number, str: string) => any): string;
|
||||
orderBy<T>(array: ?Array<T>, iteratees?: Array<Iteratee<T>>|string, orders?: Array<'asc'|'desc'>|string): Array<T>;
|
||||
orderBy<V, T: Object>(object: T, iteratees?: Array<OIteratee<*>>|string, orders?: Array<'asc'|'desc'>|string): Array<V>;
|
||||
partition<T>(array: ?Array<T>, predicate?: Predicate<T>): NestedArray<T>;
|
||||
partition<V, A, T: {[id: string]: A}>(object: T, predicate?: OPredicate<A, T>): NestedArray<V>;
|
||||
reduce<T, U>(array: ?Array<T>, iteratee?: (accumulator: U, value: T, index: number, array: ?Array<T>) => U, accumulator?: U): U;
|
||||
reduce<T: Object, U>(object: T, iteratee?: (accumulator: U, value: any, key: string, object: T) => U, accumulator?: U): U;
|
||||
reduceRight<T, U>(array: ?Array<T>, iteratee?: (accumulator: U, value: T, index: number, array: ?Array<T>) => U, accumulator?: U): U;
|
||||
reduceRight<T: Object, U>(object: T, iteratee?: (accumulator: U, value: any, key: string, object: T) => U, accumulator?: U): U;
|
||||
reject<T>(array: ?Array<T>, predicate?: Predicate<T>): Array<T>;
|
||||
reject<V: Object, A, T: {[id: string]: A}>(object: T, predicate?: OPredicate<A, T>): Array<V>;
|
||||
sample<T>(array: ?Array<T>): T;
|
||||
sample<V, T: Object>(object: T): V;
|
||||
sampleSize<T>(array: ?Array<T>, n?: number): Array<T>;
|
||||
sampleSize<V, T: Object>(object: T, n?: number): Array<V>;
|
||||
shuffle<T>(array: ?Array<T>): Array<T>;
|
||||
shuffle<V, T: Object>(object: T): Array<V>;
|
||||
size(collection: Array<any>|Object): number;
|
||||
some<T>(array: ?Array<T>, predicate?: Predicate<T>): bool;
|
||||
some<A, T: {[id: string]: A}>(object?: ?T, predicate?: OPredicate<A, T>): bool;
|
||||
sortBy<T>(array: ?Array<T>, ...iteratees?: Array<Iteratee<T>>): Array<T>;
|
||||
sortBy<T>(array: ?Array<T>, iteratees?: Array<Iteratee<T>>): Array<T>;
|
||||
sortBy<V, T: Object>(object: T, ...iteratees?: Array<OIteratee<T>>): Array<V>;
|
||||
sortBy<V, T: Object>(object: T, iteratees?: Array<OIteratee<T>>): Array<V>;
|
||||
|
||||
// Date
|
||||
now(): number;
|
||||
|
||||
// Function
|
||||
after(n: number, fn: Function): Function;
|
||||
ary(func: Function, n?: number): Function;
|
||||
before(n: number, fn: Function): Function;
|
||||
bind(func: Function, thisArg: any, ...partials: Array<any>): Function;
|
||||
bindKey(obj: Object, key: string, ...partials: Array<any>): Function;
|
||||
curry(func: Function, arity?: number): Function;
|
||||
curryRight(func: Function, arity?: number): Function;
|
||||
debounce(func: Function, wait?: number, options?: DebounceOptions): Function;
|
||||
defer(func: Function, ...args?: Array<any>): number;
|
||||
delay(func: Function, wait: number, ...args?: Array<any>): number;
|
||||
flip(func: Function): Function;
|
||||
memoize(func: Function, resolver?: Function): Function;
|
||||
negate(predicate: Function): Function;
|
||||
once(func: Function): Function;
|
||||
overArgs(func: Function, ...transforms: Array<Function>): Function;
|
||||
overArgs(func: Function, transforms: Array<Function>): Function;
|
||||
partial(func: Function, ...partials: any[]): Function;
|
||||
partialRight(func: Function, ...partials: Array<any>): Function;
|
||||
partialRight(func: Function, partials: Array<any>): Function;
|
||||
rearg(func: Function, ...indexes: Array<number>): Function;
|
||||
rearg(func: Function, indexes: Array<number>): Function;
|
||||
rest(func: Function, start?: number): Function;
|
||||
spread(func: Function): Function;
|
||||
throttle(func: Function, wait?: number, options?: ThrottleOptions): Function;
|
||||
unary(func: Function): Function;
|
||||
wrap(value: any, wrapper: Function): Function;
|
||||
|
||||
// Lang
|
||||
castArray(value: *): any[];
|
||||
clone<T>(value: T): T;
|
||||
cloneDeep<T>(value: T): T;
|
||||
cloneDeepWith<T, U>(value: T, customizer?: ?(value: T, key: number|string, object: T, stack: any) => U): U;
|
||||
cloneWith<T, U>(value: T, customizer?: ?(value: T, key: number|string, object: T, stack: any) => U): U;
|
||||
conformsTo<T:{[key:string]:mixed}>(source: T, predicates: T&{[key:string]:(x:any)=>boolean}): boolean;
|
||||
eq(value: any, other: any): bool;
|
||||
gt(value: any, other: any): bool;
|
||||
gte(value: any, other: any): bool;
|
||||
isArguments(value: any): bool;
|
||||
isArray(value: any): bool;
|
||||
isArrayBuffer(value: any): bool;
|
||||
isArrayLike(value: any): bool;
|
||||
isArrayLikeObject(value: any): bool;
|
||||
isBoolean(value: any): bool;
|
||||
isBuffer(value: any): bool;
|
||||
isDate(value: any): bool;
|
||||
isElement(value: any): bool;
|
||||
isEmpty(value: any): bool;
|
||||
isEqual(value: any, other: any): bool;
|
||||
isEqualWith<T, U>(value: T, other: U, customizer?: (objValue: any, otherValue: any, key: number|string, object: T, other: U, stack: any) => bool|void): bool;
|
||||
isError(value: any): bool;
|
||||
isFinite(value: any): bool;
|
||||
isFunction(value: Function): true;
|
||||
isFunction(value: number|string|void|null|Object): false;
|
||||
isInteger(value: any): bool;
|
||||
isLength(value: any): bool;
|
||||
isMap(value: any): bool;
|
||||
isMatch(object?: ?Object, source: Object): bool;
|
||||
isMatchWith<T: Object, U: Object>(object: T, source: U, customizer?: (objValue: any, srcValue: any, key: number|string, object: T, source: U) => bool|void): bool;
|
||||
isNaN(value: any): bool;
|
||||
isNative(value: any): bool;
|
||||
isNil(value: any): bool;
|
||||
isNull(value: any): bool;
|
||||
isNumber(value: any): bool;
|
||||
isObject(value: any): bool;
|
||||
isObjectLike(value: any): bool;
|
||||
isPlainObject(value: any): bool;
|
||||
isRegExp(value: any): bool;
|
||||
isSafeInteger(value: any): bool;
|
||||
isSet(value: any): bool;
|
||||
isString(value: string): true;
|
||||
isString(value: number|bool|Function|void|null|Object|Array<any>): false;
|
||||
isSymbol(value: any): bool;
|
||||
isTypedArray(value: any): bool;
|
||||
isUndefined(value: any): bool;
|
||||
isWeakMap(value: any): bool;
|
||||
isWeakSet(value: any): bool;
|
||||
lt(value: any, other: any): bool;
|
||||
lte(value: any, other: any): bool;
|
||||
toArray(value: any): Array<any>;
|
||||
toFinite(value: any): number;
|
||||
toInteger(value: any): number;
|
||||
toLength(value: any): number;
|
||||
toNumber(value: any): number;
|
||||
toPlainObject(value: any): Object;
|
||||
toSafeInteger(value: any): number;
|
||||
toString(value: any): string;
|
||||
|
||||
// Math
|
||||
add(augend: number, addend: number): number;
|
||||
ceil(number: number, precision?: number): number;
|
||||
divide(dividend: number, divisor: number): number;
|
||||
floor(number: number, precision?: number): number;
|
||||
max<T>(array: ?Array<T>): T;
|
||||
maxBy<T>(array: ?Array<T>, iteratee?: Iteratee<T>): T;
|
||||
mean(array: Array<*>): number;
|
||||
meanBy<T>(array: Array<T>, iteratee?: Iteratee<T>): number;
|
||||
min<T>(array: ?Array<T>): T;
|
||||
minBy<T>(array: ?Array<T>, iteratee?: Iteratee<T>): T;
|
||||
multiply(multiplier: number, multiplicand: number): number;
|
||||
round(number: number, precision?: number): number;
|
||||
subtract(minuend: number, subtrahend: number): number;
|
||||
sum(array: Array<*>): number;
|
||||
sumBy<T>(array: Array<T>, iteratee?: Iteratee<T>): number;
|
||||
|
||||
// number
|
||||
clamp(number: number, lower?: number, upper: number): number;
|
||||
inRange(number: number, start?: number, end: number): bool;
|
||||
random(lower?: number, upper?: number, floating?: bool): number;
|
||||
|
||||
// Object
|
||||
assign(object?: ?Object, ...sources?: Array<Object>): Object;
|
||||
assignIn<A, B>(a: A, b: B): A & B;
|
||||
assignIn<A, B, C>(a: A, b: B, c: C): A & B & C;
|
||||
assignIn<A, B, C, D>(a: A, b: B, c: C, d: D): A & B & C & D;
|
||||
assignIn<A, B, C, D, E>(a: A, b: B, c: C, d: D, e: E): A & B & C & D & E;
|
||||
assignInWith<T: Object, A: Object>(object: T, s1: A, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A) => any|void): Object;
|
||||
assignInWith<T: Object, A: Object, B: Object>(object: T, s1: A, s2: B, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B) => any|void): Object;
|
||||
assignInWith<T: Object, A: Object, B: Object, C: Object>(object: T, s1: A, s2: B, s3: C, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C) => any|void): Object;
|
||||
assignInWith<T: Object, A: Object, B: Object, C: Object, D: Object>(object: T, s1: A, s2: B, s3: C, s4: D, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C|D) => any|void): Object;
|
||||
assignWith<T: Object, A: Object>(object: T, s1: A, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A) => any|void): Object;
|
||||
assignWith<T: Object, A: Object, B: Object>(object: T, s1: A, s2: B, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B) => any|void): Object;
|
||||
assignWith<T: Object, A: Object, B: Object, C: Object>(object: T, s1: A, s2: B, s3: C, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C) => any|void): Object;
|
||||
assignWith<T: Object, A: Object, B: Object, C: Object, D: Object>(object: T, s1: A, s2: B, s3: C, s4: D, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C|D) => any|void): Object;
|
||||
at(object?: ?Object, ...paths: Array<string>): Array<any>;
|
||||
at(object?: ?Object, paths: Array<string>): Array<any>;
|
||||
create<T>(prototype: T, properties?: Object): $Supertype<T>;
|
||||
defaults(object?: ?Object, ...sources?: Array<Object>): Object;
|
||||
defaultsDeep(object?: ?Object, ...sources?: Array<Object>): Object;
|
||||
// alias for _.toPairs
|
||||
entries(object?: ?Object): NestedArray<any>;
|
||||
// alias for _.toPairsIn
|
||||
entriesIn(object?: ?Object): NestedArray<any>;
|
||||
// alias for _.assignIn
|
||||
extend<A, B>(a: A, b: B): A & B;
|
||||
extend<A, B, C>(a: A, b: B, c: C): A & B & C;
|
||||
extend<A, B, C, D>(a: A, b: B, c: C, d: D): A & B & C & D;
|
||||
extend<A, B, C, D, E>(a: A, b: B, c: C, d: D, e: E): A & B & C & D & E;
|
||||
// alias for _.assignInWith
|
||||
extendWith<T: Object, A: Object>(object: T, s1: A, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A) => any|void): Object;
|
||||
extendWith<T: Object, A: Object, B: Object>(object: T, s1: A, s2: B, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B) => any|void): Object;
|
||||
extendWith<T: Object, A: Object, B: Object, C: Object>(object: T, s1: A, s2: B, s3: C, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C) => any|void): Object;
|
||||
extendWith<T: Object, A: Object, B: Object, C: Object, D: Object>(object: T, s1: A, s2: B, s3: C, s4: D, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C|D) => any|void): Object;
|
||||
findKey<A, T: {[id: string]: A}>(object?: ?T, predicate?: OPredicate<A, T>): string|void;
|
||||
findLastKey<A, T: {[id: string]: A}>(object?: ?T, predicate?: OPredicate<A, T>): string|void;
|
||||
forIn(object?: ?Object, iteratee?: OIteratee<*>): Object;
|
||||
forInRight(object?: ?Object, iteratee?: OIteratee<*>): Object;
|
||||
forOwn(object?: ?Object, iteratee?: OIteratee<*>): Object;
|
||||
forOwnRight(object?: ?Object, iteratee?: OIteratee<*>): Object;
|
||||
functions(object?: ?Object): Array<string>;
|
||||
functionsIn(object?: ?Object): Array<string>;
|
||||
get(object?: ?Object|?Array<any>, path?: ?Array<string>|string, defaultValue?: any): any;
|
||||
has(object?: ?Object, path?: ?Array<string>|string): bool;
|
||||
hasIn(object?: ?Object, path?: ?Array<string>|string): bool;
|
||||
invert(object?: ?Object, multiVal?: bool): Object;
|
||||
invertBy(object: ?Object, iteratee?: Function): Object;
|
||||
invoke(object?: ?Object, path?: ?Array<string>|string, ...args?: Array<any>): any;
|
||||
keys(object?: ?Object): Array<string>;
|
||||
keysIn(object?: ?Object): Array<string>;
|
||||
mapKeys(object?: ?Object, iteratee?: OIteratee<*>): Object;
|
||||
mapValues(object?: ?Object, iteratee?: OIteratee<*>): Object;
|
||||
merge(object?: ?Object, ...sources?: Array<?Object>): Object;
|
||||
mergeWith<T: Object, A: Object>(object: T, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A) => any|void): Object;
|
||||
mergeWith<T: Object, A: Object, B: Object>(object: T, s1: A, s2: B, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B) => any|void): Object;
|
||||
mergeWith<T: Object, A: Object, B: Object, C: Object>(object: T, s1: A, s2: B, s3: C, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C) => any|void): Object;
|
||||
mergeWith<T: Object, A: Object, B: Object, C: Object, D: Object>(object: T, s1: A, s2: B, s3: C, s4: D, customizer?: (objValue: any, srcValue: any, key: string, object: T, source: A|B|C|D) => any|void): Object;
|
||||
omit(object?: ?Object, ...props: Array<string>): Object;
|
||||
omit(object?: ?Object, props: Array<string>): Object;
|
||||
omitBy<A, T: {[id: string]: A}>(object?: ?T, predicate?: OPredicate<A, T>): Object;
|
||||
pick(object?: ?Object, ...props: Array<string>): Object;
|
||||
pick(object?: ?Object, props: Array<string>): Object;
|
||||
pickBy<A, T: {[id: string]: A}>(object?: ?T, predicate?: OPredicate<A, T>): Object;
|
||||
result(object?: ?Object, path?: ?Array<string>|string, defaultValue?: any): any;
|
||||
set(object?: ?Object, path?: ?Array<string>|string, value: any): Object;
|
||||
setWith<T>(object: T, path?: ?Array<string>|string, value: any, customizer?: (nsValue: any, key: string, nsObject: T) => any): Object;
|
||||
toPairs(object?: ?Object|Array<*>): NestedArray<any>;
|
||||
toPairsIn(object?: ?Object): NestedArray<any>;
|
||||
transform(collection: Object|Array<any>, iteratee?: OIteratee<*>, accumulator?: any): any;
|
||||
unset(object?: ?Object, path?: ?Array<string>|string): bool;
|
||||
update(object: Object, path: string[]|string, updater: Function): Object;
|
||||
updateWith(object: Object, path: string[]|string, updater: Function, customizer?: Function): Object;
|
||||
values(object?: ?Object): Array<any>;
|
||||
valuesIn(object?: ?Object): Array<any>;
|
||||
|
||||
// Seq
|
||||
// harder to read, but this is _()
|
||||
(value: any): any;
|
||||
chain<T>(value: T): any;
|
||||
tap<T>(value: T, interceptor: (value:T)=>any): T;
|
||||
thru<T1,T2>(value: T1, interceptor: (value:T1)=>T2): T2;
|
||||
// TODO: _.prototype.*
|
||||
|
||||
// String
|
||||
camelCase(string?: ?string): string;
|
||||
capitalize(string?: string): string;
|
||||
deburr(string?: string): string;
|
||||
endsWith(string?: string, target?: string, position?: number): bool;
|
||||
escape(string?: string): string;
|
||||
escapeRegExp(string?: string): string;
|
||||
kebabCase(string?: string): string;
|
||||
lowerCase(string?: string): string;
|
||||
lowerFirst(string?: string): string;
|
||||
pad(string?: string, length?: number, chars?: string): string;
|
||||
padEnd(string?: string, length?: number, chars?: string): string;
|
||||
padStart(string?: string, length?: number, chars?: string): string;
|
||||
parseInt(string: string, radix?: number): number;
|
||||
repeat(string?: string, n?: number): string;
|
||||
replace(string?: string, pattern: RegExp|string, replacement: ((string: string) => string)|string): string;
|
||||
snakeCase(string?: string): string;
|
||||
split(string?: string, separator: RegExp|string, limit?: number): Array<string>;
|
||||
startCase(string?: string): string;
|
||||
startsWith(string?: string, target?: string, position?: number): bool;
|
||||
template(string?: string, options?: TemplateSettings): Function;
|
||||
toLower(string?: string): string;
|
||||
toUpper(string?: string): string;
|
||||
trim(string?: string, chars?: string): string;
|
||||
trimEnd(string?: string, chars?: string): string;
|
||||
trimStart(string?: string, chars?: string): string;
|
||||
truncate(string?: string, options?: TruncateOptions): string;
|
||||
unescape(string?: string): string;
|
||||
upperCase(string?: string): string;
|
||||
upperFirst(string?: string): string;
|
||||
words(string?: string, pattern?: RegExp|string): Array<string>;
|
||||
|
||||
// Util
|
||||
attempt(func: Function): any;
|
||||
bindAll(object?: ?Object, methodNames: Array<string>): Object;
|
||||
bindAll(object?: ?Object, ...methodNames: Array<string>): Object;
|
||||
cond(pairs: NestedArray<Function>): Function;
|
||||
conforms(source: Object): Function;
|
||||
constant<T>(value: T): () => T;
|
||||
defaultTo<T1:string|boolean|Object,T2>(value: T1, default: T2): T1;
|
||||
// NaN is a number instead of its own type, otherwise it would behave like null/void
|
||||
defaultTo<T1:number,T2>(value: T1, default: T2): T1|T2;
|
||||
defaultTo<T1:void|null,T2>(value: T1, default: T2): T2;
|
||||
flow(...funcs?: Array<Function>): Function;
|
||||
flow(funcs?: Array<Function>): Function;
|
||||
flowRight(...funcs?: Array<Function>): Function;
|
||||
flowRight(funcs?: Array<Function>): Function;
|
||||
identity<T>(value: T): T;
|
||||
iteratee(func?: any): Function;
|
||||
matches(source: Object): Function;
|
||||
matchesProperty(path?: ?Array<string>|string, srcValue: any): Function;
|
||||
method(path?: ?Array<string>|string, ...args?: Array<any>): Function;
|
||||
methodOf(object?: ?Object, ...args?: Array<any>): Function;
|
||||
mixin<T: Function|Object>(object?: T, source: Object, options?: { chain: bool }): T;
|
||||
noConflict(): Lodash;
|
||||
noop(...args: Array<mixed>): void;
|
||||
nthArg(n?: number): Function;
|
||||
over(...iteratees: Array<Function>): Function;
|
||||
over(iteratees: Array<Function>): Function;
|
||||
overEvery(...predicates: Array<Function>): Function;
|
||||
overEvery(predicates: Array<Function>): Function;
|
||||
overSome(...predicates: Array<Function>): Function;
|
||||
overSome(predicates: Array<Function>): Function;
|
||||
property(path?: ?Array<string>|string): Function;
|
||||
propertyOf(object?: ?Object): Function;
|
||||
range(start: number, end: number, step?: number): Array<number>;
|
||||
range(end: number, step?: number): Array<number>;
|
||||
rangeRight(start: number, end: number, step?: number): Array<number>;
|
||||
rangeRight(end: number, step?: number): Array<number>;
|
||||
runInContext(context?: Object): Function;
|
||||
|
||||
stubArray(): Array<*>;
|
||||
stubFalse(): false;
|
||||
stubObject(): {};
|
||||
stubString(): '';
|
||||
stubTrue(): true;
|
||||
times(n: number, ...rest: Array<void>): Array<number>;
|
||||
times<T>(n: number, iteratee: ((i: number) => T)): Array<T>;
|
||||
toPath(value: any): Array<string>;
|
||||
uniqueId(prefix?: string): string;
|
||||
|
||||
// Properties
|
||||
VERSION: string;
|
||||
templateSettings: TemplateSettings;
|
||||
}
|
||||
|
||||
declare var exports: Lodash;
|
||||
}
|
5
flow-typed/npm/request_v2.x.x.js
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
declare function request(options: any, callback: any): Promise<any>;
|
||||
|
||||
declare module 'request' {
|
||||
declare module.exports: request
|
||||
}
|
18
flow-typed/npm/rimraf_v2.x.x.js
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
// flow-typed signature: 1dff23447d5e18f5ac2b05aaec7cfb74
|
||||
// flow-typed version: a453e98ea2/rimraf_v2.x.x/flow_>=v0.25.0
|
||||
|
||||
declare module 'rimraf' {
|
||||
declare type Options = {
|
||||
maxBusyTries?: number,
|
||||
emfileWait?: number,
|
||||
glob?: boolean,
|
||||
disableGlob?: boolean
|
||||
};
|
||||
|
||||
declare type Callback = (err: ?Error, path: ?string) => void;
|
||||
|
||||
declare module.exports: {
|
||||
(f: string, opts?: Options | Callback, callback?: Callback): void;
|
||||
sync(path: string, opts?: Options): void;
|
||||
};
|
||||
}
|
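These typings describe rimraf's callable export and its sync variant; a small usage sketch against that shape (the paths are invented):

// hypothetical cleanup, matching the signatures declared above
const rimraf = require('rimraf');

rimraf('static/*', {glob: true}, (err) => {
  if (err) {
    console.error('cleanup failed', err);
  }
});

// synchronous form, with glob expansion turned off
rimraf.sync('/tmp/verdaccio-test-storage', {disableGlob: true});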
30
jest.config.js
Normal file
|
@ -0,0 +1,30 @@
|
|||
/* eslint comma-dangle: 0 */
|
||||
|
||||
module.exports = {
|
||||
'name': 'verdaccio-jest',
|
||||
'verbose': true,
|
||||
'collectCoverage': true,
|
||||
'coveragePathIgnorePatterns': [
|
||||
'node_modules',
|
||||
'fixtures'
|
||||
],
|
||||
'testRegex': '(/test/unit.*\\.spec|test/functional.*\\.func|/test/webui/.*\\.spec)\\.js',
|
||||
// 'testRegex': '(test/functional.*\\.func)\\.js'
|
||||
'setupFiles': [
|
||||
'./test/webui/global.js'
|
||||
],
|
||||
'modulePathIgnorePatterns': [
|
||||
'global.js'
|
||||
],
|
||||
'testPathIgnorePatterns': [
|
||||
'__snapshots__'
|
||||
],
|
||||
'moduleNameMapper': {
|
||||
'\\.(scss)$': '<rootDir>/node_modules/identity-obj-proxy',
|
||||
'github-markdown-css': '<rootDir>/node_modules/identity-obj-proxy',
|
||||
'\\.(png)$': '<rootDir>/node_modules/identity-obj-proxy'
|
||||
},
|
||||
'transformIgnorePatterns': [
|
||||
'<rootDir>/node_modules/(?!react-syntax-highlighter)'
|
||||
]
|
||||
};
|
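Given the testRegex and setupFiles above, unit specs are expected under test/unit/ with a .spec.js suffix (functional tests use .func.js, web UI specs live under test/webui/). A sketch of a file the runner would pick up; the file name and assertion are invented:

// test/unit/package-name.spec.js (hypothetical), matched by the testRegex above
describe('package names', () => {
  test('keeps scoped names intact', () => {
    const name = '@scope/pkg';
    expect(name.startsWith('@')).toBe(true);
  });
});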
1
jestEnvironment.js
Normal file
|
@ -0,0 +1 @@
|
|||
require.requireActual('babel/polyfill');
|
150
package.json
|
@ -15,30 +15,32 @@
|
|||
"verdaccio": "./bin/verdaccio"
|
||||
},
|
||||
"dependencies": {
|
||||
"@verdaccio/file-locking": "0.0.3",
|
||||
"@verdaccio/file-locking": "0.0.5",
|
||||
"@verdaccio/local-storage": "0.1.0",
|
||||
"@verdaccio/streams": "0.0.2",
|
||||
"@verdaccio/types": "0.1.0",
|
||||
"JSONStream": "^1.1.1",
|
||||
"apache-md5": "^1.1.2",
|
||||
"async": "^2.0.1",
|
||||
"async": "^2.6.0",
|
||||
"body-parser": "^1.15.0",
|
||||
"bunyan": "^1.8.0",
|
||||
"chalk": "^2.0.1",
|
||||
"commander": "^2.11.0",
|
||||
"compression": "1.6.2",
|
||||
"chalk": "^2.3.0",
|
||||
"commander": "^2.12.2",
|
||||
"compression": "1.7.1",
|
||||
"cookies": "^0.7.0",
|
||||
"cors": "^2.8.3",
|
||||
"express": "4.15.3",
|
||||
"express": "4.16.2",
|
||||
"global": "^4.3.2",
|
||||
"handlebars": "4.0.5",
|
||||
"handlebars": "4.0.11",
|
||||
"http-errors": "^1.4.0",
|
||||
"js-string-escape": "1.0.1",
|
||||
"js-yaml": "^3.6.0",
|
||||
"jsonwebtoken": "^7.4.1",
|
||||
"jsonwebtoken": "^8.1.0",
|
||||
"lockfile": "^1.0.1",
|
||||
"lodash": "4.17.4",
|
||||
"lunr": "^0.7.0",
|
||||
"marked": "0.3.9",
|
||||
"mime": "^1.3.6",
|
||||
"mime": "^2.0.3",
|
||||
"minimatch": "^3.0.2",
|
||||
"mkdirp": "^0.5.1",
|
||||
"pkginfo": "^0.4.0",
|
||||
|
@ -47,67 +49,77 @@
|
|||
"unix-crypt-td-js": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"axios": "0.16.2",
|
||||
"babel-cli": "6.24.1",
|
||||
"babel-core": "6.25.0",
|
||||
"babel-eslint": "7.2.3",
|
||||
"babel-loader": "7.1.1",
|
||||
"babel-plugin-flow-runtime": "0.11.1",
|
||||
"axios": "0.17.1",
|
||||
"babel-cli": "6.26.0",
|
||||
"babel-core": "6.26.0",
|
||||
"babel-eslint": "8.0.3",
|
||||
"babel-jest": "^21.2.0",
|
||||
"babel-loader": "7.1.2",
|
||||
"babel-plugin-flow-runtime": "0.15.0",
|
||||
"babel-plugin-transform-async-to-generator": "^6.24.1",
|
||||
"babel-plugin-transform-class-properties": "^6.24.1",
|
||||
"babel-plugin-transform-decorators-legacy": "1.3.4",
|
||||
"babel-plugin-transform-es2015-classes": "^6.24.1",
|
||||
"babel-plugin-transform-runtime": "6.23.0",
|
||||
"babel-polyfill": "^6.26.0",
|
||||
"babel-preset-env": "1.5.2",
|
||||
"babel-preset-env": "1.6.1",
|
||||
"babel-preset-es2015": "^6.24.1",
|
||||
"babel-preset-es2015-node4": "2.1.1",
|
||||
"babel-preset-flow": "6.23.0",
|
||||
"babel-preset-react": "6.24.1",
|
||||
"babel-preset-stage-2": "6.24.1",
|
||||
"babel-preset-stage-3": "6.24.1",
|
||||
"babel-runtime": "6.23.0",
|
||||
"codacy-coverage": "2.0.2",
|
||||
"babel-register": "^6.26.0",
|
||||
"babel-runtime": "6.26.0",
|
||||
"codecov": "2.2.0",
|
||||
"coveralls": "2.13.1",
|
||||
"css-loader": "0.28.4",
|
||||
"element-react": "1.0.16",
|
||||
"element-theme-default": "1.3.7",
|
||||
"cross-env": "5.1.1",
|
||||
"css-loader": "0.28.7",
|
||||
"element-react": "1.4.3",
|
||||
"element-theme-default": "1.4.12",
|
||||
"enzyme": "^3.2.0",
|
||||
"enzyme-adapter-react-16": "^1.1.0",
|
||||
"eslint": "4.2.0",
|
||||
"eslint-config-google": "0.8.0",
|
||||
"eslint-loader": "1.8.0",
|
||||
"eslint-plugin-babel": "4.1.1",
|
||||
"eslint-plugin-flowtype": "2.35.0",
|
||||
"eslint-plugin-import": "2.6.1",
|
||||
"eslint-plugin-react": "7.1.0",
|
||||
"extract-text-webpack-plugin": "3.0.0",
|
||||
"file-loader": "0.11.2",
|
||||
"flow-runtime": "0.13.0",
|
||||
"eslint-config-google": "0.9.1",
|
||||
"eslint-loader": "1.9.0",
|
||||
"eslint-plugin-babel": "4.1.2",
|
||||
"eslint-plugin-flowtype": "2.39.1",
|
||||
"eslint-plugin-import": "2.8.0",
|
||||
"eslint-plugin-react": "7.5.1",
|
||||
"eslint-plugin-jest": "^21.2.0",
|
||||
"extract-text-webpack-plugin": "3.0.2",
|
||||
"file-loader": "1.1.5",
|
||||
"flow-bin": "0.52.0",
|
||||
"flow-runtime": "0.16.0",
|
||||
"friendly-errors-webpack-plugin": "1.6.1",
|
||||
"fs-extra": "4.0.1",
|
||||
"github-markdown-css": "2.8.0",
|
||||
"html-webpack-plugin": "2.29.0",
|
||||
"fs-extra": "4.0.2",
|
||||
"github-markdown-css": "2.9.0",
|
||||
"html-webpack-plugin": "2.30.1",
|
||||
"identity-obj-proxy": "^3.0.0",
|
||||
"in-publish": "2.0.0",
|
||||
"jest": "^21.2.1",
|
||||
"localstorage-memory": "1.0.2",
|
||||
"mocha": "3.4.2",
|
||||
"mocha-lcov-reporter": "1.3.0",
|
||||
"node-sass": "4.5.3",
|
||||
"node-sass": "4.7.2",
|
||||
"normalize.css": "7.0.0",
|
||||
"nyc": "11.0.3",
|
||||
"ora": "1.3.0",
|
||||
"prop-types": "15.5.10",
|
||||
"react": "15.6.1",
|
||||
"react-dom": "15.6.1",
|
||||
"prop-types": "15.6.0",
|
||||
"react": "16.2.0",
|
||||
"react-dom": "16.2.0",
|
||||
"react-hot-loader": "3.0.0-beta.7",
|
||||
"react-router-dom": "4.1.1",
|
||||
"react-syntax-highlighter": "5.6.2",
|
||||
"rimraf": "2.6.1",
|
||||
"react-router-dom": "4.2.2",
|
||||
"react-syntax-highlighter": "5.8.0",
|
||||
"rimraf": "2.6.2",
|
||||
"sass-loader": "6.0.6",
|
||||
"source-map-loader": "0.2.1",
|
||||
"source-map-loader": "0.2.3",
|
||||
"standard-version": "4.2.0",
|
||||
"style-loader": "0.18.2",
|
||||
"stylelint": "7.13.0",
|
||||
"stylelint-config-standard": "16.0.0",
|
||||
"stylelint-webpack-plugin": "0.8.0",
|
||||
"url-loader": "0.5.8",
|
||||
"webpack": "3.2.0",
|
||||
"webpack-dev-server": "2.5.0",
|
||||
"webpack-merge": "4.1.0"
|
||||
"style-loader": "0.19.0",
|
||||
"stylelint": "8.3.1",
|
||||
"stylelint-config-recommended-scss": "^2.0.0",
|
||||
"stylelint-scss": "^2.1.0",
|
||||
"stylelint-webpack-plugin": "0.9.0",
|
||||
"url-loader": "0.6.2",
|
||||
"webpack": "3.9.1",
|
||||
"webpack-dev-server": "2.9.5",
|
||||
"webpack-merge": "4.1.1"
|
||||
},
|
||||
"keywords": [
|
||||
"private",
|
||||
|
@ -122,29 +134,25 @@
|
|||
"scripts": {
|
||||
"release": "standard-version -a -s",
|
||||
"prepublish": "in-publish && npm run build:webui || not-in-publish",
|
||||
"test": "mocha ./test/functional ./test/unit --reporter=spec --full-trace",
|
||||
"pre:ci": "npm run build:webui",
|
||||
"test:ci": "npm run test:coverage",
|
||||
"test:only": "mocha ./test/functional ./test/unit",
|
||||
"test:coverage": "nyc npm t",
|
||||
"coverage:html": "nyc report --reporter=html",
|
||||
"coverage:publish": "nyc report --reporter=lcov | codecov",
|
||||
"lint": "eslint .",
|
||||
"flow": "flow",
|
||||
"pretest": "npm run code:build",
|
||||
"test": "cross-env NODE_ENV=test BABEL_ENV=test jest --maxWorkers 2",
|
||||
"test:unit": "cross-env NODE_ENV=test BABEL_ENV=test jest '(/test/unit.*\\.spec|/test/webui/.*\\.spec)\\.js' --maxWorkers 2",
|
||||
"pre:ci": "npm run lint && npm run build:webui",
|
||||
"coverage:publish": "codecov",
|
||||
"lint": "npm run flow && eslint .",
|
||||
"lint:css": "stylelint 'src/**/*.scss' --syntax scss",
|
||||
"pre:webpack": "npm run lint && rimraf static/*",
|
||||
"dev:webui": "babel-node tools/dev.server.js",
|
||||
"build:webui": "npm run pre:webpack && webpack --config tools/webpack.prod.config.babel.js",
|
||||
"dev:start": "cross-env BABEL_ENV=registry babel-node src/lib/cli",
|
||||
"code:build": "cross-env BABEL_ENV=registry babel src/ --out-dir build/ --ignore src/webui/ --copy-files",
|
||||
"pre:webpack": "rimraf static/*",
|
||||
"dev:webui": "cross-env BABEL_ENV=ui babel-node tools/dev.server.js",
|
||||
"build:webui": "npm run pre:webpack && BABEL_ENV=ui webpack --config tools/webpack.prod.config.babel.js",
|
||||
"build:docker": "docker build -t verdaccio . --no-cache",
|
||||
"build:docker:rpi": "docker build -f Dockerfile.rpi -t verdaccio:rpi ."
|
||||
},
|
||||
"jest": {
|
||||
"snapshotSerializers": [
|
||||
"jest-serializer-enzyme"
|
||||
]
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.6.1",
|
||||
"npm": ">=2.15.9"
|
||||
"node": ">=6.12.0",
|
||||
"npm": ">=3"
|
||||
},
|
||||
"preferGlobal": true,
|
||||
"publishConfig": {
|
||||
|
|
|
@ -27,13 +27,13 @@ module.exports = function(route, auth, storage) {
|
|||
|
||||
// tagging a package
|
||||
route.put('/:package/:tag',
|
||||
can('publish'), media(mime.lookup('json')), tag_package_version);
|
||||
can('publish'), media(mime.getType('json')), tag_package_version);
|
||||
|
||||
route.post('/-/package/:package/dist-tags/:tag',
|
||||
can('publish'), media(mime.lookup('json')), tag_package_version);
|
||||
can('publish'), media(mime.getType('json')), tag_package_version);
|
||||
|
||||
route.put('/-/package/:package/dist-tags/:tag',
|
||||
can('publish'), media(mime.lookup('json')), tag_package_version);
|
||||
can('publish'), media(mime.getType('json')), tag_package_version);
|
||||
|
||||
route.delete('/-/package/:package/dist-tags/:tag', can('publish'), function(req, res, next) {
|
||||
const tags = {};
|
||||
|
@ -50,16 +50,20 @@ module.exports = function(route, auth, storage) {
|
|||
});
|
||||
|
||||
route.get('/-/package/:package/dist-tags', can('access'), function(req, res, next) {
|
||||
storage.get_package(req.params.package, {req: req}, function(err, info) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
storage.get_package({
|
||||
name: req.params.package,
|
||||
req,
|
||||
callback: function(err, info) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
next(info['dist-tags']);
|
||||
next(info['dist-tags']);
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
route.post('/-/package/:package/dist-tags', can('publish'), media(mime.lookup('json')), expect_json,
|
||||
route.post('/-/package/:package/dist-tags', can('publish'), media(mime.getType('json')), expect_json,
|
||||
function(req, res, next) {
|
||||
storage.merge_tags(req.params.package, req.body, function(err) {
|
||||
if (err) {
|
||||
|
@ -69,26 +73,4 @@ module.exports = function(route, auth, storage) {
|
|||
return next({ok: 'tags updated'});
|
||||
});
|
||||
});
|
||||
|
||||
route.put('/-/package/:package/dist-tags', can('publish'), media(mime.lookup('json')), expect_json,
|
||||
function(req, res, next) {
|
||||
storage.replace_tags(req.params.package, req.body, function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
res.status(201);
|
||||
return next({ok: 'tags updated'});
|
||||
});
|
||||
});
|
||||
|
||||
route.delete('/-/package/:package/dist-tags', can('publish'), media(mime.lookup('json')),
|
||||
function(req, res, next) {
|
||||
storage.replace_tags(req.params.package, {}, function(err) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
res.status(201);
|
||||
return next({ok: 'tags removed'});
|
||||
});
|
||||
});
|
||||
};
|
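The recurring change in these handlers is that storage.get_package now takes a single options object instead of positional arguments. The new call shape, sketched with placeholder values:

// hypothetical call illustrating the options-object signature used above
storage.get_package({
  name: '@scope/some-package',   // package name, possibly scoped
  req,                           // the incoming Express request
  callback: (err, info) => {
    if (err) {
      return next(err);
    }
    next(info['dist-tags']);
  },
});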
||||
|
|
|
@ -10,7 +10,7 @@ module.exports = function(route, auth, storage, config) {
|
|||
const can = Middleware.allow(auth);
|
||||
// TODO: anonymous user?
|
||||
route.get('/:package/:version?', can('access'), function(req, res, next) {
|
||||
storage.get_package(req.params.package, {req: req}, function(err, info) {
|
||||
const getPackageMetaCallback = function(err, info) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
@ -37,6 +37,12 @@ module.exports = function(route, auth, storage, config) {
|
|||
}
|
||||
|
||||
return next( createError[404]('version not found: ' + req.params.version) );
|
||||
};
|
||||
|
||||
storage.get_package({
|
||||
name: req.params.package,
|
||||
req,
|
||||
callback: getPackageMetaCallback,
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ module.exports = function(router, auth, storage, config) {
|
|||
const can = Middleware.allow(auth);
|
||||
|
||||
// publishing a package
|
||||
router.put('/:package/:_rev?/:revision?', can('publish'), media(mime.lookup('json')), expect_json, function(req, res, next) {
|
||||
router.put('/:package/:_rev?/:revision?', can('publish'), media(mime.getType('json')), expect_json, function(req, res, next) {
|
||||
const name = req.params.package;
|
||||
let metadata;
|
||||
const create_tarball = function(filename, data, cb) {
|
||||
|
@ -170,7 +170,7 @@ module.exports = function(router, auth, storage, config) {
|
|||
});
|
||||
|
||||
// adding a version
|
||||
router.put('/:package/:version/-tag/:tag', can('publish'), media(mime.lookup('json')), expect_json, function(req, res, next) {
|
||||
router.put('/:package/:version/-tag/:tag', can('publish'), media(mime.getType('json')), expect_json, function(req, res, next) {
|
||||
let name = req.params.package;
|
||||
let version = req.params.version;
|
||||
let tag = req.params.tag;
|
||||
|
|
|
@ -1,22 +1,21 @@
|
|||
'use strict';
|
||||
import express from 'express';
|
||||
import Error from 'http-errors';
|
||||
import compression from 'compression';
|
||||
import _ from 'lodash';
|
||||
import cors from 'cors';
|
||||
import Storage from '../lib/storage';
|
||||
import {loadPlugin} from '../lib/plugin-loader';
|
||||
|
||||
const express = require('express');
|
||||
const Error = require('http-errors');
|
||||
const compression = require('compression');
|
||||
const Auth = require('../lib/auth');
|
||||
const Logger = require('../lib/logger');
|
||||
const Config = require('../lib/config');
|
||||
const Middleware = require('./web/middleware');
|
||||
const Cats = require('../lib/status-cats');
|
||||
const Storage = require('../lib/storage');
|
||||
const _ = require('lodash');
|
||||
const cors = require('cors');
|
||||
const load_plugins = require('../lib/plugin-loader').load_plugins;
|
||||
|
||||
module.exports = function(config_hash) {
|
||||
module.exports = function(configHash) {
|
||||
// Config
|
||||
Logger.setup(config_hash.logs);
|
||||
const config = new Config(config_hash);
|
||||
Logger.setup(configHash.logs);
|
||||
const config = new Config(configHash);
|
||||
const storage = new Storage(config);
|
||||
const auth = new Auth(config);
|
||||
const app = express();
|
||||
|
@ -86,7 +85,7 @@ module.exports = function(config_hash) {
|
|||
config: config,
|
||||
logger: Logger.logger,
|
||||
};
|
||||
const plugins = load_plugins(config, config.middlewares, plugin_params, function(plugin) {
|
||||
const plugins = loadPlugin(config, config.middlewares, plugin_params, function(plugin) {
|
||||
return plugin.register_middlewares;
|
||||
});
|
||||
plugins.forEach(function(plugin) {
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
'use strict';
|
||||
|
||||
import Search from '../../lib/search';
|
||||
const bodyParser = require('body-parser');
|
||||
const express = require('express');
|
||||
const marked = require('marked');
|
||||
const Search = require('../../lib/search');
|
||||
const _ = require('lodash');
|
||||
const Middleware = require('./middleware');
|
||||
const match = Middleware.match;
|
||||
const validateName = Middleware.validate_name;
|
||||
|
@ -13,6 +13,7 @@ const route = express.Router(); // eslint-disable-line
|
|||
const async = require('async');
|
||||
const HTTPError = require('http-errors');
|
||||
const Utils = require('../../lib/utils');
|
||||
const {generateGravatarUrl} = require('../../utils/user');
|
||||
|
||||
/*
|
||||
  This file includes all Verdaccio-only API endpoints (Web UI); for the npm API, please see ../endpoint/
|
||||
|
@ -35,7 +36,7 @@ module.exports = function(config, auth, storage) {
|
|||
|
||||
  // Get the list of all visible packages
|
||||
route.get('/packages', function(req, res, next) {
|
||||
storage.get_local(function(err, packages) {
|
||||
storage.getLocalDatabase(function(err, packages) {
|
||||
if (err) {
|
||||
      // that function shouldn't produce any errors
|
||||
throw err;
|
||||
|
@ -71,17 +72,21 @@ module.exports = function(config, auth, storage) {
|
|||
});
|
||||
|
||||
// Get package readme
|
||||
route.get('/package/readme(/@:scope?)?/:package/:version?', can('access'), function(req, res, next) {
|
||||
route.get('/package/readme/(@:scope/)?:package/:version?', can('access'), function(req, res, next) {
|
||||
let packageName = req.params.package;
|
||||
if (req.params.scope) {
|
||||
packageName = `@${req.params.scope}/${packageName}`;
|
||||
}
|
||||
storage.get_package(packageName, {req: req}, function(err, info) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
res.set('Content-Type', 'text/plain');
|
||||
next(marked(info.readme || 'ERROR: No README data found!'));
|
||||
storage.get_package({
|
||||
name: packageName,
|
||||
req,
|
||||
callback: function(err, info) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
res.set('Content-Type', 'text/plain');
|
||||
next(marked(info.readme || 'ERROR: No README data found!'));
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -91,22 +96,25 @@ module.exports = function(config, auth, storage) {
|
|||
const packages = [];
|
||||
|
||||
const getPackageInfo = function(i) {
|
||||
storage.get_package(results[i].ref, (err, entry) => {
|
||||
if (!err && entry) {
|
||||
auth.allow_access(entry.name, req.remote_user, function(err, allowed) {
|
||||
if (err || !allowed) {
|
||||
return;
|
||||
}
|
||||
storage.get_package({
|
||||
name: results[i].ref,
|
||||
callback: (err, entry) => {
|
||||
if (!err && entry) {
|
||||
auth.allow_access(entry.name, req.remote_user, function(err, allowed) {
|
||||
if (err || !allowed) {
|
||||
return;
|
||||
}
|
||||
|
||||
packages.push(entry.versions[entry['dist-tags'].latest]);
|
||||
});
|
||||
}
|
||||
packages.push(entry.versions[entry['dist-tags'].latest]);
|
||||
});
|
||||
}
|
||||
|
||||
if (i >= results.length - 1) {
|
||||
next(packages);
|
||||
} else {
|
||||
getPackageInfo(i + 1);
|
||||
}
|
||||
if (i >= results.length - 1) {
|
||||
next(packages);
|
||||
} else {
|
||||
getPackageInfo(i + 1);
|
||||
}
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -138,6 +146,57 @@ module.exports = function(config, auth, storage) {
|
|||
res.redirect(base);
|
||||
});
|
||||
|
||||
route.get('/sidebar/(@:scope/)?:package', function(req, res, next) {
|
||||
let packageName = req.params.package;
|
||||
if (req.params.scope) {
|
||||
packageName = `@${req.params.scope}/${packageName}`;
|
||||
}
|
||||
|
||||
storage.get_package({
|
||||
name: packageName,
|
||||
keepUpLinkData: true,
|
||||
req,
|
||||
callback: function(err, info) {
|
||||
res.set('Content-Type', 'application/json');
|
||||
|
||||
if (!err) {
|
||||
info.latest = info.versions[info['dist-tags'].latest];
|
||||
let propertyToDelete = ['readme', 'versions'];
|
||||
|
||||
_.forEach(propertyToDelete, ((property) => {
|
||||
delete info[property];
|
||||
}));
|
||||
|
||||
|
||||
if (typeof _.get(info, 'latest.author.email') === 'string') {
|
||||
info.latest.author.avatar = generateGravatarUrl(info.latest.author.email);
|
||||
} else {
|
||||
// _.get can't guarantee author property exist
|
||||
_.set(info, 'latest.author.avatar', generateGravatarUrl());
|
||||
}
|
||||
|
||||
if (_.get(info, 'latest.contributors.length', 0) > 0) {
|
||||
info.latest.contributors = _.map(info.latest.contributors, (contributor) => {
|
||||
if (typeof contributor.email === 'string') {
|
||||
contributor.avatar = generateGravatarUrl(contributor.email);
|
||||
} else {
|
||||
contributor.avatar = generateGravatarUrl();
|
||||
}
|
||||
|
||||
return contributor;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
res.end(JSON.stringify(info));
|
||||
} else {
|
||||
res.status(404);
|
||||
res.end();
|
||||
}
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
  // What are you looking for? Logout? The client side removes the token when the user clicks logout,
  // or it auto-expires after 24 hours.
  // This token is different from the token sent to the npm client.
|
||||
|
|
|
@ -1,14 +1,13 @@
|
|||
'use strict';
|
||||
import express from 'express';
|
||||
import _ from 'lodash';
|
||||
import fs from 'fs';
|
||||
import Search from '../../lib/search';
|
||||
import * as Utils from '../../lib/utils';
|
||||
|
||||
const express = require('express');
|
||||
const Search = require('../../lib/search');
|
||||
const Middleware = require('./middleware');
|
||||
const Utils = require('../../lib/utils');
|
||||
/* eslint new-cap:off */
|
||||
const router = express.Router();
|
||||
const _ = require('lodash');
|
||||
const env = require('../../config/env');
|
||||
const fs = require('fs');
|
||||
const template = fs.readFileSync(`${env.DIST_PATH}/index.html`).toString();
|
||||
const spliceURL = require('../../utils/string').spliceURL;
|
||||
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
/* eslint prefer-spread: "off" */
|
||||
/* eslint prefer-rest-params: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
import {loadPlugin} from '../lib/plugin-loader';
|
||||
const Crypto = require('crypto');
|
||||
const Error = require('http-errors');
|
||||
const Logger = require('./logger');
|
||||
const load_plugins = require('./plugin-loader').load_plugins;
|
||||
const pkgJson = require('../../package.json');
|
||||
const jwt = require('jsonwebtoken');
|
||||
/**
|
||||
|
@ -35,7 +33,7 @@ class Auth {
|
|||
}
|
||||
}
|
||||
|
||||
this.plugins = load_plugins(config, config.auth, plugin_params, function(p) {
|
||||
this.plugins = loadPlugin(config, config.auth, plugin_params, function(p) {
|
||||
return p.authenticate || p.allow_access || p.allow_publish;
|
||||
});
|
||||
|
||||
|
|
183
src/lib/bootstrap.js
vendored
Normal file
|
@ -0,0 +1,183 @@
|
|||
import {assign, isObject, isFunction} from 'lodash';
|
||||
import Path from 'path';
|
||||
import URL from 'url';
|
||||
import fs from 'fs';
|
||||
import http from 'http';
|
||||
import https from 'https';
|
||||
import constants from 'constants';
|
||||
|
||||
const server = require('../api/index');
|
||||
const Utils = require('./utils');
|
||||
const logger = require('./logger');
|
||||
|
||||
/**
|
||||
* Retrieve all addresses defined in the config file.
|
||||
 * Verdaccio is able to listen on multiple ports
|
||||
* @param {String} argListen
|
||||
* @param {String} configListen
|
||||
* eg:
|
||||
* listen:
|
||||
- localhost:5555
|
||||
- localhost:5557
|
||||
@return {Array}
|
||||
*/
|
||||
function getListListenAddresses(argListen, configListen) {
|
||||
// command line || config file || default
|
||||
let addresses;
|
||||
if (argListen) {
|
||||
addresses = [argListen];
|
||||
} else if (Array.isArray(configListen)) {
|
||||
addresses = configListen;
|
||||
} else if (configListen) {
|
||||
addresses = [configListen];
|
||||
} else {
|
||||
addresses = ['4873'];
|
||||
}
|
||||
addresses = addresses.map(function(addr) {
|
||||
const parsedAddr = Utils.parse_address(addr);
|
||||
|
||||
if (!parsedAddr) {
|
||||
logger.logger.warn({addr: addr},
|
||||
'invalid address - @{addr}, we expect a port (e.g. "4873"),'
|
||||
+ ' host:port (e.g. "localhost:4873") or full url'
|
||||
+ ' (e.g. "http://localhost:4873/")');
|
||||
}
|
||||
|
||||
return parsedAddr;
|
||||
}).filter(Boolean);
|
||||
|
||||
return addresses;
|
||||
}
|
||||
|
||||
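A small illustration of the precedence implemented by getListListenAddresses() (CLI flag, then config file, then the 4873 default); it assumes the function above is in scope and that Utils.parse_address accepts these sample addresses:

const assert = require('assert');

assert.equal(getListListenAddresses(undefined, ['localhost:5555', 'localhost:5557']).length, 2); // config list kept
assert.equal(getListListenAddresses('0.0.0.0:4873', ['localhost:5555']).length, 1);              // CLI flag wins
assert.equal(getListListenAddresses(undefined, undefined).length, 1);                            // default port 4873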
/**
|
||||
* Trigger the server after configuration has been loaded.
|
||||
* @param {Object} config
|
||||
* @param {Object} cliArguments
|
||||
* @param {String} configPath
|
||||
* @param {String} pkgVersion
|
||||
* @param {String} pkgName
|
||||
*/
|
||||
function startVerdaccio(config, cliListen, configPath, pkgVersion, pkgName, callback) {
|
||||
if (isObject(config) === false) {
|
||||
throw new Error('config file must be an object');
|
||||
}
|
||||
|
||||
if (!config.self_path) {
|
||||
config.self_path = Path.resolve(configPath);
|
||||
}
|
||||
if (!config.https) {
|
||||
config.https = {enable: false};
|
||||
}
|
||||
|
||||
const app = server(config);
|
||||
const addresses = getListListenAddresses(cliListen, config.listen);
|
||||
|
||||
addresses.forEach(function(addr) {
|
||||
let webServer;
|
||||
if (addr.proto === 'https') {
|
||||
// https must either have key, cert and ca, or a pfx and (optionally) a passphrase
|
||||
if (!config.https || !config.https.key || !config.https.cert || !config.https.ca) {
|
||||
displayHTTPSWarning(configPath);
|
||||
}
|
||||
|
||||
webServer = handleHTTPS(app, configPath, config);
|
||||
} else { // http
|
||||
webServer = http.createServer(app);
|
||||
}
|
||||
|
||||
unlinkAddressPath(addr);
|
||||
|
||||
callback(webServer, addr, pkgName, pkgVersion);
|
||||
});
|
||||
}
|
||||
|
||||
function unlinkAddressPath(addr) {
|
||||
if (addr.path && fs.existsSync(addr.path)) {
|
||||
fs.unlinkSync(addr.path);
|
||||
}
|
||||
}
|
||||
|
||||
function displayHTTPSWarning(configPath) {
|
||||
const resolveConfigPath = function(file) {
|
||||
return Path.resolve(Path.dirname(configPath), file);
|
||||
};
|
||||
|
||||
logger.logger.fatal([
|
||||
'You have enabled HTTPS and need to specify either ',
|
||||
' "https.key", "https.cert" and "https.ca" or ',
|
||||
' "https.pfx" and optionally "https.passphrase" ',
|
||||
'to run https server',
|
||||
'',
|
||||
// commands are borrowed from node.js docs
|
||||
'To quickly create self-signed certificate, use:',
|
||||
' $ openssl genrsa -out ' + resolveConfigPath('verdaccio-key.pem') + ' 2048',
|
||||
' $ openssl req -new -sha256 -key ' + resolveConfigPath('verdaccio-key.pem') + ' -out ' + resolveConfigPath('verdaccio-csr.pem'),
|
||||
' $ openssl x509 -req -in ' + resolveConfigPath('verdaccio-csr.pem') +
|
||||
' -signkey ' + resolveConfigPath('verdaccio-key.pem') + ' -out ' + resolveConfigPath('verdaccio-cert.pem'),
|
||||
'',
|
||||
'And then add to config file (' + configPath + '):',
|
||||
' https:',
|
||||
' key: verdaccio-key.pem',
|
||||
' cert: verdaccio-cert.pem',
|
||||
' ca: verdaccio-cert.pem',
|
||||
].join('\n'));
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
function handleHTTPS(app, configPath, config) {
|
||||
try {
|
||||
let httpsOptions = {
|
||||
secureProtocol: 'SSLv23_method', // disable insecure SSLv2 and SSLv3
|
||||
secureOptions: constants.SSL_OP_NO_SSLv2 | constants.SSL_OP_NO_SSLv3,
|
||||
};
|
||||
|
||||
if (config.https.pfx) {
|
||||
httpsOptions = assign(httpsOptions, {
|
||||
pfx: fs.readFileSync(config.https.pfx),
|
||||
passphrase: config.https.passphrase || '',
|
||||
});
|
||||
} else {
|
||||
httpsOptions = assign(httpsOptions, {
|
||||
key: fs.readFileSync(config.https.key),
|
||||
cert: fs.readFileSync(config.https.cert),
|
||||
ca: fs.readFileSync(config.https.ca),
|
||||
});
|
||||
}
|
||||
return https.createServer(httpsOptions, app);
|
||||
} catch (err) { // catch errors related to certificate loading
|
||||
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}');
|
||||
process.exit(2);
|
||||
}
|
||||
}
|
||||
|
||||
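For reference, the two config.https shapes handleHTTPS() accepts, mirroring the branches above; the file names and passphrase are placeholders:

// Certificate files variant
const httpsWithCertificates = {
  key: './verdaccio-key.pem',
  cert: './verdaccio-cert.pem',
  ca: './verdaccio-cert.pem',
};

// PFX bundle variant; the passphrase is optional and defaults to an empty string above
const httpsWithPfx = {
  pfx: './verdaccio.pfx',
  passphrase: 'secret',
};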
function listenDefaultCallback(webServer, addr, pkgName, pkgVersion) {
|
||||
webServer.listen(addr.port || addr.path, addr.host, () => {
|
||||
// send a message for tests
|
||||
if (isFunction(process.send)) {
|
||||
process.send({
|
||||
verdaccio_started: true,
|
||||
});
|
||||
}
|
||||
}).on('error', function(err) {
|
||||
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}');
|
||||
process.exit(2);
|
||||
});
|
||||
|
||||
logger.logger.warn({
|
||||
addr: ( addr.path
|
||||
? URL.format({
|
||||
protocol: 'unix',
|
||||
pathname: addr.path,
|
||||
})
|
||||
: URL.format({
|
||||
protocol: addr.proto,
|
||||
hostname: addr.host,
|
||||
port: addr.port,
|
||||
pathname: '/',
|
||||
})
|
||||
),
|
||||
version: pkgName + '/' + pkgVersion,
|
||||
}, 'http address - @{addr} - @{version}');
|
||||
}
|
||||
|
||||
export {startVerdaccio, listenDefaultCallback};
|
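A minimal sketch of calling the new bootstrap entry point directly, as cli.js does further down; the inline config object is an assumption, a real setup loads it from config.yaml through Utils.parseConfigFile:

import {startVerdaccio, listenDefaultCallback} from './bootstrap';

// Placeholder config; self_path and https are filled in by startVerdaccio when missing.
const config = {storage: './storage'};

startVerdaccio(config, '4873', './config.yaml', '3.0.0', 'verdaccio', listenDefaultCallback);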
178
src/lib/cli.js
|
@ -2,9 +2,9 @@
|
|||
|
||||
/* eslint no-sync:0 */
|
||||
/* eslint no-empty:0 */
|
||||
'use strict';
|
||||
|
||||
const _ = require('lodash');
|
||||
import {startVerdaccio, listenDefaultCallback} from './bootstrap';
|
||||
import findConfigFile from './config-path';
|
||||
|
||||
if (process.getuid && process.getuid() === 0) {
|
||||
global.console.error('Verdaccio doesn\'t need superuser privileges. Don\'t run it under root.');
|
||||
|
@ -22,13 +22,6 @@ const logger = require('./logger');
|
|||
logger.setup(); // default setup
|
||||
|
||||
const commander = require('commander');
|
||||
const constants = require('constants');
|
||||
const fs = require('fs');
|
||||
const http = require('http');
|
||||
const https = require('https');
|
||||
const Path = require('path');
|
||||
const URL = require('url');
|
||||
const server = require('../api/index');
|
||||
const Utils = require('./utils');
|
||||
const pkginfo = require('pkginfo')(module); // eslint-disable-line no-unused-vars
|
||||
const pkgVersion = module.exports.version;
|
||||
|
@ -45,170 +38,25 @@ if (commander.args.length == 1 && !commander.config) {
|
|||
commander.config = commander.args.pop();
|
||||
}
|
||||
|
||||
if (commander.args.length != 0) {
|
||||
if (commander.args.length !== 0) {
|
||||
commander.help();
|
||||
}
|
||||
let verdaccioConfiguration;
|
||||
let configPathLocation;
|
||||
const cliListener = commander.listen;
|
||||
|
||||
let config;
|
||||
let config_path;
|
||||
try {
|
||||
if (commander.config) {
|
||||
config_path = Path.resolve(commander.config);
|
||||
} else {
|
||||
config_path = require('./config-path')();
|
||||
}
|
||||
config = Utils.parseConfigFile(config_path);
|
||||
logger.logger.warn({file: config_path}, 'config file - @{file}');
|
||||
configPathLocation = findConfigFile(commander.config);
|
||||
verdaccioConfiguration = Utils.parseConfigFile(configPathLocation);
|
||||
process.title = verdaccioConfiguration.web && verdaccioConfiguration.web.title || 'verdaccio';
|
||||
|
||||
logger.logger.warn({file: configPathLocation}, 'config file - @{file}');
|
||||
} catch (err) {
|
||||
logger.logger.fatal({file: config_path, err: err}, 'cannot open config file @{file}: @{!err.message}');
|
||||
logger.logger.fatal({file: configPathLocation, err: err}, 'cannot open config file @{file}: @{!err.message}');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
process.title = config.web && config.web.title || 'verdaccio';
|
||||
|
||||
afterConfigLoad();
|
||||
|
||||
/**
|
||||
* Retrieve all addresses defined in the config file.
|
||||
* Verdaccio is able to listen multiple ports
|
||||
* eg:
|
||||
* listen:
|
||||
- localhost:5555
|
||||
- localhost:5557
|
||||
@return {Array}
|
||||
*/
|
||||
function get_listen_addresses() {
|
||||
// command line || config file || default
|
||||
let addresses;
|
||||
if (commander.listen) {
|
||||
addresses = [commander.listen];
|
||||
} else if (Array.isArray(config.listen)) {
|
||||
addresses = config.listen;
|
||||
} else if (config.listen) {
|
||||
addresses = [config.listen];
|
||||
} else {
|
||||
addresses = ['4873'];
|
||||
}
|
||||
addresses = addresses.map(function(addr) {
|
||||
let parsed_addr = Utils.parse_address(addr);
|
||||
|
||||
if (!parsed_addr) {
|
||||
logger.logger.warn({addr: addr},
|
||||
'invalid address - @{addr}, we expect a port (e.g. "4873"),'
|
||||
+ ' host:port (e.g. "localhost:4873") or full url'
|
||||
+ ' (e.g. "http://localhost:4873/")');
|
||||
}
|
||||
|
||||
return parsed_addr;
|
||||
}).filter(Boolean);
|
||||
|
||||
return addresses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger the server after configuration has been loaded.
|
||||
*/
|
||||
function afterConfigLoad() {
|
||||
if (!config.self_path) {
|
||||
config.self_path = Path.resolve(config_path);
|
||||
}
|
||||
if (!config.https) {
|
||||
config.https = {enable: false};
|
||||
}
|
||||
const app = server(config);
|
||||
get_listen_addresses().forEach(function(addr) {
|
||||
let webServer;
|
||||
if (addr.proto === 'https') { // https must either have key cert and ca or a pfx and (optionally) a passphrase
|
||||
if (!config.https || !((config.https.key && config.https.cert && config.https.ca) || config.https.pfx)) {
|
||||
let conf_path = function(file) {
|
||||
if (!file) return config_path;
|
||||
return Path.resolve(Path.dirname(config_path), file);
|
||||
};
|
||||
|
||||
logger.logger.fatal([
|
||||
'You need to specify either ',
|
||||
' "https.key", "https.cert" and "https.ca" or ',
|
||||
' "https.pfx" and optionally "https.passphrase" ',
|
||||
'to run https server',
|
||||
'',
|
||||
// commands are borrowed from node.js docs
|
||||
'To quickly create self-signed certificate, use:',
|
||||
' $ openssl genrsa -out ' + conf_path('verdaccio-key.pem') + ' 2048',
|
||||
' $ openssl req -new -sha256 -key ' + conf_path('verdaccio-key.pem') + ' -out ' + conf_path('verdaccio-csr.pem'),
|
||||
' $ openssl x509 -req -in ' + conf_path('verdaccio-csr.pem') +
|
||||
' -signkey ' + conf_path('verdaccio-key.pem') + ' -out ' + conf_path('verdaccio-cert.pem'),
|
||||
'',
|
||||
'And then add to config file (' + conf_path() + '):',
|
||||
' https:',
|
||||
' key: verdaccio-key.pem',
|
||||
' cert: verdaccio-cert.pem',
|
||||
' ca: verdaccio-cert.pem',
|
||||
].join('\n'));
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
try {
|
||||
const httpsOptions = {
|
||||
secureProtocol: 'SSLv23_method', // disable insecure SSLv2 and SSLv3
|
||||
secureOptions: constants.SSL_OP_NO_SSLv2 | constants.SSL_OP_NO_SSLv3,
|
||||
};
|
||||
|
||||
if (config.https.pfx) {
|
||||
Object.assign(httpsOptions, {
|
||||
pfx: fs.readFileSync(config.https.pfx),
|
||||
passphrase: config.https.passphrase || '',
|
||||
});
|
||||
} else {
|
||||
Object.assign(httpsOptions, {
|
||||
key: fs.readFileSync(config.https.key),
|
||||
cert: fs.readFileSync(config.https.cert),
|
||||
ca: fs.readFileSync(config.https.ca),
|
||||
});
|
||||
}
|
||||
webServer = https.createServer(httpsOptions, app);
|
||||
} catch (err) { // catch errors related to certificate loading
|
||||
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}');
|
||||
process.exit(2);
|
||||
}
|
||||
} else { // http
|
||||
webServer = http.createServer(app);
|
||||
}
|
||||
|
||||
if (addr.path && fs.existsSync(addr.path)) {
|
||||
fs.unlinkSync(addr.path);
|
||||
}
|
||||
|
||||
webServer
|
||||
.listen(addr.port || addr.path, addr.host)
|
||||
.on('error', function(err) {
|
||||
logger.logger.fatal({err: err}, 'cannot create server: @{err.message}');
|
||||
process.exit(2);
|
||||
});
|
||||
|
||||
logger.logger.warn({
|
||||
addr: ( addr.path
|
||||
? URL.format({
|
||||
protocol: 'unix',
|
||||
pathname: addr.path,
|
||||
})
|
||||
: URL.format({
|
||||
protocol: addr.proto,
|
||||
hostname: addr.host,
|
||||
port: addr.port,
|
||||
pathname: '/',
|
||||
})
|
||||
),
|
||||
version: pkgName + '/' + pkgVersion,
|
||||
}, 'http address - @{addr} - @{version}');
|
||||
});
|
||||
|
||||
// undocumented stuff for tests
|
||||
if (_.isFunction(process.send)) {
|
||||
process.send({
|
||||
verdaccio_started: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
startVerdaccio(verdaccioConfiguration, cliListener, configPathLocation, pkgVersion, pkgName, listenDefaultCallback);
|
||||
|
||||
process.on('uncaughtException', function(err) {
|
||||
logger.logger.fatal( {
|
||||
|
|
|
@ -1,110 +1,114 @@
|
|||
'use strict';
|
||||
import fs from 'fs';
|
||||
import _ from 'lodash';
|
||||
import Path from 'path';
|
||||
import logger from './logger';
|
||||
import mkdirp from 'mkdirp';
|
||||
|
||||
import {folder_exists, file_exists} from './utils';
|
||||
|
||||
const fs = require('fs');
|
||||
const Path = require('path');
|
||||
const logger = require('./logger');
|
||||
const CONFIG_FILE = 'config.yaml';
|
||||
const pkgJson = require('../../package.json');
|
||||
const XDG = 'xdg';
|
||||
const WIN = 'win';
|
||||
const WIN32 = 'win32';
|
||||
const pkgJSON = require('../../package.json');
|
||||
|
||||
/**
|
||||
* Find and get the first config file that match.
|
||||
* @return {String} the config file path
|
||||
*/
|
||||
function find_config_file() {
|
||||
const paths = get_paths();
|
||||
|
||||
for (let i=0; i<paths.length; i++) {
|
||||
if (file_exists(paths[i].path)) return paths[i].path;
|
||||
function findConfigFile(configPath) {
|
||||
if (_.isNil(configPath) === false) {
|
||||
return Path.resolve(configPath);
|
||||
}
|
||||
|
||||
create_config_file(paths[0]);
|
||||
return paths[0].path;
|
||||
const configPaths = getConfigPaths();
|
||||
|
||||
if (_.isEmpty(configPaths)) {
|
||||
throw new Error('no configuration files can be processed');
|
||||
}
|
||||
|
||||
const primaryConf = _.find(configPaths, (configLocation) => file_exists(configLocation.path));
|
||||
if (_.isNil(primaryConf) === false) {
|
||||
return primaryConf.path;
|
||||
}
|
||||
|
||||
return createConfigFile(_.head(configPaths)).path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a default config file in your system.
|
||||
* @param {String} config_path
|
||||
*/
|
||||
function create_config_file(config_path) {
|
||||
require('mkdirp').sync(Path.dirname(config_path.path));
|
||||
logger.logger.info({file: config_path.path}, 'Creating default config file in @{file}');
|
||||
function createConfigFile(configLocation) {
|
||||
createConfigFolder(configLocation);
|
||||
|
||||
let created_config = fs.readFileSync(require.resolve('../../conf/default.yaml'), 'utf8');
|
||||
const defaultConfig = updateStorageLinks(configLocation, readDefaultConfig());
|
||||
|
||||
fs.writeFileSync(configLocation.path, defaultConfig);
|
||||
|
||||
return configLocation;
|
||||
}
|
||||
|
||||
function readDefaultConfig() {
|
||||
return fs.readFileSync(require.resolve('../../conf/default.yaml'), 'utf8');
|
||||
}
|
||||
|
||||
function createConfigFolder(configLocation) {
|
||||
mkdirp.sync(Path.dirname(configLocation.path));
|
||||
logger.logger.info({file: configLocation.path}, 'Creating default config file in @{file}');
|
||||
}
|
||||
|
||||
function updateStorageLinks(configLocation, defaultConfig) {
|
||||
if (configLocation.type !== XDG) {
|
||||
return defaultConfig;
|
||||
}
|
||||
|
||||
if (config_path.type === 'xdg') {
|
||||
// $XDG_DATA_HOME defines the base directory relative to which user specific data files should be stored,
|
||||
// If $XDG_DATA_HOME is either not set or empty, a default equal to $HOME/.local/share should be used.
|
||||
let data_dir = process.env.XDG_DATA_HOME|| Path.join(process.env.HOME, '.local', 'share');
|
||||
if (folder_exists(data_dir)) {
|
||||
data_dir = Path.resolve(Path.join(data_dir, pkgJson.name, 'storage'));
|
||||
created_config = created_config.replace(/^storage: .\/storage$/m, `storage: ${data_dir}`);
|
||||
let dataDir = process.env.XDG_DATA_HOME || Path.join(process.env.HOME, '.local', 'share');
|
||||
if (folder_exists(dataDir)) {
|
||||
dataDir = Path.resolve(Path.join(dataDir, pkgJSON.name, 'storage'));
|
||||
return defaultConfig.replace(/^storage: .\/storage$/m, `storage: ${dataDir}`);
|
||||
} else {
|
||||
return defaultConfig;
|
||||
}
|
||||
}
|
||||
|
||||
fs.writeFileSync(config_path.path, created_config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a list of possible config file locations.
|
||||
* @return {Array}
|
||||
*/
|
||||
function get_paths() {
|
||||
let try_paths = [];
|
||||
let xdg_config = process.env.XDG_CONFIG_HOME
|
||||
|| process.env.HOME && Path.join(process.env.HOME, '.config');
|
||||
if (xdg_config && folder_exists(xdg_config)) {
|
||||
try_paths.push({
|
||||
path: Path.join(xdg_config, pkgJson.name, CONFIG_FILE),
|
||||
type: 'xdg',
|
||||
});
|
||||
}
|
||||
function getConfigPaths() {
|
||||
return [getXDGDirectory(), getWindowsDirectory(), getRelativeDefaultDirectory(), getOldDirectory()].filter((path) => !!path);
|
||||
}
|
||||
|
||||
if (process.platform === 'win32' && process.env.APPDATA && folder_exists(process.env.APPDATA)) {
|
||||
try_paths.push({
|
||||
path: Path.resolve(Path.join(process.env.APPDATA, pkgJson.name, CONFIG_FILE)),
|
||||
type: 'win',
|
||||
});
|
||||
}
|
||||
const getXDGDirectory = () => {
|
||||
const XDGConfig = getXDGHome() ||
|
||||
process.env.HOME && Path.join(process.env.HOME, '.config');
|
||||
|
||||
try_paths.push({
|
||||
path: Path.resolve(Path.join('.', pkgJson.name, CONFIG_FILE)),
|
||||
if (XDGConfig && folder_exists(XDGConfig)) {
|
||||
return {
|
||||
path: Path.join(XDGConfig, pkgJSON.name, CONFIG_FILE),
|
||||
type: XDG,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const getXDGHome = () => process.env.XDG_CONFIG_HOME;
|
||||
|
||||
const getWindowsDirectory = () => {
|
||||
if (process.platform === WIN32 && process.env.APPDATA && folder_exists(process.env.APPDATA)) {
|
||||
return {
|
||||
path: Path.resolve(Path.join(process.env.APPDATA, pkgJSON.name, CONFIG_FILE)),
|
||||
type: WIN,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const getRelativeDefaultDirectory = () => {
|
||||
return {
|
||||
path: Path.resolve(Path.join('.', pkgJSON.name, CONFIG_FILE)),
|
||||
type: 'def',
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
// backward compatibility
|
||||
try_paths.push({
|
||||
const getOldDirectory = () => {
|
||||
return {
|
||||
path: Path.resolve(Path.join('.', CONFIG_FILE)),
|
||||
type: 'old',
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
return try_paths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the path already exist.
|
||||
* @param {String} path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function folder_exists(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
return stat.isDirectory();
|
||||
} catch(_) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the file already exist.
|
||||
* @param {String} path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function file_exists(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
return stat.isFile();
|
||||
} catch(_) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = find_config_file;
|
||||
export default findConfigFile;
|
||||
|
|
|
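To summarize the lookup order implemented by getConfigPaths() above, a sketch of the candidate locations on a Linux host; the HOME value is an assumption and the Windows entry only applies when APPDATA exists:

const Path = require('path');

const home = process.env.HOME || '/home/user'; // assumed
const candidates = [
  Path.join(home, '.config', 'verdaccio', 'config.yaml'), // xdg
  // %APPDATA%\verdaccio\config.yaml                      // win (Windows only)
  Path.resolve('./verdaccio/config.yaml'),                // def
  Path.resolve('./config.yaml'),                          // old (backward compatibility)
];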
@ -211,7 +211,7 @@ class Config {
|
|||
* @return {String}
|
||||
*/
|
||||
checkSecretKey(secret) {
|
||||
if (_.isNil(secret) === false) {
|
||||
if (_.isNil(secret) === false && secret !== '') {
|
||||
this.secret = secret;
|
||||
return secret;
|
||||
}
|
||||
|
|
875
src/lib/local-storage.js
Normal file
|
@ -0,0 +1,875 @@
|
|||
// @flow
|
||||
|
||||
/* eslint prefer-rest-params: 0 */
|
||||
|
||||
import Crypto from 'crypto';
|
||||
import assert from 'assert';
|
||||
import fs from 'fs';
|
||||
import Path from 'path';
|
||||
import Stream from 'stream';
|
||||
import UrlNode from 'url';
|
||||
import _ from 'lodash';
|
||||
// $FlowFixMe
|
||||
import async from 'async';
|
||||
import * as Utils from './utils';
|
||||
import {
|
||||
generatePackageTemplate, normalizePackage, generateRevision, cleanUpReadme,
|
||||
fileExist, noSuchFile, DEFAULT_REVISION, pkgFileName,
|
||||
} from './storage-utils';
|
||||
import {loadPlugin} from '../lib/plugin-loader';
|
||||
import LocalDatabase from '@verdaccio/local-storage';
|
||||
import {UploadTarball, ReadTarball} from '@verdaccio/streams';
|
||||
import type {
|
||||
IStorage,
|
||||
Package,
|
||||
Config,
|
||||
MergeTags,
|
||||
Version,
|
||||
DistFile,
|
||||
Callback,
|
||||
Logger,
|
||||
} from '@verdaccio/types';
|
||||
import type {
|
||||
ILocalData,
|
||||
IPackageStorage,
|
||||
} from '@verdaccio/local-storage';
|
||||
|
||||
/**
|
||||
* Implements Storage interface (same for storage.js, local-storage.js, up-storage.js).
|
||||
*/
|
||||
class LocalStorage implements IStorage {
|
||||
|
||||
config: Config;
|
||||
localData: ILocalData;
|
||||
logger: Logger;
|
||||
|
||||
constructor(config: Config, logger: Logger) {
|
||||
this.logger = logger.child({sub: 'fs'});
|
||||
this.config = config;
|
||||
this.localData = this._loadStorage(config, logger);
|
||||
}
|
||||
|
||||
_loadStorage(config: Config, logger: Logger) {
|
||||
const Storage = this._loadStorePlugin();
|
||||
|
||||
if (_.isNil(Storage)) {
|
||||
return new LocalDatabase(this.config, logger);
|
||||
} else {
|
||||
return Storage;
|
||||
}
|
||||
}
|
||||
|
||||
_loadStorePlugin() {
|
||||
const plugin_params = {
|
||||
config: this.config,
|
||||
logger: this.logger,
|
||||
};
|
||||
|
||||
return _.head(loadPlugin(this.config, this.config.store, plugin_params, function(plugin) {
|
||||
return plugin.getPackageStorage;
|
||||
}));
|
||||
}
|
||||
|
||||
addPackage(name: string, pkg: Package, callback: Callback) {
|
||||
const storage: IPackageStorage = this._getLocalStorage(name);
|
||||
|
||||
if (_.isNil(storage)) {
|
||||
return callback( Utils.ErrorCode.get404('this package cannot be added'));
|
||||
}
|
||||
|
||||
storage.createPackage(pkgFileName, generatePackageTemplate(name), (err) => {
|
||||
if (_.isNull(err) === false && err.code === fileExist) {
|
||||
return callback( Utils.ErrorCode.get409());
|
||||
}
|
||||
|
||||
const latest = Utils.getLatestVersion(pkg);
|
||||
if (_.isNil(latest) === false && pkg.versions[latest]) {
|
||||
return callback(null, pkg.versions[latest]);
|
||||
}
|
||||
|
||||
return callback();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove package.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
removePackage(name: string, callback: Callback) {
|
||||
let storage: IPackageStorage = this._getLocalStorage(name);
|
||||
|
||||
if (_.isNil(storage)) {
|
||||
return callback( Utils.ErrorCode.get404());
|
||||
}
|
||||
|
||||
storage.readPackage(pkgFileName, (err, data) => {
|
||||
if (_.isNil(err) === false) {
|
||||
if (err.code === noSuchFile) {
|
||||
return callback( Utils.ErrorCode.get404());
|
||||
} else {
|
||||
return callback(err);
|
||||
}
|
||||
}
|
||||
|
||||
data = normalizePackage(data);
|
||||
|
||||
const removeFailed = this.localData.remove(name);
|
||||
|
||||
if (removeFailed) {
|
||||
// This will happen when database is locked
|
||||
return callback(Utils.ErrorCode.get422(removeFailed.message));
|
||||
}
|
||||
|
||||
storage.deletePackage(pkgFileName, (err) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
const attachments = Object.keys(data._attachments);
|
||||
|
||||
this._deleteAttachments(storage, attachments, callback);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronize remote package info with the local one
|
||||
* @param {*} name
|
||||
* @param {*} packageInfo
|
||||
* @param {*} callback
|
||||
*/
|
||||
updateVersions(name: string, packageInfo: Package, callback: Callback) {
|
||||
this._readCreatePackage(name, (err, packageLocalJson) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
let change = false;
|
||||
for (let versionId in packageInfo.versions) {
|
||||
if (_.isNil(packageLocalJson.versions[versionId])) {
|
||||
let version = packageInfo.versions[versionId];
|
||||
|
||||
// we don't keep readmes for package versions,
|
||||
// only one readme per package
|
||||
version = cleanUpReadme(version);
|
||||
|
||||
change = true;
|
||||
packageLocalJson.versions[versionId] = version;
|
||||
|
||||
if (version.dist && version.dist.tarball) {
|
||||
const urlObject: any = UrlNode.parse(version.dist.tarball);
|
||||
const filename = urlObject.pathname.replace(/^.*\//, '');
|
||||
|
||||
// we do NOT overwrite any existing records
|
||||
if (_.isNil(packageLocalJson._distfiles[filename])) {
|
||||
let hash: DistFile = packageLocalJson._distfiles[filename] = {
|
||||
url: version.dist.tarball,
|
||||
sha: version.dist.shasum,
|
||||
};
|
||||
/* eslint spaced-comment: 0 */
|
||||
//$FlowFixMe
|
||||
const upLink: string = version[Symbol.for('__verdaccio_uplink')];
|
||||
|
||||
if (_.isNil(upLink) === false) {
|
||||
this._updateUplinkToRemoteProtocol(hash, upLink);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (let tag in packageInfo['dist-tags']) {
|
||||
if (!packageLocalJson['dist-tags'][tag] || packageLocalJson['dist-tags'][tag] !== packageInfo['dist-tags'][tag]) {
|
||||
change = true;
|
||||
packageLocalJson['dist-tags'][tag] = packageInfo['dist-tags'][tag];
|
||||
}
|
||||
}
|
||||
|
||||
for (let up in packageInfo._uplinks) {
|
||||
if (Object.prototype.hasOwnProperty.call(packageInfo._uplinks, up)) {
|
||||
const need_change = !Utils.is_object(packageLocalJson._uplinks[up])
|
||||
|| packageInfo._uplinks[up].etag !== packageLocalJson._uplinks[up].etag
|
||||
|| packageInfo._uplinks[up].fetched !== packageLocalJson._uplinks[up].fetched;
|
||||
|
||||
if (need_change) {
|
||||
change = true;
|
||||
packageLocalJson._uplinks[up] = packageInfo._uplinks[up];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (packageInfo.readme !== packageLocalJson.readme) {
|
||||
packageLocalJson.readme = packageInfo.readme;
|
||||
change = true;
|
||||
}
|
||||
|
||||
if ('time' in packageInfo) {
|
||||
packageLocalJson.time = packageInfo.time;
|
||||
change = true;
|
||||
}
|
||||
|
||||
if (change) {
|
||||
this.logger.debug('updating package info');
|
||||
this._writePackage(name, packageLocalJson, function(err) {
|
||||
callback(err, packageLocalJson);
|
||||
});
|
||||
} else {
|
||||
callback(null, packageLocalJson);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new version to a previous local package.
|
||||
* @param {*} name
|
||||
* @param {*} version
|
||||
* @param {*} metadata
|
||||
* @param {*} tag
|
||||
* @param {*} callback
|
||||
*/
|
||||
addVersion(name: string, version: string, metadata: Version,
|
||||
tag: string,
|
||||
callback: Callback) {
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
// keep only one readme per package
|
||||
data.readme = metadata.readme;
|
||||
|
||||
// TODO: lodash remove
|
||||
metadata = cleanUpReadme(metadata);
|
||||
|
||||
if (data.versions[version] != null) {
|
||||
return cb( Utils.ErrorCode.get409() );
|
||||
}
|
||||
|
||||
// if uploaded tarball has a different shasum, it's very likely that we have some kind of error
|
||||
if (Utils.is_object(metadata.dist) && _.isString(metadata.dist.tarball)) {
|
||||
let tarball = metadata.dist.tarball.replace(/.*\//, '');
|
||||
|
||||
if (Utils.is_object(data._attachments[tarball])) {
|
||||
|
||||
if (_.isNil(data._attachments[tarball].shasum) === false && _.isNil(metadata.dist.shasum) === false) {
|
||||
if (data._attachments[tarball].shasum != metadata.dist.shasum) {
|
||||
const errorMessage = `shasum error, ${data._attachments[tarball].shasum} != ${metadata.dist.shasum}`;
|
||||
return cb( Utils.ErrorCode.get400(errorMessage) );
|
||||
}
|
||||
}
|
||||
|
||||
let currentDate = new Date().toISOString();
|
||||
data.time['modified'] = currentDate;
|
||||
|
||||
if (('created' in data.time) === false) {
|
||||
data.time.created = currentDate;
|
||||
}
|
||||
|
||||
data.time[version] = currentDate;
|
||||
data._attachments[tarball].version = version;
|
||||
}
|
||||
}
|
||||
|
||||
data.versions[version] = metadata;
|
||||
Utils.tag_version(data, version, tag);
|
||||
|
||||
let addFailed = this.localData.add(name);
|
||||
if (addFailed) {
|
||||
return cb(Utils.ErrorCode.get422(addFailed.message));
|
||||
}
|
||||
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge a new list of tags for a local packages with the existing one.
|
||||
* @param {*} name
|
||||
* @param {*} tags
|
||||
* @param {*} callback
|
||||
*/
|
||||
mergeTags(name: string, tags: MergeTags, callback: Callback) {
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
for (let t: string in tags) {
|
||||
if (_.isNull(tags[t])) {
|
||||
delete data['dist-tags'][t];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (_.isNil(data.versions[tags[t]])) {
|
||||
return cb( this._getVersionNotFound() );
|
||||
}
|
||||
const key: string = tags[t];
|
||||
Utils.tag_version(data, key, t);
|
||||
}
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
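A sketch of a MergeTags payload as consumed by mergeTags() above; the versions are made up. A null value deletes the tag, and any other value must reference an existing version or the 404 above is returned:

const tags = {
  latest: '1.2.0',     // re-point "latest" to an existing version
  beta: '1.3.0-rc.1',  // add or update the "beta" tag
  old: null,           // remove the "old" dist-tag
};

// localStorage.mergeTags('some-package', tags, (err) => { /* ... */ });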
/**
|
||||
* Return version not found
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
_getVersionNotFound() {
|
||||
return Utils.ErrorCode.get404('this version doesn\'t exist');
|
||||
}
|
||||
|
||||
/**
|
||||
* Return file not available
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
_getFileNotAvailable() {
|
||||
return Utils.ErrorCode.get404('no such file available');
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the package metadata, tags and attachments (tarballs).
|
||||
* Note: Currently supports unpublishing only.
|
||||
* @param {*} name
|
||||
* @param {*} pkg
|
||||
* @param {*} revision
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
changePackage(name: string,
|
||||
pkg: Package,
|
||||
revision?: string, callback: Callback) {
|
||||
if (!Utils.is_object(pkg.versions) || !Utils.is_object(pkg['dist-tags'])) {
|
||||
return callback( Utils.ErrorCode.get422());
|
||||
}
|
||||
|
||||
this._updatePackage(name, (jsonData, cb) => {
|
||||
for (let ver in jsonData.versions) {
|
||||
|
||||
if (_.isNil(pkg.versions[ver])) {
|
||||
this.logger.info( {name: name, version: ver}, 'unpublishing @{name}@@{version}');
|
||||
|
||||
delete jsonData.versions[ver];
|
||||
|
||||
for (let file in jsonData._attachments) {
|
||||
if (jsonData._attachments[file].version === ver) {
|
||||
delete jsonData._attachments[file].version;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
jsonData['dist-tags'] = pkg['dist-tags'];
|
||||
cb();
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Remove a tarball.
|
||||
* @param {*} name
|
||||
* @param {*} filename
|
||||
* @param {*} revision
|
||||
* @param {*} callback
|
||||
*/
|
||||
removeTarball(name: string, filename: string,
|
||||
revision: string, callback: Callback) {
|
||||
assert(Utils.validate_name(filename));
|
||||
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
if (data._attachments[filename]) {
|
||||
delete data._attachments[filename];
|
||||
cb();
|
||||
} else {
|
||||
cb(this._getFileNotAvailable());
|
||||
}
|
||||
}, (err) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
const storage = this._getLocalStorage(name);
|
||||
|
||||
if (storage) {
|
||||
storage.deletePackage(filename, callback);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a tarball.
|
||||
* @param {String} name
|
||||
* @param {String} filename
|
||||
* @return {Stream}
|
||||
*/
|
||||
addTarball(name: string, filename: string) {
|
||||
assert(Utils.validate_name(filename));
|
||||
|
||||
let length = 0;
|
||||
const shaOneHash = Crypto.createHash('sha1');
|
||||
const uploadStream = new UploadTarball();
|
||||
const _transform = uploadStream._transform;
|
||||
const storage = this._getLocalStorage(name);
|
||||
uploadStream.abort = function() {};
|
||||
uploadStream.done = function() {};
|
||||
|
||||
uploadStream._transform = function(data) {
|
||||
shaOneHash.update(data);
|
||||
// measure the length for validation reasons
|
||||
length += data.length;
|
||||
_transform.apply(uploadStream, arguments);
|
||||
};
|
||||
|
||||
if (name === pkgFileName || name === '__proto__') {
|
||||
process.nextTick(() => {
|
||||
uploadStream.emit('error', Utils.ErrorCode.get403());
|
||||
});
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
if (!storage) {
|
||||
process.nextTick(() => {
|
||||
uploadStream.emit('error', ('can\'t upload this package'));
|
||||
});
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
const writeStream = storage.writeTarball(filename);
|
||||
|
||||
writeStream.on('error', (err) => {
|
||||
if (err.code === fileExist) {
|
||||
uploadStream.emit('error', Utils.ErrorCode.get409());
|
||||
} else if (err.code === noSuchFile) {
|
||||
// check if package exists to throw an appropriate message
|
||||
this.getPackageMetadata(name, function(_err, res) {
|
||||
if (_err) {
|
||||
uploadStream.emit('error', _err);
|
||||
} else {
|
||||
uploadStream.emit('error', err);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
uploadStream.emit('error', err);
|
||||
}
|
||||
});
|
||||
|
||||
writeStream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
uploadStream.emit('open');
|
||||
});
|
||||
|
||||
writeStream.on('success', () => {
|
||||
this._updatePackage(name, function updater(data, cb) {
|
||||
data._attachments[filename] = {
|
||||
shasum: shaOneHash.digest('hex'),
|
||||
};
|
||||
cb();
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
uploadStream.emit('error', err);
|
||||
} else {
|
||||
uploadStream.emit('success');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
uploadStream.abort = function() {
|
||||
writeStream.abort();
|
||||
};
|
||||
|
||||
uploadStream.done = function() {
|
||||
if (!length) {
|
||||
uploadStream.emit('error', Utils.ErrorCode.get422('refusing to accept zero-length file'));
|
||||
writeStream.abort();
|
||||
} else {
|
||||
writeStream.done();
|
||||
}
|
||||
};
|
||||
|
||||
uploadStream.pipe(writeStream);
|
||||
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a tarball.
|
||||
* @param {*} name
|
||||
* @param {*} filename
|
||||
* @return {ReadTarball}
|
||||
*/
|
||||
getTarball(name: string, filename: string) {
|
||||
assert(Utils.validate_name(filename));
|
||||
|
||||
const storage: IPackageStorage = this._getLocalStorage(name);
|
||||
|
||||
if (_.isNil(storage)) {
|
||||
return this._createFailureStreamResponse();
|
||||
}
|
||||
|
||||
return this._streamSuccessReadTarBall(storage, filename);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a stream that emits a read failure.
|
||||
* @private
|
||||
* @return {ReadTarball}
|
||||
*/
|
||||
_createFailureStreamResponse() {
|
||||
const stream = new ReadTarball();
|
||||
|
||||
process.nextTick(() => {
|
||||
stream.emit('error', this._getFileNotAvailable());
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a stream that emits the tarball data
|
||||
* @param {Object} storage
|
||||
* @param {String} filename
|
||||
* @private
|
||||
* @return {ReadTarball}
|
||||
*/
|
||||
_streamSuccessReadTarBall(storage: IPackageStorage, filename: string) {
|
||||
const stream = new ReadTarball();
|
||||
const readTarballStream = storage.readTarball(filename);
|
||||
const e404 = Utils.ErrorCode.get404;
|
||||
|
||||
stream.abort = function() {
|
||||
if (_.isNil(readTarballStream) === false) {
|
||||
readTarballStream.abort();
|
||||
}
|
||||
};
|
||||
|
||||
readTarballStream.on('error', function(err) {
|
||||
if (err && err.code === noSuchFile) {
|
||||
stream.emit('error', e404('no such file available'));
|
||||
} else {
|
||||
stream.emit('error', err);
|
||||
}
|
||||
});
|
||||
|
||||
readTarballStream.on('content-length', function(v) {
|
||||
stream.emit('content-length', v);
|
||||
});
|
||||
|
||||
readTarballStream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
stream.emit('open');
|
||||
readTarballStream.pipe(stream);
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a package by name.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
getPackageMetadata(name: string, callback?: Callback = () => {}): void {
|
||||
|
||||
const storage: IPackageStorage = this._getLocalStorage(name);
|
||||
if (_.isNil(storage)) {
|
||||
return callback( Utils.ErrorCode.get404() );
|
||||
}
|
||||
|
||||
this._readPackage(storage, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Search a local package.
|
||||
* @param {*} startKey
|
||||
* @param {*} options
|
||||
* @return {Function}
|
||||
*/
|
||||
search(startKey: string, options: any) {
|
||||
const stream = new Stream.PassThrough({objectMode: true});
|
||||
|
||||
this._eachPackage((item, cb) => {
|
||||
fs.stat(item.path, (err, stats) => {
|
||||
if (_.isNil(err) === false) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
if (stats.mtime.getTime() > parseInt(startKey, 10)) {
|
||||
this.getPackageMetadata(item.name, (err: Error, data: Package) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
const listVersions: Array<string> = Object.keys(data.versions);
|
||||
const versions: Array<string> = Utils.semver_sort(listVersions);
|
||||
const latest: string = data['dist-tags'] && data['dist-tags'].latest ? data['dist-tags'].latest : versions.pop();
|
||||
|
||||
if (data.versions[latest]) {
|
||||
const version: Version = data.versions[latest];
|
||||
const pkg: any = {
|
||||
'name': version.name,
|
||||
'description': version.description,
|
||||
'dist-tags': {latest},
|
||||
'maintainers': version.maintainers || [version.author].filter(Boolean),
|
||||
'author': version.author,
|
||||
'repository': version.repository,
|
||||
'readmeFilename': version.readmeFilename || '',
|
||||
'homepage': version.homepage,
|
||||
'keywords': version.keywords,
|
||||
'bugs': version.bugs,
|
||||
'license': version.license,
|
||||
'time': {
|
||||
modified: item.time ? new Date(item.time).toISOString() : stats.mtime,
|
||||
},
|
||||
'versions': {[latest]: 'latest'},
|
||||
};
|
||||
|
||||
stream.push(pkg);
|
||||
}
|
||||
|
||||
cb();
|
||||
});
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
});
|
||||
}, function onEnd(err) {
|
||||
if (err) {
|
||||
return stream.emit('error', err);
|
||||
}
|
||||
stream.end();
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
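A usage sketch for the search() stream above; startKey is a millisecond timestamp, so '0' streams every local package, and `localStorage` stands for an instance of this class:

const stream = localStorage.search('0', {});

stream.on('data', (pkg) => console.log(pkg.name, pkg['dist-tags'].latest));
stream.on('error', (err) => console.error(err));
stream.on('end', () => console.log('search finished'));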
/**
|
||||
* Retrieve a wrapper that provides access to the package location.
|
||||
* @param {Object} packageInfo package name.
|
||||
* @return {Object}
|
||||
*/
|
||||
_getLocalStorage(packageInfo: string): IPackageStorage {
|
||||
const path: string = this._getLocalStoragePath(this.config.getMatchedPackagesSpec(packageInfo).storage);
|
||||
|
||||
if (_.isString(path) === false) {
|
||||
this.logger.debug( {name: packageInfo}, 'this package has no storage defined: @{name}' );
|
||||
return;
|
||||
}
|
||||
|
||||
return this.localData.getPackageStorage(packageInfo, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a json file from storage.
|
||||
* @param {Object} storage
|
||||
* @param {Function} callback
|
||||
*/
|
||||
_readPackage(storage: IPackageStorage, callback: Callback) {
|
||||
storage.readPackage(pkgFileName, (err, result) => {
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
return callback( Utils.ErrorCode.get404() );
|
||||
} else {
|
||||
return callback(this._internalError(err, pkgFileName, 'error reading'));
|
||||
}
|
||||
}
|
||||
|
||||
callback(err, normalizePackage(result));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify the right local storage location.
|
||||
* @param {String} path
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
_getLocalStoragePath(path: string): string {
|
||||
if (_.isNil(path) === false) {
|
||||
return path;
|
||||
}
|
||||
|
||||
return this.config.storage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Walks through each package and calls `onPackage` on each of them.
|
||||
* @param {*} onPackage
|
||||
* @param {*} onEnd
|
||||
*/
|
||||
_eachPackage(onPackage: Callback, onEnd: Callback) {
|
||||
const storages = {};
|
||||
|
||||
storages[this.config.storage] = true;
|
||||
if (this.config.packages) {
|
||||
Object.keys(this.config.packages || {}).map( (pkg) => {
|
||||
if (this.config.packages[pkg].storage) {
|
||||
storages[this.config.packages[pkg].storage] = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
const base = Path.dirname(this.config.self_path);
|
||||
|
||||
async.eachSeries(Object.keys(storages), function(storage, cb) {
|
||||
fs.readdir(Path.resolve(base, storage), function(err, files) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
async.eachSeries(files, function(file, cb) {
|
||||
if (file.match(/^@/)) {
|
||||
// scoped
|
||||
fs.readdir(Path.resolve(base, storage, file), function(err, files) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
async.eachSeries(files, (file2, cb) => {
|
||||
if (Utils.validate_name(file2)) {
|
||||
onPackage({
|
||||
name: `${file}/${file2}`,
|
||||
path: Path.resolve(base, storage, file, file2),
|
||||
}, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}, cb);
|
||||
});
|
||||
} else if (Utils.validate_name(file)) {
|
||||
onPackage({
|
||||
name: file,
|
||||
path: Path.resolve(base, storage, file),
|
||||
}, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}, cb);
|
||||
});
|
||||
}, onEnd);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve either a previously created local package or a boilerplate.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
_readCreatePackage(name: string, callback: Callback) {
|
||||
const storage: IPackageStorage = this._getLocalStorage(name);
|
||||
if (_.isNil(storage)) {
|
||||
return this._createNewPackage(name, callback);
|
||||
}
|
||||
|
||||
storage.readPackage(pkgFileName, (err, data) => {
|
||||
// TODO: race condition
|
||||
if (_.isNil(err) === false) {
|
||||
if (err.code === noSuchFile) {
|
||||
data = generatePackageTemplate(name);
|
||||
} else {
|
||||
return callback(this._internalError(err, pkgFileName, 'error reading'));
|
||||
}
|
||||
}
|
||||
|
||||
callback(null, normalizePackage(data));
|
||||
});
|
||||
}
|
||||
|
||||
_createNewPackage(name: string, callback: Callback): Callback {
|
||||
return callback(null, normalizePackage(generatePackageTemplate(name)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle internal error
|
||||
* @param {*} err
|
||||
* @param {*} file
|
||||
* @param {*} message
|
||||
* @return {Object} Error instance
|
||||
*/
|
||||
_internalError(err: string, file: string, message: string) {
|
||||
this.logger.error( {err: err, file: file}, `${message} @{file}: @{!err.message}` );
|
||||
|
||||
return Utils.ErrorCode.get500();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {*} name package name
|
||||
* @param {*} updateHandler function(package, cb) - update function
|
||||
* @param {*} callback callback that gets invoked after it's all updated
|
||||
* @return {Function}
|
||||
*/
|
||||
_updatePackage(name: string, updateHandler: Callback, callback: Callback) {
|
||||
const storage: IPackageStorage = this._getLocalStorage(name);
|
||||
|
||||
if (!storage) {
|
||||
return callback( Utils.ErrorCode.get404() );
|
||||
}
|
||||
|
||||
storage.updatePackage(name, updateHandler, this._writePackage.bind(this), normalizePackage,
|
||||
callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the revision (_rev) string for a package.
|
||||
* @param {*} name
|
||||
* @param {*} json
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
_writePackage(name: string, json: Package, callback: Callback) {
|
||||
const storage: IPackageStorage = this._getLocalStorage(name);
|
||||
if (_.isNil(storage)) {
|
||||
return callback();
|
||||
}
|
||||
storage.savePackage(pkgFileName, this._setDefaultRevision(json), callback);
|
||||
}
|
||||
|
||||
_setDefaultRevision(json: Package) {
|
||||
// calculate revision a la couchdb
|
||||
if (_.isString(json._rev) === false) {
|
||||
json._rev = DEFAULT_REVISION;
|
||||
}
|
||||
|
||||
json._rev = generateRevision(json._rev);
|
||||
|
||||
return json;
|
||||
}
|
||||
|
||||
_deleteAttachments(storage: IPackageStorage, attachments: string[], callback: Callback): void {
|
||||
const unlinkNext = function(cb) {
|
||||
if (_.isEmpty(attachments)) {
|
||||
return cb();
|
||||
}
|
||||
|
||||
const attachment = attachments.shift();
|
||||
storage.deletePackage(attachment, function() {
|
||||
unlinkNext(cb);
|
||||
});
|
||||
};
|
||||
|
||||
unlinkNext(function() {
|
||||
// try to unlink the directory, but ignore errors because it can fail
|
||||
storage.removePackage(function(err) {
|
||||
callback(err);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the dist file keeps the same protocol as its uplink
|
||||
* @param {Object} hash metadata
|
||||
* @param {String} upLinkKey registry key
|
||||
* @private
|
||||
*/
|
||||
_updateUplinkToRemoteProtocol(hash: DistFile, upLinkKey: string): void {
|
||||
// if we got this information from a known registry,
|
||||
// use the same protocol for the tarball
|
||||
//
|
||||
// see https://github.com/rlidwka/sinopia/issues/166
|
||||
const tarballUrl: any = UrlNode.parse(hash.url);
|
||||
const uplinkUrl: any = UrlNode.parse(this.config.uplinks[upLinkKey].url);
|
||||
|
||||
if (uplinkUrl.host === tarballUrl.host) {
|
||||
tarballUrl.protocol = uplinkUrl.protocol;
|
||||
hash.registry = upLinkKey;
|
||||
hash.url = UrlNode.format(tarballUrl);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default LocalStorage;
|
|
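A small, self-contained demo of the rewrite done by _updateUplinkToRemoteProtocol() above; the hosts and URLs are made up:

import UrlNode from 'url';

const tarballUrl = UrlNode.parse('http://registry.example.org/pkg/-/pkg-1.0.0.tgz');
const uplinkUrl = UrlNode.parse('https://registry.example.org/');

// Same host, so the tarball inherits the uplink's protocol.
if (uplinkUrl.host === tarballUrl.host) {
  tarballUrl.protocol = uplinkUrl.protocol;
}
// UrlNode.format(tarballUrl) -> 'https://registry.example.org/pkg/-/pkg-1.0.0.tgz'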
@ -14,13 +14,13 @@ const pkgJSON = require('../../package.json');
|
|||
*/
|
||||
function getlvl(x) {
|
||||
switch(true) {
|
||||
case x < 15 : return 'trace';
|
||||
case x < 25 : return 'debug';
|
||||
case x < 35 : return 'info';
|
||||
case x == 35 : return 'http';
|
||||
case x < 45 : return 'warn';
|
||||
case x < 55 : return 'error';
|
||||
default : return 'fatal';
|
||||
case x < 15: return 'trace';
|
||||
case x < 25: return 'debug';
|
||||
case x < 35: return 'info';
|
||||
case x == 35: return 'http';
|
||||
case x < 45: return 'warn';
|
||||
case x < 55: return 'error';
|
||||
default: return 'fatal';
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
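A few worked mappings for the bunyan-style numeric levels handled by getlvl() above:

// getlvl(10) === 'trace'
// getlvl(30) === 'info'
// getlvl(35) === 'http'
// getlvl(40) === 'warn'
// getlvl(60) === 'fatal'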
@ -1,14 +1,14 @@
|
|||
'use strict';
|
||||
|
||||
const Path = require('path');
|
||||
const logger = require('./logger');
|
||||
import Path from 'path';
|
||||
import _ from 'lodash';
|
||||
import logger from './logger';
|
||||
|
||||
/**
|
||||
* Requires a module.
|
||||
* @param {*} path the module's path
|
||||
* @return {Object}
|
||||
*/
|
||||
function try_load(path) {
|
||||
function tryLoad(path) {
|
||||
try {
|
||||
return require(path);
|
||||
} catch(err) {
|
||||
|
@ -19,6 +19,14 @@ function try_load(path) {
|
|||
}
|
||||
}
|
||||
|
||||
function isValid(plugin) {
|
||||
return (_.isFunction(plugin) || _.isFunction(plugin.default));
|
||||
}
|
||||
|
||||
function isES6(plugin) {
|
||||
return Object.keys(plugin).includes('default');
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a plugin following the rules
|
||||
* - First try to load from the internal plugins directory (which will disappear sooner or later).
|
||||
|
@ -30,29 +38,29 @@ function try_load(path) {
|
|||
* @param {*} sanity_check callback that checks the shape the plugin should fulfill
|
||||
* @return {Array} list of plugins
|
||||
*/
|
||||
function load_plugins(config, plugin_configs, params, sanity_check) {
|
||||
function loadPlugin(config, plugin_configs, params, sanity_check) {
|
||||
let plugins = Object.keys(plugin_configs || {}).map(function(p) {
|
||||
let plugin;
|
||||
|
||||
// try local plugins first
|
||||
plugin = try_load(Path.resolve(__dirname + '/..//plugins', p));
|
||||
plugin = tryLoad(Path.resolve(__dirname + '/..//plugins', p));
|
||||
|
||||
// npm package
|
||||
if (plugin === null && p.match(/^[^\.\/]/)) {
|
||||
plugin = try_load(`verdaccio-${p}`);
|
||||
plugin = tryLoad(`verdaccio-${p}`);
|
||||
// compatibility for old sinopia plugins
|
||||
if (!plugin) {
|
||||
plugin = try_load(`sinopia-${p}`);
|
||||
plugin = tryLoad(`sinopia-${p}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (plugin === null) {
|
||||
plugin = try_load(p);
|
||||
plugin = tryLoad(p);
|
||||
}
|
||||
|
||||
// relative to config path
|
||||
if (plugin === null && p.match(/^\.\.?($|\/)/)) {
|
||||
plugin = try_load(Path.resolve(Path.dirname(config.self_path), p));
|
||||
plugin = tryLoad(Path.resolve(Path.dirname(config.self_path), p));
|
||||
}
|
||||
|
||||
if (plugin === null) {
|
||||
|
@ -60,12 +68,14 @@ function load_plugins(config, plugin_configs, params, sanity_check) {
|
|||
throw Error('"' + p + '" plugin not found\ntry "npm install verdaccio-' + p + '"');
|
||||
}
|
||||
|
||||
if (typeof(plugin) !== 'function') {
|
||||
if (!isValid(plugin)) {
|
||||
logger.logger.error({content: p}, '@{content} doesn\'t look like a valid plugin');
|
||||
throw Error('"' + p + '" doesn\'t look like a valid plugin');
|
||||
}
|
||||
|
||||
plugin = plugin(plugin_configs[p], params);
|
||||
/* eslint new-cap:off */
|
||||
plugin = isES6(plugin) ? new plugin.default(plugin_configs[p], params) : plugin(plugin_configs[p], params);
|
||||
/* eslint new-cap:off */
|
||||
|
||||
if (plugin === null || !sanity_check(plugin)) {
|
||||
logger.logger.error({content: p}, '@{content} doesn\'t look like a valid plugin');
|
||||
|
@ -78,4 +88,4 @@ function load_plugins(config, plugin_configs, params, sanity_check) {
|
|||
return plugins;
|
||||
}
|
||||
|
||||
exports.load_plugins = load_plugins;
|
||||
export {loadPlugin};
|
||||
|
|
|
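To recap the resolution order loadPlugin() walks for each plugin key, here is a hypothetical auth entry named htpasswd; the options shown are assumptions, not a documented contract:

// Lookup order for the key "htpasswd":
//   1. the bundled plugins directory (src/plugins/htpasswd)
//   2. the npm package verdaccio-htpasswd
//   3. the legacy package sinopia-htpasswd
//   4. a plain require('htpasswd')
//   keys starting with ./ or ../ are resolved relative to the config file instead
const pluginConfigs = {
  htpasswd: {file: './htpasswd', max_users: 1000}, // hypothetical options
};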
@ -1,7 +1,5 @@
|
|||
/* eslint no-invalid-this: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
const lunr = require('lunr');
|
||||
|
||||
/**
|
||||
|
@ -61,7 +59,7 @@ class Search {
|
|||
*/
|
||||
reindex() {
|
||||
let self = this;
|
||||
this.storage.get_local(function(err, packages) {
|
||||
this.storage.getLocalDatabase(function(err, packages) {
|
||||
if (err) throw err; // that function shouldn't produce any
|
||||
let i = packages.length;
|
||||
while (i--) {
|
||||
|
@ -80,4 +78,4 @@ class Search {
|
|||
}
|
||||
}
|
||||
|
||||
module.exports = new Search();
|
||||
export default new Search();
|
||||
|
|
84
src/lib/storage-utils.js
Normal file
|
@ -0,0 +1,84 @@
|
|||
// @flow
|
||||
|
||||
import _ from 'lodash';
|
||||
import crypto from 'crypto';
|
||||
import * as Utils from './utils';
|
||||
|
||||
import type {
|
||||
Package, Version,
|
||||
} from '@verdaccio/types';
|
||||
|
||||
const pkgFileName = 'package.json';
|
||||
const fileExist: string = 'EEXISTS';
|
||||
const noSuchFile: string = 'ENOENT';
|
||||
const resourceNotAvailable: string = 'EAGAIN';
|
||||
const DEFAULT_REVISION: string = `0-0000000000000000`;
|
||||
|
||||
const generatePackageTemplate = function(name: string): Package {
|
||||
return {
|
||||
// standard things
|
||||
'name': name,
|
||||
'versions': {},
|
||||
'dist-tags': {},
|
||||
'time': {},
|
||||
'_distfiles': {},
|
||||
'_attachments': {},
|
||||
'_uplinks': {},
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Normalise package properties, tags, revision id.
|
||||
* @param {Object} pkg package reference.
|
||||
*/
|
||||
function normalizePackage(pkg: Package) {
|
||||
const pkgProperties = [
|
||||
'versions',
|
||||
'dist-tags',
|
||||
'_distfiles',
|
||||
'_attachments',
|
||||
'_uplinks',
|
||||
'time'];
|
||||
|
||||
pkgProperties.forEach((key) => {
|
||||
if (_.isNil(Utils.is_object(pkg[key]))) {
|
||||
pkg[key] = {};
|
||||
}
|
||||
});
|
||||
|
||||
if (_.isString(pkg._rev) === false) {
|
||||
pkg._rev = DEFAULT_REVISION;
|
||||
}
|
||||
|
||||
// normalize dist-tags
|
||||
Utils.normalize_dist_tags(pkg);
|
||||
|
||||
return pkg;
|
||||
}
|
||||
|
||||
function generateRevision(rev: string): string {
|
||||
const _rev = rev.split('-');
|
||||
|
||||
return ((+_rev[0] || 0) + 1) + '-' + crypto.pseudoRandomBytes(8).toString('hex');
|
||||
}
|
||||
|
||||
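A worked example of the CouchDB-style revision string produced by generateRevision() above; the hex suffix is random, the one shown is made up:

// generateRevision(DEFAULT_REVISION)     -> '1-9f8c2a41d07b3e55' (for example)
// generateRevision('1-9f8c2a41d07b3e55') -> '2-<16 random hex chars>'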
function cleanUpReadme(version: Version): Version {
|
||||
if(_.isNil(version) === false) {
|
||||
delete version.readme;
|
||||
}
|
||||
|
||||
return version;
|
||||
}
|
||||
|
||||
export {
|
||||
generatePackageTemplate,
|
||||
normalizePackage,
|
||||
generateRevision,
|
||||
cleanUpReadme,
|
||||
DEFAULT_REVISION,
|
||||
fileExist,
|
||||
noSuchFile,
|
||||
pkgFileName,
|
||||
resourceNotAvailable,
|
||||
};
|
||||
|
|
@ -1,19 +1,18 @@
|
|||
'use strict';
|
||||
|
||||
const _ = require('lodash');
|
||||
const assert = require('assert');
|
||||
const async = require('async');
|
||||
const Error = require('http-errors');
|
||||
const semver = require('semver');
|
||||
const Stream = require('stream');
|
||||
import _ from 'lodash';
|
||||
import assert from 'assert';
|
||||
import async from 'async';
|
||||
import Error from 'http-errors';
|
||||
import semver from 'semver';
|
||||
import Stream from 'stream';
|
||||
|
||||
const Search = require('./search');
|
||||
const LocalStorage = require('./storage/local/local-storage');
|
||||
const Logger = require('./logger');
|
||||
const MyStreams = require('@verdaccio/streams');
|
||||
const Proxy = require('./storage/up-storage');
|
||||
const Utils = require('./utils');
|
||||
import Search from './search';
|
||||
import LocalStorage from './local-storage';
|
||||
import {ReadTarball} from '@verdaccio/streams';
|
||||
import ProxyStorage from './up-storage';
|
||||
import * as Utils from './utils';
|
||||
|
||||
const Logger = require('../lib/logger');
|
||||
const WHITELIST = ['_rev', 'name', 'versions', 'dist-tags', 'readme', 'time'];
|
||||
const getDefaultMetadata = (name) => {
|
||||
return {
|
||||
|
@ -36,11 +35,8 @@ class Storage {
|
|||
constructor(config) {
|
||||
this.config = config;
|
||||
this._setupUpLinks(this.config);
|
||||
this.localStorage = new LocalStorage(config, Logger.logger, Utils);
|
||||
this.localStorage.localList.data.secret = this.config.checkSecretKey(this.localStorage.localList.data.secret);
|
||||
this.localStorage.localList.sync();
|
||||
// an instance for local storage
|
||||
this.logger = Logger.logger.child();
|
||||
this.localStorage = new LocalStorage(this.config, Logger.logger);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -61,7 +57,7 @@ class Storage {
|
|||
*/
|
||||
const checkPackageLocal = () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.localStorage.getPackageMetadata(name, {}, (err, results) => {
|
||||
this.localStorage.getPackageMetadata(name, (err, results) => {
|
||||
if (!_.isNil(err) && err.status !== 404) {
|
||||
return reject(err);
|
||||
}
|
||||
|
@ -171,7 +167,8 @@ class Storage {
|
|||
* @param {*} callback
|
||||
*/
|
||||
replace_tags(name, tag_hash, callback) {
|
||||
this.localStorage.replaceTags(name, tag_hash, callback);
|
||||
this.logger.warn('method deprecated');
|
||||
this.localStorage.mergeTags(name, tag_hash, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -238,7 +235,7 @@ class Storage {
|
|||
* @return {Stream}
|
||||
*/
|
||||
get_tarball(name, filename) {
|
||||
let readStream = new MyStreams.ReadTarball();
|
||||
let readStream = new ReadTarball();
|
||||
readStream.abort = function() {};
|
||||
|
||||
let self = this;
|
||||
|
@ -247,17 +244,17 @@ class Storage {
|
|||
// information about it, so fetching package info is unnecessary
|
||||
|
||||
// trying local first
|
||||
let rstream = self.localStorage.getTarball(name, filename);
|
||||
let localStream = self.localStorage.getTarball(name, filename);
|
||||
let is_open = false;
|
||||
rstream.on('error', function(err) {
|
||||
localStream.on('error', (err) => {
|
||||
if (is_open || err.status !== 404) {
|
||||
return readStream.emit('error', err);
|
||||
}
|
||||
|
||||
// local reported 404
|
||||
let err404 = err;
|
||||
rstream.abort();
|
||||
rstream = null; // gc
|
||||
localStream.abort();
|
||||
localStream = null; // gc
|
||||
self.localStorage.getPackageMetadata(name, (err, info) => {
|
||||
if (_.isNil(err) && info._distfiles && _.isNil(info._distfiles[filename]) === false) {
|
||||
// information about this file exists locally
|
||||
|
@ -276,12 +273,12 @@ class Storage {
|
|||
}
|
||||
});
|
||||
});
|
||||
rstream.on('content-length', function(v) {
|
||||
localStream.on('content-length', function(v) {
|
||||
readStream.emit('content-length', v);
|
||||
});
|
||||
rstream.on('open', function() {
|
||||
localStream.on('open', function() {
|
||||
is_open = true;
|
||||
rstream.pipe(readStream);
|
||||
localStream.pipe(readStream);
|
||||
});
|
||||
return readStream;
|
||||
|
||||
|
@ -297,7 +294,7 @@ class Storage {
|
|||
}
|
||||
}
|
||||
if (uplink == null) {
|
||||
uplink = new Proxy({
|
||||
uplink = new ProxyStorage({
|
||||
url: file.url,
|
||||
cache: true,
|
||||
_autogenerated: true,
|
||||
|
@@ -362,28 +359,32 @@ class Storage {
|
|||
uplink with proxy_access rights against {name} and combines results
|
||||
into one json object
|
||||
Used storages: local && uplink (proxy_access)
|
||||
* @param {*} name
|
||||
* @param {*} options
|
||||
* @param {*} callback
|
||||
*/
|
||||
get_package(name, options, callback) {
|
||||
if (_.isFunction(options)) {
|
||||
callback = options, options = {};
|
||||
}
|
||||
|
||||
this.localStorage.getPackageMetadata(name, options, (err, data) => {
|
||||
* @param {object} options
|
||||
* @property {string} options.name Package Name
|
||||
* @property {object} options.req Express `req` object
|
||||
* @property {boolean} options.keepUpLinkData keep up link info in package meta, last update, etc.
|
||||
* @property {function} options.callback Callback for receive data
|
||||
*/
|
||||
get_package(options) {
|
||||
this.localStorage.getPackageMetadata(options.name, (err, data) => {
|
||||
if (err && (!err.status || err.status >= 500)) {
|
||||
// report internal errors right away
|
||||
return callback(err);
|
||||
return options.callback(err);
|
||||
}
|
||||
|
||||
this._syncUplinksMetadata(name, data, options, function(err, result, uplink_errors) {
|
||||
this._syncUplinksMetadata(options.name, data, {req: options.req}, function(err, result, uplink_errors) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
return options.callback(err);
|
||||
}
|
||||
|
||||
const propertyToKeep = [...WHITELIST];
|
||||
if (options.keepUpLinkData === true) {
|
||||
propertyToKeep.push('_uplinks');
|
||||
}
|
||||
|
||||
for (let i in result) {
|
||||
if (WHITELIST.indexOf(i) === -1) {
|
||||
if (propertyToKeep.indexOf(i) === -1) { // Remove sections like '_uplinks' from response
|
||||
delete result[i];
|
||||
}
|
||||
}
|
||||
|
@@ -393,7 +394,7 @@ class Storage {
|
|||
// npm can throw if this field doesn't exist
|
||||
result._attachments = {};
|
||||
|
||||
callback(null, result, uplink_errors);
|
||||
options.callback(null, result, uplink_errors);
|
||||
});
|
||||
});
|
||||
}
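For reference, a minimal sketch of how a route handler might call the options-based get_package shown above. The package name, the `storage` instance and the Express `req`/`res`/`next` objects are assumptions made for illustration only.
// Hedged sketch only: assumes `storage` (a Storage instance) and Express objects exist in scope.
storage.get_package({
  name: 'some-package',          // hypothetical package name
  req: req,                      // forwarded so uplink requests can reuse request context
  keepUpLinkData: true,          // keep the '_uplinks' section in the returned metadata
  callback: function(err, metadata, uplinkErrors) {
    if (err) {
      return next(err);          // `next` assumed to be the usual Express error handler
    }
    res.json(metadata);          // `res` assumed as well
  },
});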
|
||||
|
@@ -460,9 +461,9 @@ class Storage {
|
|||
* Retrieve only private local packages
|
||||
* @param {*} callback
|
||||
*/
|
||||
get_local(callback) {
|
||||
getLocalDatabase(callback) {
|
||||
let self = this;
|
||||
let locals = this.localStorage.localList.get();
|
||||
let locals = this.localStorage.localData.get();
|
||||
let packages = [];
|
||||
|
||||
const getPackage = function(i) {
|
||||
|
@@ -505,7 +506,6 @@ class Storage {
|
|||
let exists = false;
|
||||
const self = this;
|
||||
const upLinks = [];
|
||||
|
||||
if (_.isNil(packageInfo)) {
|
||||
exists = false;
|
||||
packageInfo = getDefaultMetadata(name);
|
||||
|
@@ -584,7 +584,6 @@ class Storage {
|
|||
});
|
||||
}, (err, upLinksErrors) => {
|
||||
assert(!err && Array.isArray(upLinksErrors));
|
||||
|
||||
if (!exists) {
|
||||
return callback( Error[404]('no such package available')
|
||||
, null
|
||||
|
@ -629,7 +628,7 @@ class Storage {
|
|||
for (let p in config.uplinks) {
|
||||
if (Object.prototype.hasOwnProperty.call(config.uplinks, p)) {
|
||||
// instance for each up-link definition
|
||||
this.uplinks[p] = new Proxy(config.uplinks[p], config);
|
||||
this.uplinks[p] = new ProxyStorage(config.uplinks[p], config);
|
||||
this.uplinks[p].upname = p;
|
||||
}
|
||||
}
|
||||
|
@@ -653,12 +652,14 @@ class Storage {
|
|||
}
|
||||
|
||||
// refresh dist-tags
|
||||
for (let i in up['dist-tags']) {
|
||||
if (local['dist-tags'][i] !== up['dist-tags'][i]) {
|
||||
if (!local['dist-tags'][i] || semver.lte(local['dist-tags'][i], up['dist-tags'][i])) {
|
||||
local['dist-tags'][i] = up['dist-tags'][i];
|
||||
const distTag = 'dist-tags';
|
||||
|
||||
for (let i in up[distTag]) {
|
||||
if (local[distTag][i] !== up[distTag][i]) {
|
||||
if (!local[distTag][i] || semver.lte(local[distTag][i], up[distTag][i])) {
|
||||
local[distTag][i] = up[distTag][i];
|
||||
}
|
||||
if (i === 'latest' && local['dist-tags'][i] === up['dist-tags'][i]) {
|
||||
if (i === 'latest' && local[distTag][i] === up[distTag][i]) {
|
||||
// if remote has more fresh package, we should borrow its readme
|
||||
local.readme = up.readme;
|
||||
}
|
||||
|
@@ -668,4 +669,4 @@ class Storage {
|
|||
|
||||
}
|
||||
|
||||
module.exports = Storage;
|
||||
export default Storage;
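The dist-tags refresh above only takes an uplink tag when the local tag is missing or not newer (semver.lte). A small illustrative sketch of that rule; the versions below are invented.
// Illustrative only; versions are made up.
const semver = require('semver');

const local = {'dist-tags': {latest: '1.2.0'}};
const up = {'dist-tags': {latest: '1.3.0', beta: '2.0.0-beta.1'}};

for (let tag in up['dist-tags']) {
  if (!local['dist-tags'][tag] || semver.lte(local['dist-tags'][tag], up['dist-tags'][tag])) {
    local['dist-tags'][tag] = up['dist-tags'][tag]; // uplink value wins when equal or newer
  }
}
// local['dist-tags'] -> {latest: '1.3.0', beta: '2.0.0-beta.1'}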
|
||||
|
|
|
@@ -1,135 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const Path = require('path');
|
||||
const logger = require('../../logger');
|
||||
|
||||
/**
|
||||
* Handle local database.
|
||||
* FUTURE: must be a plugin.
|
||||
*/
|
||||
class LocalData {
|
||||
|
||||
/**
|
||||
* Load and parse the local json database.
|
||||
* @param {*} path the database path
|
||||
*/
|
||||
constructor(path) {
|
||||
this.path = path;
|
||||
// Prevent any write action, wait admin to check what happened during startup
|
||||
this.locked = false;
|
||||
this.data = this._fetchLocalPackages();
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch local packages.
|
||||
* @private
|
||||
* @return {Object}
|
||||
*/
|
||||
_fetchLocalPackages() {
|
||||
const emptyDatabase = {list: []};
|
||||
|
||||
try {
|
||||
const dbFile = fs.readFileSync(this.path, 'utf8');
|
||||
|
||||
if (!dbFile) { // readFileSync is platform specific, FreeBSD might return null
|
||||
return emptyDatabase;
|
||||
}
|
||||
|
||||
const db = this._parseDatabase(dbFile);
|
||||
|
||||
if(!db) {
|
||||
return emptyDatabase;
|
||||
}
|
||||
|
||||
return db;
|
||||
} catch (err) {
|
||||
// readFileSync is platform specific; macOS, Linux and Windows throw an error
|
||||
// Only recreate if file not found to prevent data loss
|
||||
if (err.code !== 'ENOENT') {
|
||||
this.locked = true;
|
||||
logger.logger.error(
|
||||
'Failed to read package database file, please check the error printed below:\n',
|
||||
`File Path: ${this.path}\n\n`,
|
||||
err
|
||||
);
|
||||
}
|
||||
return emptyDatabase;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the local database.
|
||||
* @param {Object} dbFile
|
||||
* @private
|
||||
* @return {Object}
|
||||
*/
|
||||
_parseDatabase(dbFile) {
|
||||
try {
|
||||
return JSON.parse(dbFile);
|
||||
} catch(err) {
|
||||
logger.logger.error(`Package database file corrupted (invalid JSON), please check the error printed below.\nFile Path: ${this.path}`, err);
|
||||
this.locked = true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new element.
|
||||
* @param {*} name
|
||||
* @return {Error|*}
|
||||
*/
|
||||
add(name) {
|
||||
if (this.data.list.indexOf(name) === -1) {
|
||||
this.data.list.push(name);
|
||||
return this.sync();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an element from the database.
|
||||
* @param {*} name
|
||||
* @return {Error|*}
|
||||
*/
|
||||
remove(name) {
|
||||
const i = this.data.list.indexOf(name);
|
||||
if (i !== -1) {
|
||||
this.data.list.splice(i, 1);
|
||||
}
|
||||
return this.sync();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return all database elements.
|
||||
* @return {Array}
|
||||
*/
|
||||
get() {
|
||||
return this.data.list;
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronize the database, creating it if it does not exist.
|
||||
* @return {Error|*}
|
||||
*/
|
||||
sync() {
|
||||
if (this.locked) {
|
||||
logger.logger.error('Database is locked, please check error message printed during startup to prevent data loss.');
|
||||
return new Error('Verdaccio database is locked, please contact your administrator to checkout logs during verdaccio startup.');
|
||||
}
|
||||
|
||||
// Uses sync to prevent ugly race condition
|
||||
try {
|
||||
require('mkdirp').sync(Path.dirname(this.path));
|
||||
} catch(err) {
|
||||
// perhaps a logger instance?
|
||||
/* eslint no-empty:off */
|
||||
}
|
||||
try {
|
||||
fs.writeFileSync(this.path, JSON.stringify(this.data));
|
||||
} catch (err) {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = LocalData;
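For context, a minimal usage sketch of the LocalData helper shown above; the require path and database path are hypothetical.
// Hedged sketch only: the paths below are invented for illustration.
const LocalData = require('./local-data');

const localData = new LocalData('/tmp/.sinopia-db.json');

const addError = localData.add('my-package');  // returns an Error when the database is locked or the write fails
if (!addError) {
  console.log(localData.get());                // e.g. ['my-package']
}
localData.remove('my-package');                // also persists the change via sync()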
|
|
@@ -1,256 +0,0 @@
|
|||
/* eslint prefer-spread: "off" */
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const createError = require('http-errors');
|
||||
const mkdirp = require('mkdirp');
|
||||
const MyStream = require('@verdaccio/streams');
|
||||
const locker = require('@verdaccio/file-locking');
|
||||
const fileExist = 'EEXISTS';
|
||||
const noSuchFile = 'ENOENT';
|
||||
|
||||
const fSError = function(code) {
|
||||
const err = createError(code);
|
||||
err.code = code;
|
||||
return err;
|
||||
};
|
||||
|
||||
const readFile = function(name) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(name, (err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(data);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const tempFile = function(str) {
|
||||
return `${str}.tmp${String(Math.random()).substr(2)}`;
|
||||
};
|
||||
|
||||
const renameTmp = function(src, dst, _cb) {
|
||||
const cb = function(err) {
|
||||
if (err) {
|
||||
fs.unlink(src, function() {});
|
||||
}
|
||||
_cb(err);
|
||||
};
|
||||
|
||||
if (process.platform !== 'win32') {
|
||||
return fs.rename(src, dst, cb);
|
||||
}
|
||||
|
||||
// windows can't remove opened file,
|
||||
// but it seem to be able to rename it
|
||||
const tmp = tempFile(dst);
|
||||
fs.rename(dst, tmp, function(err) {
|
||||
fs.rename(src, dst, cb);
|
||||
if (!err) {
|
||||
fs.unlink(tmp, () => {});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const writeFile = function(dest, data, cb) {
|
||||
const createTempFile = function(cb) {
|
||||
const tempFilePath = tempFile(dest);
|
||||
fs.writeFile(tempFilePath, data, function(err) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
renameTmp(tempFilePath, dest, cb);
|
||||
});
|
||||
};
|
||||
|
||||
createTempFile(function(err) {
|
||||
if (err && err.code === noSuchFile) {
|
||||
mkdirp(path.dirname(dest), function(err) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
createTempFile(cb);
|
||||
});
|
||||
} else {
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const createWriteStream = function(name) {
|
||||
const uploadStream = new MyStream.UploadTarball();
|
||||
let _ended = 0;
|
||||
uploadStream.on('end', function() {
|
||||
_ended = 1;
|
||||
});
|
||||
|
||||
fs.exists(name, function(exists) {
|
||||
if (exists) {
|
||||
return uploadStream.emit('error', fSError(fileExist));
|
||||
}
|
||||
|
||||
const temporalName = `${name}.tmp-${String(Math.random()).replace(/^0\./, '')}`;
|
||||
const file = fs.createWriteStream(temporalName);
|
||||
let opened = false;
|
||||
uploadStream.pipe(file);
|
||||
|
||||
uploadStream.done = function() {
|
||||
const onend = function() {
|
||||
file.on('close', function() {
|
||||
renameTmp(temporalName, name, function(err) {
|
||||
if (err) {
|
||||
uploadStream.emit('error', err);
|
||||
} else {
|
||||
uploadStream.emit('success');
|
||||
}
|
||||
});
|
||||
});
|
||||
file.destroySoon();
|
||||
};
|
||||
if (_ended) {
|
||||
onend();
|
||||
} else {
|
||||
uploadStream.on('end', onend);
|
||||
}
|
||||
};
|
||||
uploadStream.abort = function() {
|
||||
if (opened) {
|
||||
opened = false;
|
||||
file.on('close', function() {
|
||||
fs.unlink(temporalName, function() {});
|
||||
});
|
||||
}
|
||||
file.destroySoon();
|
||||
};
|
||||
file.on('open', function() {
|
||||
opened = true;
|
||||
// re-emitting open because it's handled in storage.js
|
||||
uploadStream.emit('open');
|
||||
});
|
||||
file.on('error', function(err) {
|
||||
uploadStream.emit('error', err);
|
||||
});
|
||||
});
|
||||
return uploadStream;
|
||||
};
|
||||
|
||||
const createReadStream = function(name, readTarballStream, callback) {
|
||||
let readStream = fs.createReadStream(name);
|
||||
readStream.on('error', function(err) {
|
||||
readTarballStream.emit('error', err);
|
||||
});
|
||||
readStream.on('open', function(fd) {
|
||||
fs.fstat(fd, function(err, stats) {
|
||||
if (err) return readTarballStream.emit('error', err);
|
||||
readTarballStream.emit('content-length', stats.size);
|
||||
readTarballStream.emit('open');
|
||||
readStream.pipe(readTarballStream);
|
||||
});
|
||||
});
|
||||
|
||||
readTarballStream = new MyStream.ReadTarball();
|
||||
readTarballStream.abort = function() {
|
||||
readStream.close();
|
||||
};
|
||||
return readTarballStream;
|
||||
};
|
||||
|
||||
const createFile = function(name, contents, callback) {
|
||||
fs.exists(name, function(exists) {
|
||||
if (exists) {
|
||||
return callback( fSError(fileExist) );
|
||||
}
|
||||
writeFile(name, contents, callback);
|
||||
});
|
||||
};
|
||||
|
||||
const updateFile = function(name, contents, callback) {
|
||||
fs.exists(name, function(exists) {
|
||||
if (!exists) {
|
||||
return callback( fSError(noSuchFile) );
|
||||
}
|
||||
writeFile(name, contents, callback);
|
||||
});
|
||||
};
|
||||
|
||||
const readJSON = function(name, cb) {
|
||||
readFile(name).then(function(res) {
|
||||
let args = [];
|
||||
try {
|
||||
args = [null, JSON.parse(res.toString('utf8'))];
|
||||
} catch(err) {
|
||||
args = [err];
|
||||
}
|
||||
cb.apply(null, args);
|
||||
}, function(err) {
|
||||
return cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
const lock_and_read = function(name, cb) {
|
||||
locker.readFile(name, {lock: true}, function(err, res) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null, res);
|
||||
});
|
||||
};
|
||||
|
||||
const lockAndReadJSON = function(name, cb) {
|
||||
locker.readFile(name, {
|
||||
lock: true,
|
||||
parse: true,
|
||||
}, function(err, res) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
return cb(null, res);
|
||||
});
|
||||
};
|
||||
|
||||
const unlock_file = function(name, cb) {
|
||||
locker.unlockFile(name, cb);
|
||||
};
|
||||
|
||||
const createJSON = function(name, value, cb) {
|
||||
createFile(name, JSON.stringify(value, null, '\t'), cb);
|
||||
};
|
||||
|
||||
|
||||
const updateJSON = function(name, value, cb) {
|
||||
updateFile(name, JSON.stringify(value, null, '\t'), cb);
|
||||
};
|
||||
|
||||
|
||||
const writeJSON = function(name, value, cb) {
|
||||
writeFile(name, JSON.stringify(value, null, '\t'), cb);
|
||||
};
|
||||
|
||||
// fs
|
||||
module.exports.unlink = fs.unlink;
|
||||
module.exports.rmdir = fs.rmdir;
|
||||
|
||||
// streams
|
||||
module.exports.createWriteStream = createWriteStream;
|
||||
module.exports.createReadStream = createReadStream;
|
||||
|
||||
// io
|
||||
module.exports.read = readFile;
|
||||
module.exports.write = writeFile;
|
||||
module.exports.update = updateFile;
|
||||
module.exports.create = createFile;
|
||||
|
||||
// json
|
||||
module.exports.readJSON = readJSON;
|
||||
module.exports.lockAndReadJSON = lockAndReadJSON;
|
||||
module.exports.writeJSON = writeJSON;
|
||||
module.exports.updateJSON = updateJSON;
|
||||
module.exports.createJSON = createJSON;
|
||||
|
||||
// lock
|
||||
module.exports.unlock_file = unlock_file;
|
||||
module.exports.lock_and_read = lock_and_read;
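These exports are the building blocks for the lock/read/modify/write/unlock cycle used by the local storage layer. A hedged sketch of that cycle; the require path, file path and mutation are invented.
// Illustrative only.
const localFS = require('./local-fs');
const pkgPath = '/storage/my-package/package.json';

localFS.lockAndReadJSON(pkgPath, (err, pkg) => {
  if (err) {
    return console.error(err);                 // e.g. EAGAIN when the file is already locked
  }
  pkg['dist-tags'].latest = '1.0.1';           // mutate the metadata in memory
  localFS.writeJSON(pkgPath, pkg, (writeErr) => {
    // writeJSON goes through a temp file and a rename, see writeFile/renameTmp above
    localFS.unlock_file(pkgPath, () => {
      if (writeErr) console.error(writeErr);   // unlock regardless of the write result
    });
  });
});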
|
|
@@ -1,977 +0,0 @@
|
|||
/* eslint prefer-rest-params: "off" */
|
||||
/* eslint prefer-spread: "off" */
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const Crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const Path = require('path');
|
||||
const Stream = require('stream');
|
||||
const URL = require('url');
|
||||
const async = require('async');
|
||||
const _ = require('lodash');
|
||||
|
||||
const fsStorage = require('./local-fs');
|
||||
const LocalData = require('./local-data');
|
||||
const customStream = require('@verdaccio/streams');
|
||||
|
||||
const pkgFileName = 'package.json';
|
||||
const fileExist = 'EEXISTS';
|
||||
const noSuchFile = 'ENOENT';
|
||||
const resourceNotAvailable = 'EAGAIN';
|
||||
|
||||
const generatePackageTemplate = function(name) {
|
||||
return {
|
||||
// standard things
|
||||
'name': name,
|
||||
'versions': {},
|
||||
'dist-tags': {},
|
||||
'time': {},
|
||||
|
||||
// our own object
|
||||
'_distfiles': {},
|
||||
'_attachments': {},
|
||||
'_uplinks': {},
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Implements Storage interface (same for storage.js, local-storage.js, up-storage.js).
|
||||
*/
|
||||
class LocalStorage {
|
||||
/**
|
||||
* Constructor
|
||||
* @param {Object} config config list of properties
|
||||
* @param {Object} logger reference
|
||||
* @param {Object} utils package utilities
|
||||
*/
|
||||
constructor(config, logger, utils) {
|
||||
this.config = config;
|
||||
this.utils = utils;
|
||||
this.localList = new LocalData(this._buildStoragePath(this.config));
|
||||
this.logger = logger.child({sub: 'fs'});
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the local database path.
|
||||
* @param {Object} config
|
||||
* @return {string|String|*}
|
||||
* @private
|
||||
*/
|
||||
_buildStoragePath(config) {
|
||||
// FUTURE: the database might be parameterizable from config.yaml
|
||||
return Path.join(Path.resolve(Path.dirname(config.self_path || ''),
|
||||
config.storage,
|
||||
'.sinopia-db.json'
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Add a package.
|
||||
* @param {*} name
|
||||
* @param {*} info
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
addPackage(name, info, callback) {
|
||||
const storage = this._getLocalStorage(name);
|
||||
|
||||
if (!storage) {
|
||||
return callback( this.utils.ErrorCode.get404('this package cannot be added'));
|
||||
}
|
||||
|
||||
storage.createJSON(pkgFileName, generatePackageTemplate(name), (err) => {
|
||||
if (err && err.code === fileExist) {
|
||||
return callback( this.utils.ErrorCode.get409());
|
||||
}
|
||||
|
||||
const latest = this.utils.getLatestVersion(info);
|
||||
|
||||
if (_.isNil(latest) === false && info.versions[latest]) {
|
||||
return callback(null, info.versions[latest]);
|
||||
}
|
||||
return callback();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove package.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
removePackage(name, callback) {
|
||||
this.logger.info( {name: name}, 'unpublishing @{name} (all)');
|
||||
|
||||
let storage = this._getLocalStorage(name);
|
||||
if (!storage) {
|
||||
return callback( this.utils.ErrorCode.get404());
|
||||
}
|
||||
|
||||
storage.readJSON(pkgFileName, (err, data) => {
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
return callback( this.utils.ErrorCode.get404());
|
||||
} else {
|
||||
return callback(err);
|
||||
}
|
||||
}
|
||||
this._normalizePackage(data);
|
||||
|
||||
let removeFailed = this.localList.remove(name);
|
||||
if (removeFailed) {
|
||||
// This will happen when database is locked
|
||||
return callback(this.utils.ErrorCode.get422(removeFailed.message));
|
||||
}
|
||||
|
||||
storage.unlink(pkgFileName, function(err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
const files = Object.keys(data._attachments);
|
||||
|
||||
const unlinkNext = function(cb) {
|
||||
if (files.length === 0) {
|
||||
return cb();
|
||||
}
|
||||
|
||||
let file = files.shift();
|
||||
storage.unlink(file, function() {
|
||||
unlinkNext(cb);
|
||||
});
|
||||
};
|
||||
|
||||
unlinkNext(function() {
|
||||
// try to unlink the directory, but ignore errors because it can fail
|
||||
storage.rmdir('.', function(err) {
|
||||
callback(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronize remote package info with the local one
|
||||
* @param {*} name
|
||||
* @param {*} packageInfo
|
||||
* @param {*} callback
|
||||
*/
|
||||
updateVersions(name, packageInfo, callback) {
|
||||
this._readCreatePackage(name, (err, packageLocalJson) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
|
||||
let change = false;
|
||||
for (let versionId in packageInfo.versions) {
|
||||
if (_.isNil(packageLocalJson.versions[versionId])) {
|
||||
const version = packageInfo.versions[versionId];
|
||||
|
||||
// we don't keep readmes for package versions,
|
||||
// only one readme per package
|
||||
delete version.readme;
|
||||
|
||||
change = true;
|
||||
packageLocalJson.versions[versionId] = version;
|
||||
|
||||
if (version.dist && version.dist.tarball) {
|
||||
let filename = URL.parse(version.dist.tarball).pathname.replace(/^.*\//, '');
|
||||
// we do NOT overwrite any existing records
|
||||
if (_.isNil(packageLocalJson._distfiles[filename])) {
|
||||
let hash = packageLocalJson._distfiles[filename] = {
|
||||
url: version.dist.tarball,
|
||||
sha: version.dist.shasum,
|
||||
};
|
||||
|
||||
const upLink = version[Symbol.for('__verdaccio_uplink')];
|
||||
|
||||
if (_.isNil(upLink) === false) {
|
||||
hash = this._updateUplinkToRemoteProtocol(hash, upLink);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (let tag in packageInfo['dist-tags']) {
|
||||
if (!packageLocalJson['dist-tags'][tag] || packageLocalJson['dist-tags'][tag] !== packageInfo['dist-tags'][tag]) {
|
||||
change = true;
|
||||
packageLocalJson['dist-tags'][tag] = packageInfo['dist-tags'][tag];
|
||||
}
|
||||
}
|
||||
for (let up in packageInfo._uplinks) {
|
||||
if (Object.prototype.hasOwnProperty.call(packageInfo._uplinks, up)) {
|
||||
const need_change = !this.utils.is_object(packageLocalJson._uplinks[up])
|
||||
|| packageInfo._uplinks[up].etag !== packageLocalJson._uplinks[up].etag
|
||||
|| packageInfo._uplinks[up].fetched !== packageLocalJson._uplinks[up].fetched;
|
||||
|
||||
if (need_change) {
|
||||
change = true;
|
||||
packageLocalJson._uplinks[up] = packageInfo._uplinks[up];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (packageInfo.readme !== packageLocalJson.readme) {
|
||||
packageLocalJson.readme = packageInfo.readme;
|
||||
change = true;
|
||||
}
|
||||
|
||||
if ('time' in packageInfo) {
|
||||
packageLocalJson.time = packageInfo.time;
|
||||
change = true;
|
||||
}
|
||||
|
||||
if (change) {
|
||||
this.logger.debug('updating package info');
|
||||
this._writePackage(name, packageLocalJson, function(err) {
|
||||
callback(err, packageLocalJson);
|
||||
});
|
||||
} else {
|
||||
callback(null, packageLocalJson);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the dist file keeps the same protocol as the uplink
|
||||
* @param {Object} hash metadata
|
||||
* @param {String} upLink registry key
|
||||
* @private
|
||||
*/
|
||||
_updateUplinkToRemoteProtocol(hash, upLink) {
|
||||
// if we got this information from a known registry,
|
||||
// use the same protocol for the tarball
|
||||
//
|
||||
// see https://github.com/rlidwka/sinopia/issues/166
|
||||
const tarballUrl = URL.parse(hash.url);
|
||||
const uplinkUrl = URL.parse(this.config.uplinks[upLink].url);
|
||||
|
||||
if (uplinkUrl.host === tarballUrl.host) {
|
||||
tarballUrl.protocol = uplinkUrl.protocol;
|
||||
hash.registry = upLink;
|
||||
hash.url = URL.format(tarballUrl);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new version to a previous local package.
|
||||
* @param {*} name
|
||||
* @param {*} version
|
||||
* @param {*} metadata
|
||||
* @param {*} tag
|
||||
* @param {*} callback
|
||||
*/
|
||||
addVersion(name, version, metadata, tag, callback) {
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
// keep only one readme per package
|
||||
data.readme = metadata.readme;
|
||||
delete metadata.readme;
|
||||
|
||||
if (data.versions[version] != null) {
|
||||
return cb( this.utils.ErrorCode.get409() );
|
||||
}
|
||||
|
||||
// if uploaded tarball has a different shasum, it's very likely that we have some kind of error
|
||||
if (this.utils.is_object(metadata.dist) && _.isString(metadata.dist.tarball)) {
|
||||
let tarball = metadata.dist.tarball.replace(/.*\//, '');
|
||||
|
||||
if (this.utils.is_object(data._attachments[tarball])) {
|
||||
|
||||
if (_.isNil(data._attachments[tarball].shasum) === false && _.isNil(metadata.dist.shasum) === false) {
|
||||
if (data._attachments[tarball].shasum != metadata.dist.shasum) {
|
||||
const errorMessage = `shasum error, ${data._attachments[tarball].shasum} != ${metadata.dist.shasum}`;
|
||||
return cb( this.utils.ErrorCode.get400(errorMessage) );
|
||||
}
|
||||
}
|
||||
|
||||
let currentDate = new Date().toISOString();
|
||||
data.time['modified'] = currentDate;
|
||||
|
||||
if (('created' in data.time) === false) {
|
||||
data.time.created = currentDate;
|
||||
}
|
||||
|
||||
data.time[version] = currentDate;
|
||||
data._attachments[tarball].version = version;
|
||||
}
|
||||
}
|
||||
|
||||
data.versions[version] = metadata;
|
||||
this.utils.tag_version(data, version, tag);
|
||||
|
||||
let addFailed = this.localList.add(name);
|
||||
if (addFailed) {
|
||||
return cb(this.utils.ErrorCode.get422(addFailed.message));
|
||||
}
|
||||
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge a new list of tags for a local package with the existing one.
|
||||
* @param {*} name
|
||||
* @param {*} tags
|
||||
* @param {*} callback
|
||||
*/
|
||||
mergeTags(name, tags, callback) {
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
for (let t in tags) {
|
||||
if (tags[t] === null) {
|
||||
delete data['dist-tags'][t];
|
||||
continue;
|
||||
}
|
||||
// be careful here with == (cast)
|
||||
if (_.isNil(data.versions[tags[t]])) {
|
||||
return cb( this._getVersionNotFound() );
|
||||
}
|
||||
|
||||
this.utils.tag_version(data, tags[t], t);
|
||||
}
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return version not found
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
_getVersionNotFound() {
|
||||
return this.utils.ErrorCode.get404('this version doesn\'t exist');
|
||||
}
|
||||
/**
|
||||
* Return a 'file not available' error
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
_getFileNotAvailable() {
|
||||
return this.utils.ErrorCode.get404('no such file available');
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the complete list of tags for a local package.
|
||||
* @param {*} name
|
||||
* @param {*} tags
|
||||
* @param {*} callback
|
||||
*/
|
||||
replaceTags(name, tags, callback) {
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
data['dist-tags'] = {};
|
||||
|
||||
for (let t in tags) {
|
||||
if (_.isNull(tags[t])) {
|
||||
delete data['dist-tags'][t];
|
||||
continue;
|
||||
}
|
||||
|
||||
if (_.isNil(data.versions[tags[t]])) {
|
||||
return cb( this._getVersionNotFound() );
|
||||
}
|
||||
|
||||
this.utils.tag_version(data, tags[t], t);
|
||||
}
|
||||
cb();
|
||||
}, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the package metadata, tags and attachments (tarballs).
|
||||
* Note: Currently supports unpublishing only.
|
||||
* @param {*} name
|
||||
* @param {*} metadata
|
||||
* @param {*} revision
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
changePackage(name, metadata, revision, callback) {
|
||||
if (!this.utils.is_object(metadata.versions) || !this.utils.is_object(metadata['dist-tags'])) {
|
||||
return callback( this.utils.ErrorCode.get422());
|
||||
}
|
||||
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
for (let ver in data.versions) {
|
||||
if (_.isNil(metadata.versions[ver])) {
|
||||
this.logger.info( {name: name, version: ver},
|
||||
'unpublishing @{name}@@{version}');
|
||||
delete data.versions[ver];
|
||||
for (let file in data._attachments) {
|
||||
if (data._attachments[file].version === ver) {
|
||||
delete data._attachments[file].version;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
data['dist-tags'] = metadata['dist-tags'];
|
||||
cb();
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a tarball.
|
||||
* @param {*} name
|
||||
* @param {*} filename
|
||||
* @param {*} revision
|
||||
* @param {*} callback
|
||||
*/
|
||||
removeTarball(name, filename, revision, callback) {
|
||||
assert(this.utils.validate_name(filename));
|
||||
|
||||
this._updatePackage(name, (data, cb) => {
|
||||
if (data._attachments[filename]) {
|
||||
delete data._attachments[filename];
|
||||
cb();
|
||||
} else {
|
||||
cb(this._getFileNotAvailable());
|
||||
}
|
||||
}, (err) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
const storage = this._getLocalStorage(name);
|
||||
|
||||
if (storage) {
|
||||
storage.unlink(filename, callback);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a tarball.
|
||||
* @param {String} name
|
||||
* @param {String} filename
|
||||
* @return {Stream}
|
||||
*/
|
||||
addTarball(name, filename) {
|
||||
assert(this.utils.validate_name(filename));
|
||||
|
||||
let length = 0;
|
||||
const shaOneHash = Crypto.createHash('sha1');
|
||||
const uploadStream = new customStream.UploadTarball();
|
||||
const _transform = uploadStream._transform;
|
||||
const storage = this._getLocalStorage(name);
|
||||
uploadStream.abort = function() {};
|
||||
uploadStream.done = function() {};
|
||||
|
||||
uploadStream._transform = function(data) {
|
||||
shaOneHash.update(data);
|
||||
// measure the length for validation reasons
|
||||
length += data.length;
|
||||
_transform.apply(uploadStream, arguments);
|
||||
};
|
||||
|
||||
if (name === pkgFileName || name === '__proto__') {
|
||||
process.nextTick(function() {
|
||||
uploadStream.emit('error', this.utils.ErrorCode.get403());
|
||||
});
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
if (!storage) {
|
||||
process.nextTick(() => {
|
||||
uploadStream.emit('error', ('can\'t upload this package'));
|
||||
});
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
const writeStream = storage.createWriteStream(filename);
|
||||
|
||||
writeStream.on('error', (err) => {
|
||||
if (err.code === fileExist) {
|
||||
uploadStream.emit('error', this.utils.ErrorCode.get409());
|
||||
} else if (err.code === noSuchFile) {
|
||||
// check if package exists to throw an appropriate message
|
||||
this.getPackageMetadata(name, function(_err, res) {
|
||||
if (_err) {
|
||||
uploadStream.emit('error', _err);
|
||||
} else {
|
||||
uploadStream.emit('error', err);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
uploadStream.emit('error', err);
|
||||
}
|
||||
});
|
||||
|
||||
writeStream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
uploadStream.emit('open');
|
||||
});
|
||||
|
||||
writeStream.on('success', () => {
|
||||
this._updatePackage(name, function updater(data, cb) {
|
||||
data._attachments[filename] = {
|
||||
shasum: shaOneHash.digest('hex'),
|
||||
};
|
||||
cb();
|
||||
}, function(err) {
|
||||
if (err) {
|
||||
uploadStream.emit('error', err);
|
||||
} else {
|
||||
uploadStream.emit('success');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
uploadStream.abort = function() {
|
||||
writeStream.abort();
|
||||
};
|
||||
|
||||
uploadStream.done = function() {
|
||||
if (!length) {
|
||||
uploadStream.emit('error', this.utils.ErrorCode.get422('refusing to accept zero-length file'));
|
||||
writeStream.abort();
|
||||
} else {
|
||||
writeStream.done();
|
||||
}
|
||||
};
|
||||
|
||||
uploadStream.pipe(writeStream);
|
||||
|
||||
return uploadStream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a tarball.
|
||||
* @param {*} name
|
||||
* @param {*} filename
|
||||
* @return {ReadTarball}
|
||||
*/
|
||||
getTarball(name, filename) {
|
||||
assert(this.utils.validate_name(filename));
|
||||
|
||||
const storage = this._getLocalStorage(name);
|
||||
|
||||
if (_.isNil(storage)) {
|
||||
return this._createFailureStreamResponse();
|
||||
}
|
||||
|
||||
return this._streamSuccessReadTarBall(storage, filename);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a stream that emits a read failure.
|
||||
* @private
|
||||
* @return {ReadTarball}
|
||||
*/
|
||||
_createFailureStreamResponse() {
|
||||
const stream = new customStream.ReadTarball();
|
||||
|
||||
process.nextTick(() => {
|
||||
stream.emit('error', this._getFileNotAvailable());
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a stream that emits the tarball data
|
||||
* @param {Object} storage
|
||||
* @param {String} filename
|
||||
* @private
|
||||
* @return {ReadTarball}
|
||||
*/
|
||||
_streamSuccessReadTarBall(storage, filename) {
|
||||
const stream = new customStream.ReadTarball();
|
||||
const readTarballStream = storage.createReadStream(filename);
|
||||
const e404 = this.utils.ErrorCode.get404;
|
||||
|
||||
stream.abort = function() {
|
||||
if (_.isNil(readTarballStream) === false) {
|
||||
readTarballStream.abort();
|
||||
}
|
||||
};
|
||||
|
||||
readTarballStream.on('error', function(err) {
|
||||
if (err && err.code === noSuchFile) {
|
||||
stream.emit('error', e404('no such file available'));
|
||||
} else {
|
||||
stream.emit('error', err);
|
||||
}
|
||||
});
|
||||
|
||||
readTarballStream.on('content-length', function(v) {
|
||||
stream.emit('content-length', v);
|
||||
});
|
||||
|
||||
readTarballStream.on('open', function() {
|
||||
// re-emitting open because it's handled in storage.js
|
||||
stream.emit('open');
|
||||
readTarballStream.pipe(stream);
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a package by name.
|
||||
* @param {*} name
|
||||
* @param {*} options
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
getPackageMetadata(name, options, callback) {
|
||||
if (_.isFunction(options)) {
|
||||
callback = options || {};
|
||||
}
|
||||
|
||||
const storage = this._getLocalStorage(name);
|
||||
if (_.isNil(storage)) {
|
||||
return callback( this.utils.ErrorCode.get404() );
|
||||
}
|
||||
|
||||
this.readJSON(storage, callback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a json file from storage.
|
||||
* @param {Object} storage
|
||||
* @param {Function} callback
|
||||
*/
|
||||
readJSON(storage, callback) {
|
||||
storage.readJSON(pkgFileName, (err, result) => {
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
return callback( this.utils.ErrorCode.get404() );
|
||||
} else {
|
||||
return callback(this._internalError(err, pkgFileName, 'error reading'));
|
||||
}
|
||||
}
|
||||
this._normalizePackage(result);
|
||||
callback(err, result);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Search a local package.
|
||||
* @param {*} startKey
|
||||
* @param {*} options
|
||||
* @return {Function}
|
||||
*/
|
||||
search(startKey, options) {
|
||||
const stream = new Stream.PassThrough({objectMode: true});
|
||||
|
||||
this._eachPackage((item, cb) => {
|
||||
fs.stat(item.path, (err, stats) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
if (stats.mtime > startKey) {
|
||||
this.getPackageMetadata(item.name, options, (err, data) => {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
const versions = this.utils.semver_sort(Object.keys(data.versions));
|
||||
const latest = data['dist-tags'] && data['dist-tags'].latest ? data['dist-tags'].latest : versions.pop();
|
||||
|
||||
if (data.versions[latest]) {
|
||||
const version = data.versions[latest];
|
||||
stream.push({
|
||||
'name': version.name,
|
||||
'description': version.description,
|
||||
'dist-tags': {latest: latest},
|
||||
'maintainers': version.maintainers || [version.author].filter(Boolean),
|
||||
'author': version.author,
|
||||
'repository': version.repository,
|
||||
'readmeFilename': version.readmeFilename || '',
|
||||
'homepage': version.homepage,
|
||||
'keywords': version.keywords,
|
||||
'bugs': version.bugs,
|
||||
'license': version.license,
|
||||
'time': {
|
||||
modified: item.time ? new Date(item.time).toISOString() : stats.mtime,
|
||||
},
|
||||
'versions': {[latest]: 'latest'},
|
||||
});
|
||||
}
|
||||
|
||||
cb();
|
||||
});
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
});
|
||||
}, function on_end(err) {
|
||||
if (err) return stream.emit('error', err);
|
||||
stream.end();
|
||||
});
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a wrapper that provides access to the package location.
|
||||
* @param {Object} packageInfo package name.
|
||||
* @return {Object}
|
||||
*/
|
||||
_getLocalStorage(packageInfo) {
|
||||
const path = this.__getLocalStoragePath(this.config.getMatchedPackagesSpec(packageInfo).storage);
|
||||
|
||||
if (_.isNil(path) || path === false) {
|
||||
this.logger.debug( {name: packageInfo}, 'this package has no storage defined: @{name}' );
|
||||
return null;
|
||||
}
|
||||
|
||||
return new PathWrapper(
|
||||
Path.join(
|
||||
Path.resolve(Path.dirname(this.config.self_path || ''), path),
|
||||
packageInfo
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify the right local storage location.
|
||||
* @param {String} path
|
||||
* @return {String}
|
||||
* @private
|
||||
*/
|
||||
__getLocalStoragePath(path) {
|
||||
if (_.isNil(path)) {
|
||||
path = this.config.storage;
|
||||
}
|
||||
return path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Walks through each package and calls `on_package` on them.
|
||||
* @param {*} onPackage
|
||||
* @param {*} on_end
|
||||
*/
|
||||
_eachPackage(onPackage, on_end) {
|
||||
let storages = {};
|
||||
let utils = this.utils;
|
||||
|
||||
storages[this.config.storage] = true;
|
||||
if (this.config.packages) {
|
||||
Object.keys(this.config.packages || {}).map( (pkg) => {
|
||||
if (this.config.packages[pkg].storage) {
|
||||
storages[this.config.packages[pkg].storage] = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
const base = Path.dirname(this.config.self_path);
|
||||
|
||||
async.eachSeries(Object.keys(storages), function(storage, cb) {
|
||||
fs.readdir(Path.resolve(base, storage), function(err, files) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
async.eachSeries(files, function(file, cb) {
|
||||
if (file.match(/^@/)) {
|
||||
// scoped
|
||||
fs.readdir(Path.resolve(base, storage, file), function(err, files) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
}
|
||||
|
||||
async.eachSeries(files, (file2, cb) => {
|
||||
if (utils.validate_name(file2)) {
|
||||
onPackage({
|
||||
name: `${file}/${file2}`,
|
||||
path: Path.resolve(base, storage, file, file2),
|
||||
}, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}, cb);
|
||||
});
|
||||
} else if (utils.validate_name(file)) {
|
||||
onPackage({
|
||||
name: file,
|
||||
path: Path.resolve(base, storage, file),
|
||||
}, cb);
|
||||
} else {
|
||||
cb();
|
||||
}
|
||||
}, cb);
|
||||
});
|
||||
}, on_end);
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalise package properties, tags, revision id.
|
||||
* @param {Object} pkg package reference.
|
||||
*/
|
||||
_normalizePackage(pkg) {
|
||||
const pkgProperties = ['versions', 'dist-tags', '_distfiles', '_attachments', '_uplinks', 'time'];
|
||||
|
||||
pkgProperties.forEach((key) => {
|
||||
if (!this.utils.is_object(pkg[key])) {
|
||||
pkg[key] = {};
|
||||
}
|
||||
});
|
||||
|
||||
if (_.isString(pkg._rev) === false) {
|
||||
pkg._rev = '0-0000000000000000';
|
||||
}
|
||||
// normalize dist-tags
|
||||
this.utils.normalize_dist_tags(pkg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve either a previously created local package or a boilerplate.
|
||||
* @param {*} name
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
_readCreatePackage(name, callback) {
|
||||
const storage = this._getLocalStorage(name);
|
||||
if (!storage) {
|
||||
const data = generatePackageTemplate(name);
|
||||
this._normalizePackage(data);
|
||||
return callback(null, data);
|
||||
}
|
||||
storage.readJSON(pkgFileName, (err, data) => {
|
||||
// TODO: race condition
|
||||
if (err) {
|
||||
if (err.code === noSuchFile) {
|
||||
// if package doesn't exist, we create it here
|
||||
data = generatePackageTemplate(name);
|
||||
} else {
|
||||
return callback(this._internalError(err, pkgFileName, 'error reading'));
|
||||
}
|
||||
}
|
||||
this._normalizePackage(data);
|
||||
callback(null, data);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle internal error
|
||||
* @param {*} err
|
||||
* @param {*} file
|
||||
* @param {*} message
|
||||
* @return {Object} Error instance
|
||||
*/
|
||||
_internalError(err, file, message) {
|
||||
this.logger.error( {err: err, file: file},
|
||||
message + ' @{file}: @{!err.message}' );
|
||||
return this.utils.ErrorCode.get500();
|
||||
}
|
||||
|
||||
/**
|
||||
* This function allows the package to be updated thread-safely
|
||||
Algorithm:
|
||||
1. lock package.json for writing
|
||||
2. read package.json
|
||||
3. updateFn(pkg, cb), and wait for cb
|
||||
4. write package.json.tmp
|
||||
5. move package.json.tmp package.json
|
||||
6. callback(err?)
|
||||
* @param {*} name package name
|
||||
* @param {*} updateFn function(package, cb) - update function
|
||||
* @param {*} _callback callback that gets invoked after it's all updated
|
||||
* @return {Function}
|
||||
*/
|
||||
_updatePackage(name, updateFn, _callback) {
|
||||
const storage = this._getLocalStorage(name);
|
||||
if (!storage) {
|
||||
return _callback( this.utils.ErrorCode.get404() );
|
||||
}
|
||||
storage.lockAndReadJSON(pkgFileName, (err, json) => {
|
||||
let locked = false;
|
||||
|
||||
// callback that cleans up lock first
|
||||
const callback = function(err) {
|
||||
let _args = arguments;
|
||||
if (locked) {
|
||||
storage.unlock_file(pkgFileName, function() {
|
||||
// ignore any error from the unlock
|
||||
_callback.apply(err, _args);
|
||||
});
|
||||
} else {
|
||||
_callback.apply(null, _args);
|
||||
}
|
||||
};
|
||||
|
||||
if (!err) {
|
||||
locked = true;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
if (err.code === resourceNotAvailable) {
|
||||
return callback( this.utils.ErrorCode.get503() );
|
||||
} else if (err.code === noSuchFile) {
|
||||
return callback( this.utils.ErrorCode.get404() );
|
||||
} else {
|
||||
return callback(err);
|
||||
}
|
||||
}
|
||||
|
||||
this._normalizePackage(json);
|
||||
updateFn(json, (err) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
this._writePackage(name, json, callback);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the revision (_rev) string for a package.
|
||||
* @param {*} name
|
||||
* @param {*} json
|
||||
* @param {*} callback
|
||||
* @return {Function}
|
||||
*/
|
||||
_writePackage(name, json, callback) {
|
||||
// calculate revision a la couchdb
|
||||
if (typeof(json._rev) !== 'string') {
|
||||
json._rev = '0-0000000000000000';
|
||||
}
|
||||
const rev = json._rev.split('-');
|
||||
json._rev = ((+rev[0] || 0) + 1) + '-' + Crypto.pseudoRandomBytes(8).toString('hex');
|
||||
|
||||
let storage = this._getLocalStorage(name);
|
||||
if (!storage) {
|
||||
return callback();
|
||||
}
|
||||
storage.writeJSON(pkgFileName, json, callback);
|
||||
}
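The revision bump above follows a CouchDB-like '<counter>-<random hex>' format. A short illustration with invented values.
// Illustrative values only.
// before: json._rev === '3-9c5f0a1b2c3d4e5f'
// rev    = ['3', '9c5f0a1b2c3d4e5f']
// after:  json._rev === '4-' + Crypto.pseudoRandomBytes(8).toString('hex')
//         e.g. '4-1af3b2c4d5e6f708'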
|
||||
}
|
||||
|
||||
const PathWrapper = (function() {
|
||||
/**
|
||||
* A wrapper adding paths to fs_storage methods.
|
||||
*/
|
||||
class Wrapper {
|
||||
|
||||
/**
|
||||
* @param {*} path
|
||||
*/
|
||||
constructor(path) {
|
||||
this.path = path;
|
||||
}
|
||||
}
|
||||
|
||||
const wrapLocalStorageMethods = function(method) {
|
||||
return function() {
|
||||
let args = Array.prototype.slice.apply(arguments);
|
||||
/* eslint no-invalid-this: off */
|
||||
args[0] = Path.join(this.path, args[0] || '');
|
||||
return fsStorage[method].apply(null, args);
|
||||
};
|
||||
};
|
||||
|
||||
for (let i in fsStorage) {
|
||||
if (fsStorage.hasOwnProperty(i)) {
|
||||
Wrapper.prototype[i] = wrapLocalStorageMethods(i);
|
||||
}
|
||||
}
|
||||
|
||||
return Wrapper;
|
||||
})();
|
||||
|
||||
module.exports = LocalStorage;
|
|
@@ -1,14 +1,12 @@
|
|||
'use strict';
|
||||
|
||||
const JSONStream = require('JSONStream');
|
||||
const createError = require('http-errors');
|
||||
const _ = require('lodash');
|
||||
const request = require('request');
|
||||
const Stream = require('stream');
|
||||
const URL = require('url');
|
||||
const Logger = require('../logger');
|
||||
const Logger = require('./logger');
|
||||
const MyStreams = require('@verdaccio/streams');
|
||||
const Utils = require('../utils');
|
||||
const Utils = require('./utils');
|
||||
const zlib = require('zlib');
|
||||
|
||||
const encode = function(thing) {
|
||||
|
@@ -590,4 +588,4 @@ class ProxyStorage {
|
|||
|
||||
}
|
||||
|
||||
module.exports = ProxyStorage;
|
||||
export default ProxyStorage;
|
|
@@ -143,7 +143,7 @@ function filter_tarball_urls(pkg, req, config) {
|
|||
* @return {Boolean} whether a package has been tagged
|
||||
*/
|
||||
function tag_version(data, version, tag) {
|
||||
if (tag) {
|
||||
if (_.isEmpty(tag) === false) {
|
||||
if (data['dist-tags'][tag] !== version) {
|
||||
if (semver.parse(version, true)) {
|
||||
// valid version - store
|
||||
|
@@ -352,6 +352,36 @@ const ErrorCode = {
|
|||
|
||||
const parseConfigFile = (config_path) => YAML.safeLoad(fs.readFileSync(config_path, 'utf8'));
|
||||
|
||||
/**
|
||||
* Check whether the path already exist.
|
||||
* @param {String} path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function folder_exists(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
return stat.isDirectory();
|
||||
} catch(_) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the file already exist.
|
||||
* @param {String} path
|
||||
* @return {Boolean}
|
||||
*/
|
||||
function fileExists(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
return stat.isFile();
|
||||
} catch(_) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports.folder_exists = folder_exists;
|
||||
module.exports.file_exists = fileExists;
|
||||
module.exports.parseInterval = parseInterval;
|
||||
module.exports.semver_sort = semverSort;
|
||||
module.exports.parse_address = parse_address;
|
||||
|
|
|
@@ -1,3 +1,13 @@
|
|||
module.exports.spliceURL = function spliceURL() {
|
||||
return Array.from(arguments).reduce((lastResult, current) => lastResult + current).replace(/([^:])(\/)+(.)/g, `$1/$3`);
|
||||
};
|
||||
// @flow
|
||||
import crypto from 'crypto';
|
||||
|
||||
export function spliceURL(...args: Array<string>): string {
|
||||
return Array.from(args).reduce((lastResult, current) => lastResult + current).replace(/([^:])(\/)+(.)/g, `$1/$3`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get MD5 from string
|
||||
*/
|
||||
export function stringToMD5(string: string): string {
|
||||
return crypto.createHash('md5').update(string).digest('hex');
|
||||
}
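Expected behaviour of the two helpers above, with illustrative inputs; the URLs and email are invented.
// Illustrative only.
spliceURL('http://localhost:4873/', '/-/static', '//main.js');
// -> 'http://localhost:4873/-/static/main.js' (duplicate slashes collapsed, '://' kept intact)

stringToMD5('user@example.com');
// -> the 32-character hexadecimal MD5 digest of the input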
|
||||
|
|
15
src/utils/user.js (Normal file)
|
@@ -0,0 +1,15 @@
|
|||
// @flow
|
||||
import {stringToMD5} from './string';
|
||||
|
||||
/**
|
||||
* Generate gravatar url from email address
|
||||
*/
|
||||
export function generateGravatarUrl(email?: string): string {
|
||||
if (typeof email === 'string') {
|
||||
email = email.trim().toLocaleLowerCase();
|
||||
let emailMD5 = stringToMD5(email);
|
||||
return `https://www.gravatar.com/avatar/${emailMD5}`;
|
||||
} else {
|
||||
return 'https://www.gravatar.com/avatar/00000000000000000000000000000000?d=mm';
|
||||
}
|
||||
}
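A small usage sketch for generateGravatarUrl; the addresses below are made up.
// Illustrative only.
generateGravatarUrl(' John.Doe@Example.com ');
// -> 'https://www.gravatar.com/avatar/' + MD5 of 'john.doe@example.com' (trimmed and lower-cased)

generateGravatarUrl();
// -> 'https://www.gravatar.com/avatar/00000000000000000000000000000000?d=mm' (default avatar)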
|
|
@@ -5,6 +5,9 @@
|
|||
"jest": true,
|
||||
"es6": true
|
||||
},
|
||||
"globals": {
|
||||
"__DEBUG__": true
|
||||
},
|
||||
"rules": {
|
||||
"require-jsdoc": 0,
|
||||
"no-console": [
|
||||
|
|
|
@@ -11,6 +11,7 @@ import storage from '../../../utils/storage';
|
|||
|
||||
import classes from './header.scss';
|
||||
import './logo.png';
|
||||
import {getRegistryURL} from '../../../utils/url';
|
||||
|
||||
export default class Header extends React.Component {
|
||||
state = {
|
||||
|
@@ -25,6 +26,7 @@ export default class Header extends React.Component {
|
|||
super(props);
|
||||
this.toggleLoginModal = this.toggleLoginModal.bind(this);
|
||||
this.handleSubmit = this.handleSubmit.bind(this);
|
||||
this.handleInput = this.handleInput.bind(this);
|
||||
}
|
||||
|
||||
toggleLoginModal() {
|
||||
|
@@ -138,8 +140,7 @@ export default class Header extends React.Component {
|
|||
}
|
||||
|
||||
render() {
|
||||
// Don't add slash if it's not a sub directory
|
||||
const registryURL = `${location.origin}${location.pathname === '/' ? '' : location.pathname}`;
|
||||
const registryURL = getRegistryURL();
|
||||
|
||||
return (
|
||||
<header className={ classes.header }>
|
||||
|
@@ -169,9 +170,9 @@ export default class Header extends React.Component {
|
|||
</Alert>
|
||||
}
|
||||
<br/>
|
||||
<Input placeholder="Username" onChange={this.handleInput.bind(this, 'username')} />
|
||||
<Input name="username" placeholder="Username" onChange={this.handleInput.bind(this, 'username')} />
|
||||
<br/><br/>
|
||||
<Input type="password" placeholder="Type your password" onChange={this.handleInput.bind(this, 'password')} />
|
||||
<Input name="password" type="password" placeholder="Type your password" onChange={this.handleInput.bind(this, 'password')} />
|
||||
</Dialog.Body>
|
||||
<Dialog.Footer className="dialog-footer">
|
||||
<Button onClick={ () => this.toggleLoginModal() }>
|
||||
|
|
|
@@ -13,7 +13,7 @@
|
|||
font-size: 24px;
|
||||
}
|
||||
|
||||
p {
|
||||
.noPkgIntro {
|
||||
line-height: 1.5;
|
||||
margin: 0 auto;
|
||||
font-size: 14px;
|
||||
|
|
|
@@ -1,16 +1,15 @@
|
|||
|
||||
import React from 'react';
|
||||
import SyntaxHighlighter, {registerLanguage} from 'react-syntax-highlighter/dist/light';
|
||||
import sunburst from 'react-syntax-highlighter/src/styles/sunburst';
|
||||
import js from 'react-syntax-highlighter/dist/languages/javascript';
|
||||
|
||||
import classes from './help.scss';
|
||||
import {getRegistryURL} from '../../../utils/url';
|
||||
|
||||
registerLanguage('javascript', js);
|
||||
|
||||
const Help = () => {
|
||||
// Don't add slash if it's not a sub directory
|
||||
const registryURL = `${location.origin}${location.pathname === '/' ? '' : location.pathname}`;
|
||||
const registryURL = getRegistryURL();
|
||||
|
||||
return (
|
||||
<div className={classes.help}>
|
||||
|
@@ -18,7 +17,7 @@ const Help = () => {
|
|||
<h1 className={classes.noPkgTitle}>
|
||||
No Package Published Yet
|
||||
</h1>
|
||||
<p>
|
||||
<div className={classes.noPkgIntro}>
|
||||
<div>
|
||||
To publish your first package just:
|
||||
</div>
|
||||
|
@@ -26,15 +25,15 @@ const Help = () => {
|
|||
<strong>
|
||||
1. Login
|
||||
</strong>
|
||||
<SyntaxHighlighter language='javascript' style={sunburst}>
|
||||
<SyntaxHighlighter language='javascript' style={sunburst} id="adduser">
|
||||
{`npm adduser --registry ${registryURL}`}
|
||||
</SyntaxHighlighter>
|
||||
<strong>2. Publish</strong>
|
||||
<SyntaxHighlighter language='javascript' style={sunburst}>
|
||||
<SyntaxHighlighter language='javascript' style={sunburst} id="publish">
|
||||
{`npm publish --registry ${registryURL}`}
|
||||
</SyntaxHighlighter>
|
||||
<strong>3. Refresh this page!</strong>
|
||||
</p>
|
||||
</div>
|
||||
</li>
|
||||
</div>
|
||||
);
|
||||
|
|
|
@@ -1,4 +1,3 @@
|
|||
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import isNil from 'lodash/isNil';
|
||||
|
@@ -8,7 +7,6 @@ import Readme from '../Readme';
|
|||
import classes from './packageDetail.scss';
|
||||
|
||||
const PackageDetail = (props) => {
|
||||
|
||||
const displayState = (readMe) => {
|
||||
if (isNil(readMe)) {
|
||||
return;
|
||||
|
@@ -19,7 +17,6 @@ const PackageDetail = (props) => {
|
|||
return (
|
||||
<div className={classes.pkgDetail}>
|
||||
<h1 className={ classes.title }>{ props.package }</h1>
|
||||
<hr/>
|
||||
<div className={classes.readme}>
|
||||
{displayState(props.readMe)}
|
||||
</div>
|
||||
|
|
|
@@ -2,8 +2,13 @@
|
|||
|
||||
.pkgDetail {
|
||||
.title {
|
||||
font-size: 28px;
|
||||
color: $text-black;
|
||||
font-size: 38px;
|
||||
color: $primary-color;
|
||||
border-bottom: 1px solid $border-color;
|
||||
text-transform: capitalize;
|
||||
font-weight: 600;
|
||||
margin: 0 0 10px;
|
||||
padding-bottom: 5px;
|
||||
}
|
||||
|
||||
.readme {
|
||||
|
|
25
src/webui/src/components/PackageSidebar/Module/index.jsx (Normal file)
|
@@ -0,0 +1,25 @@
|
|||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
|
||||
import classes from './style.scss';
|
||||
|
||||
export default function Module({title, description, children, className}) {
|
||||
return (
|
||||
<div className={`${classes.module} ${className}`}>
|
||||
<h2 className={classes.moduleTitle}>
|
||||
{title}
|
||||
{description && <span>{description}</span>}
|
||||
</h2>
|
||||
<div>
|
||||
{children}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
Module.propTypes = {
|
||||
title: PropTypes.string.isRequired,
|
||||
description: PropTypes.string,
|
||||
children: PropTypes.any.isRequired,
|
||||
className: PropTypes.string
|
||||
};
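A hedged example of how the Module wrapper above might be used; the props below are invented.
// Illustrative JSX only.
<Module title="Dependencies" description="3 dependencies" className="custom-class">
  <ul>{/* module body goes here */}</ul>
</Module>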
|
21
src/webui/src/components/PackageSidebar/Module/style.scss (Normal file)
|
@@ -0,0 +1,21 @@
|
|||
@import '../../../styles/variable';
|
||||
|
||||
.module {
|
||||
.moduleTitle {
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
font-size: 24px;
|
||||
color: $primary-color;
|
||||
margin: 0 0 10px;
|
||||
padding: 5px 0;
|
||||
font-weight: 600;
|
||||
border-bottom: 1px solid $border-color;
|
||||
|
||||
span { // description
|
||||
font-size: 14px;
|
||||
color: $text-grey;
|
||||
margin-left: auto;
|
||||
font-weight: lighter;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,11 @@
|
|||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
|
||||
import classes from './style.scss';
|
||||
|
||||
export default function ModuleContentPlaceholder({text}) {
|
||||
return <p className={classes.emptyPlaceholder}>{text}</p>;
|
||||
}
|
||||
ModuleContentPlaceholder.propTypes = {
|
||||
text: PropTypes.string.isRequired
|
||||
};
|
|
@@ -0,0 +1,8 @@
|
|||
@import '../../../styles/variable';
|
||||
|
||||
.emptyPlaceholder {
|
||||
text-align: center;
|
||||
margin: 20px 0;
|
||||
font-size: 16px;
|
||||
color: $text-grey;
|
||||
}
|
60
src/webui/src/components/PackageSidebar/index.jsx (Normal file)
|
@@ -0,0 +1,60 @@
|
|||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import LastSync from './modules/LastSync';
|
||||
import Maintainers from './modules/Maintainers';
|
||||
import Dependencies from './modules/Dependencies';
|
||||
|
||||
import API from '../../../utils/api';
|
||||
|
||||
export default class PackageSidebar extends React.Component {
|
||||
state = {};
|
||||
|
||||
static propTypes = {
|
||||
packageName: PropTypes.string.isRequired
|
||||
};
|
||||
|
||||
constructor(props) {
|
||||
super(props);
|
||||
this.loadPackageData = this.loadPackageData.bind(this);
|
||||
}
|
||||
|
||||
async componentDidMount() {
|
||||
await this.loadPackageData(this.props.packageName);
|
||||
}
|
||||
|
||||
async componentWillReceiveProps(newProps) {
|
||||
if (newProps.packageName !== this.props.packageName) {
|
||||
await this.loadPackageData(newProps.packageName);
|
||||
}
|
||||
}
|
||||
|
||||
async loadPackageData(packageName) {
|
||||
let packageMeta;
|
||||
|
||||
try {
|
||||
packageMeta = (await API.get(`sidebar/${packageName}`)).data;
|
||||
} catch (err) {
|
||||
this.setState({
|
||||
failed: true
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
this.setState({
|
||||
packageMeta
|
||||
});
|
||||
}
|
||||
|
||||
render() {
|
||||
let {packageMeta} = this.state;
|
||||
|
||||
return packageMeta ?
|
||||
(<aside>
|
||||
<LastSync packageMeta={packageMeta} />
|
||||
<Maintainers packageMeta={packageMeta} />
|
||||
<Dependencies packageMeta={packageMeta} />
|
||||
{/* Package management module? Help us implement it! */}
|
||||
</aside>):
|
||||
(<aside>Loading package information...</aside>);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,47 @@
|
|||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import _ from 'lodash';
|
||||
import Module from '../../Module';
|
||||
|
||||
import classes from './style.scss';
|
||||
import {getDetailPageURL} from '../../../../../utils/url';
|
||||
import ModuleContentPlaceholder from '../../ModuleContentPlaceholder';
|
||||
|
||||
export default class Dependencies extends React.Component {
|
||||
static propTypes = {
|
||||
packageMeta: PropTypes.object.isRequired
|
||||
};
|
||||
|
||||
get dependencies() {
|
||||
return _.get(this, 'props.packageMeta.latest.dependencies', {});
|
||||
}
|
||||
|
||||
render() {
|
||||
let dependencies = this.dependencies;
|
||||
let dependenciesList = Object.keys(dependencies);
|
||||
|
||||
if (!dependenciesList.length) {
|
||||
return <ModuleContentPlaceholder text="Zero Dependencies!"/>;
|
||||
}
|
||||
|
||||
return (
|
||||
<Module
|
||||
title="Dependencies"
|
||||
className={classes.dependenciesModule}
|
||||
>
|
||||
<ul>
|
||||
{
|
||||
dependenciesList.map((dependenceName, index) => {
|
||||
return (
|
||||
<li key={index} title={`Depend on version: ${dependencies[dependenceName]}`}>
|
||||
<a href={getDetailPageURL(dependenceName)}>{dependenceName}</a>
|
||||
{index < dependenciesList.length - 1 && <span>, </span>}
|
||||
</li>
|
||||
);
|
||||
})
|
||||
}
|
||||
</ul>
|
||||
</Module>
|
||||
);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,13 @@
@import '../../../../styles/variable';

.dependenciesModule {
  li {
    display: inline-block;
    font-size: 14px;
    line-height: 1.5;

    a {
      color: $primary-color;
    }
  }
}
@@ -0,0 +1,56 @@
import React from 'react';
import PropTypes from 'prop-types';
import Module from '../../Module';
import datetime from '../../../../../utils/datetime';
import classes from './style.scss';

export default class LastSync extends React.Component {
  static propTypes = {
    packageMeta: PropTypes.object.isRequired
  };

  get lastUpdate() {
    let lastUpdate = 0;
    Object.keys(this.props.packageMeta._uplinks).forEach((upLinkName) => {
      const status = this.props.packageMeta._uplinks[upLinkName];

      if (status.fetched > lastUpdate) {
        lastUpdate = status.fetched;
      }
    });

    return lastUpdate ? datetime(lastUpdate) : '';
  }

  get recentReleases() {
    let recentReleases = Object.keys(this.props.packageMeta.time).map((version) => {
      return {
        version,
        time: datetime(this.props.packageMeta.time[version])
      };
    });

    return recentReleases.slice(recentReleases.length - 3, recentReleases.length).reverse();
  }

  render() {
    return (
      <Module
        title="Last Sync"
        description={this.lastUpdate}
        className={classes.releasesModule}
      >
        <ul>
          {this.recentReleases.map((versionInfo) => {
            return (
              <li key={versionInfo.version}>
                <span>{versionInfo.version}</span>
                <span>{versionInfo.time}</span>
              </li>
            );
          })}
        </ul>
      </Module>
    );
  }
}
@@ -0,0 +1,10 @@
.releasesModule {
  li {
    display: flex;
    font-size: 14px;
    line-height: 2;
    span:last-child {
      margin-left: auto;
    }
  }
}
@@ -0,0 +1,19 @@
import React from 'react';
import PropTypes from 'prop-types';

import classes from './style.scss';

export default function MaintainerInfo({title, name, avatar}) {
  let avatarDescription = `${title} ${name}'s avatar`;
  return (
    <div className={classes.maintainer} title={name}>
      <img src={avatar} alt={avatarDescription} title={avatarDescription}/>
      <span>{name}</span>
    </div>
  );
}
MaintainerInfo.propTypes = {
  title: PropTypes.string.isRequired,
  name: PropTypes.string.isRequired,
  avatar: PropTypes.string.isRequired
};
@@ -0,0 +1,25 @@
.maintainer {
  $mine-height: 30px;
  display: flex;
  line-height: $mine-height;
  cursor: default;

  &:not(:last-child) {
    margin-bottom: 10px;
  }
  img {
    width: $mine-height;
    height: $mine-height;
    margin-right: 10px;
    border-radius: 100%;
    flex-shrink: 0;
  }
  span {
    font-size: 14px;
    flex-shrink: 1;
    white-space: nowrap;
    word-break: break-all;
    overflow: hidden;
    text-overflow: ellipsis;
  }
}
@@ -0,0 +1,84 @@
import React from 'react';
import PropTypes from 'prop-types';
import _ from 'lodash';
import Module from '../../Module';

import classes from './style.scss';
import MaintainerInfo from './MaintainerInfo';

export default class Maintainers extends React.Component {
  static propTypes = {
    packageMeta: PropTypes.object.isRequired
  };

  state = {};

  constructor(props) {
    super(props);
    this.handleShowAllContributors = this.handleShowAllContributors.bind(this);
  }

  get author() {
    return _.get(this, 'props.packageMeta.latest.author');
  }

  get contributors() {
    let contributors = _.get(this, 'props.packageMeta.latest.contributors', {});
    return _.filter(contributors, (contributor) => {
      return (
        contributor.name !== _.get(this, 'author.name') &&
        contributor.email !== _.get(this, 'author.email')
      );
    });
  }

  get showAllContributors() {
    return this.state.showAllContributors || _.size(this.contributors) <= 5;
  }

  get uniqueContributors() {
    if (!this.contributors) return [];

    return _.uniqBy(this.contributors, (contributor) => contributor.name).slice(0, 5);
  }

  handleShowAllContributors() {
    this.setState({
      showAllContributors: true
    });
  }

  renderContributors() {
    if (!this.contributors) return null;

    return (this.showAllContributors ? this.contributors : this.uniqueContributors)
      .map((contributor, index) => {
        return <MaintainerInfo key={index} title="Contributors" name={contributor.name} avatar={contributor.avatar}/>;
      });
  }

  render() {
    let author = this.author;

    return (
      <Module
        title="Maintainers"
        className={classes.maintainersModule}
      >
        <ul>
          {author && <MaintainerInfo title="Author" name={author.name} avatar={author.avatar}/>}
          {this.renderContributors()}
        </ul>
        {!this.showAllContributors && (
          <button
            onClick={this.handleShowAllContributors}
            className={classes.showAllContributors}
            title="Current list only show the author and first 5 contributors unique by name"
          >
            Show all contributor
          </button>
        )}
      </Module>
    );
  }
}
@@ -0,0 +1,14 @@
@import '../../../../styles/variable';

.maintainersModule {
  .showAllContributors {
    cursor: pointer;
    width: 100%;
    background: none;
    border: none;
    font-size: 14px;
    color: $primary-color;
    text-align: center;
    padding: 10px 0;
  }
}
@@ -1,2 +1,25 @@
@import '../../styles/variable';

.twoColumn {
  @include container-size();
  margin: auto 10px;
  display: flex;

  > div {
    &:first-child {
      flex-shrink: 1;
      min-width: 300px;
      width: 100%;
    }
  }

  > aside {
    &:last-child {
      margin-left: auto;

      padding-left: 15px;
      flex-shrink: 0;
      width: 285px;
    }
  }
}
@@ -1,44 +0,0 @@
import React from 'react';
import PropTypes from 'prop-types';
import {Loading} from 'element-react';
import isEmpty from 'lodash/isEmpty';

import PackageDetail from '../../components/PackageDetail';
import NotFound from '../../components/NotFound';
import API from '../../../utils/api';

const loadingMessage = 'Loading...';

export default class Detail extends React.Component {
  static propTypes = {
    match: PropTypes.object
  }

  state = {
    readMe: '',
    notFound: false,
  }

  async componentDidMount() {
    try {
      const resp = await API.get(`package/readme/${this.props.match.params.package}`);
      this.setState({
        readMe: resp.data
      });
    } catch (err) {
      this.setState({
        notFound: true
      });
    }
  }

  render() {
    if (this.state.notFound) {
      return <NotFound
        pkg={this.props.match.params.package}/>;
    } else if (isEmpty(this.state.readMe)) {
      return <Loading text={loadingMessage} />;
    }
    return <PackageDetail readMe={this.state.readMe} package={this.props.match.params.package}/>;
  }
}
76  src/webui/src/modules/detail/index.jsx  Normal file

@@ -0,0 +1,76 @@
import React from 'react';
import PropTypes from 'prop-types';
import {Loading} from 'element-react';
import isEmpty from 'lodash/isEmpty';

import PackageDetail from '../../components/PackageDetail';
import NotFound from '../../components/NotFound';
import API from '../../../utils/api';

import classes from './detail.scss';
import PackageSidebar from '../../components/PackageSidebar/index';

const loadingMessage = 'Loading...';

export default class Detail extends React.Component {
  static propTypes = {
    match: PropTypes.object
  };

  state = {
    readMe: '',
    notFound: false,
  };

  getPackageName(props = this.props) {
    let params = props.match.params;
    return `${(params.scope && '@' + params.scope + '/') || ''}${params.package}`;
  }
  get packageName() {
    return this.getPackageName();
  }

  async componentDidMount() {
    await this.loadPackageInfo(this.packageName);
  }

  async componentWillReceiveProps(newProps) {
    let packageName = this.getPackageName(newProps);
    if (packageName === this.packageName) return;

    await this.loadPackageInfo(packageName);
  }

  async loadPackageInfo(packageName) {
    this.setState({
      readMe: ''
    });

    try {
      const resp = await API.get(`package/readme/${packageName}`);
      this.setState({
        readMe: resp.data
      });
    } catch (err) {
      this.setState({
        notFound: true
      });
    }
  }

  render() {
    let {notFound, readMe} = this.state;

    if (notFound) {
      return <NotFound pkg={this.packageName}/>;
    } else if (isEmpty(readMe)) {
      return <Loading text={loadingMessage} />;
    }
    return (
      <div className={classes.twoColumn}>
        <PackageDetail readMe={readMe} package={this.packageName}/>
        <PackageSidebar packageName={this.packageName} />
      </div>
    );
  }
}
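For reference, a sketch of what `getPackageName()` yields for the two route shapes registered in the router change below (the package names are illustrative):

// /detail/@verdaccio/local-storage -> params = {scope: 'verdaccio', package: 'local-storage'}
//                                     getPackageName() === '@verdaccio/local-storage'
// /detail/lodash                   -> params = {package: 'lodash'}
//                                     getPackageName() === 'lodash'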
@@ -13,7 +13,8 @@ const RouterApp = () => {
      <div className="container">
        <Switch>
          <Route exact path="/(search/:keyword)?" component={ Home } />
          <Route path="/detail/:package*" component={Detail} />
          <Route exact path="/detail/@:scope/:package" component={Detail} />
          <Route exact path="/detail/:package" component={Detail} />
        </Switch>
      </div>
    </div>
@@ -3,6 +3,13 @@
body {
  font-family: $font;
  font-size: 12px;
  color: $text-black;
}

ul {
  margin: 0;
  padding: 0;
  list-style: none;
}

:global {
@@ -4,13 +4,15 @@
  margin-right: auto;
  width: 100%;
  min-width: 400px;
  max-width: 960px;
  max-width: 1140px;
}

$space-lg: 30px;
$font: "Arial";
// Font family from Bootstrap v4 Reboot.css
$font: -apple-system, system-ui, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";

/* Colors */
$primary-color: #de4136;
$border-color: #e4e8f1;
$border-color: #e3e3e3;
$text-black: #3c3c3c;
$text-grey: #95989A;
|
|||
__webpack_public_path__ = window.VERDACCIO_API_URL.replace(/\/verdaccio\/$/, '/static/') // eslint-disable-line
|
||||
if (!__DEBUG__) {
|
||||
__webpack_public_path__ = window.VERDACCIO_API_URL.replace(/\/verdaccio\/$/, '/static/') // eslint-disable-line
|
||||
}
|
||||
|
|
16  src/webui/utils/datetime.js  Normal file

@@ -0,0 +1,16 @@
/**
 * Date time in LocaleString
 * @param {string} input
 * @returns {string}
 */
export default function datetime(input) {
  const date = new Date(input);
  return date.toLocaleString('en-GB', {
    month: 'short',
    day: 'numeric',
    year: 'numeric',
    hour: 'numeric',
    minute: 'numeric',
    hour12: true
  });
}
12  src/webui/utils/url.js  Normal file

@@ -0,0 +1,12 @@
export function getRegistryURL() {
  // Don't add slash if it's not a sub directory
  return `${location.origin}${location.pathname === '/' ? '' : location.pathname}`;
}

/**
 * Get specified package detail page url
 * @param {string} packageName
 */
export function getDetailPageURL(packageName) {
  return `${getRegistryURL()}/#/detail/${packageName}`;
}
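A quick sketch of the resulting URLs, assuming the registry UI is served from http://localhost:4873 (host and package names are illustrative):

// getRegistryURL()               -> 'http://localhost:4873'
// getDetailPageURL('lodash')     -> 'http://localhost:4873/#/detail/lodash'
// getDetailPageURL('@scope/pkg') -> 'http://localhost:4873/#/detail/@scope/pkg'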
@@ -14,7 +14,8 @@
    2,
    {
      "allow": [
        "log"
        "log",
        "error"
      ]
    }
  ],
@@ -1,7 +0,0 @@
All tests are split in three folders:

- `unit` - Tests that cover functions that transform data in a non-trivial way. These tests simply `require()` a few files and run code in there, so they are very fast.
- `functional` - Tests that launch a verdaccio instance and perform a series of requests to it over http. They are slower than unit tests.
- `integration` - Tests that launch a verdaccio instance and do requests to it using npm. They are really slow and can hit a real npm registry.

Unit and functional tests are executed automatically by running `npm test` from the project's root directory. Integration tests are supposed to be executed manually from time to time.
@@ -1,51 +1,46 @@
'use strict';
import fs from 'fs';
import path from 'path';

const Server = require('../lib/server');
const fs = require('fs');
const path = require('path');

module.exports = function() {
  const server = new Server('http://localhost:55551/');

  describe('npm adduser', function() {
export default function(server) {
  describe('npm adduser', () => {
    const user = String(Math.random());
    const pass = String(Math.random());
    before(function() {
    beforeAll(function() {
      return server.auth(user, pass)
        .status(201)
        .body_ok(/user .* created/);
    });

    it('should create new user', function() {});
    test('should create new user', () => {});

    it('should log in', function() {
    test('should log in', () => {
      return server.auth(user, pass)
        .status(201)
        .body_ok(/you are authenticated as/);
    });

    it('should not register more users', function() {
    test('should not register more users', () => {
      return server.auth(String(Math.random()), String(Math.random()))
        .status(409)
        .body_error(/maximum amount of users reached/);
    });
  });

  describe('should adduser created with htpasswd', function() {
  describe('should adduser created with htpasswd', () => {
    const user = 'preexisting';
    const pass = 'preexisting';

    before(function() {
    beforeAll(function() {
      return fs.appendFileSync(
        path.join(__dirname, '../store/test-storage', '.htpasswd'),
        'preexisting:$apr1$4YSboUa9$yVKjE7.PxIOuK3M4D7VjX.'
      );
    });

    it('should log in', function() {
    test('should log in', () => {
      return server.auth(user, pass)
        .status(201)
        .body_ok(/you are authenticated as/);
    });
  });
};
}
|
|
|
@ -1,13 +1,10 @@
|
|||
'use strict';
|
||||
export default function(server) {
|
||||
|
||||
module.exports = function() {
|
||||
let server = process.server;
|
||||
|
||||
describe('logout', function() {
|
||||
it('should log out', function() {
|
||||
describe('logout', () => {
|
||||
test('should log out', () => {
|
||||
return server.logout('some-token')
|
||||
.status(200)
|
||||
.body_ok(/Logged out/);
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,102 +1,96 @@
|
|||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const crypto = require('crypto');
|
||||
|
||||
import assert from 'assert';
|
||||
import crypto from 'crypto';
|
||||
|
||||
function readfile(folderPath) {
|
||||
return require('fs').readFileSync(__dirname + '/' + folderPath);
|
||||
}
|
||||
|
||||
function getPackage(name) {
|
||||
return require('./fixtures/package')(name);
|
||||
return require('../fixtures/package')(name);
|
||||
}
|
||||
|
||||
function createHash() {
|
||||
return crypto.createHash('sha1');
|
||||
}
|
||||
|
||||
module.exports = function () {
|
||||
let server = process.server;
|
||||
let server2 = process.server2;
|
||||
export default function(server, server2) {
|
||||
describe('basic test endpoints', () => {
|
||||
|
||||
describe('basic test endpoints', function () {
|
||||
require('./whoIam')(server);
|
||||
require('./ping')(server);
|
||||
|
||||
require('./basic/whoIam')(server);
|
||||
require('./basic/ping')(server);
|
||||
describe('handling packages', () => {
|
||||
|
||||
describe('handling packages', function () {
|
||||
|
||||
before(function () {
|
||||
beforeAll(function () {
|
||||
return server.addPackage('testpkg');
|
||||
});
|
||||
|
||||
before(function () {
|
||||
beforeAll(function () {
|
||||
return server.addPackage('testpkg-single-tarball');
|
||||
});
|
||||
|
||||
it('creating new package', function () {/* test for before() */
|
||||
test('creating new package', () => {/* test for before() */
|
||||
});
|
||||
|
||||
it('downloading non-existent tarball', function () {
|
||||
test('downloading non-existent tarball', () => {
|
||||
return server.getTarball('testpkg', 'blahblah').status(404).body_error(/no such file/);
|
||||
});
|
||||
|
||||
it('uploading incomplete tarball', function () {
|
||||
return server.putTarballIncomplete('testpkg', 'blahblah1', readfile('fixtures/binary'), 3000);
|
||||
test('uploading incomplete tarball', () => {
|
||||
return server.putTarballIncomplete('testpkg', 'blahblah1', readfile('../fixtures/binary'), 3000);
|
||||
});
|
||||
|
||||
describe('publishing package', function () {
|
||||
describe('publishing package', () => {
|
||||
|
||||
before(function () {
|
||||
return server.putTarball('testpkg', 'blahblah', readfile('fixtures/binary'))
|
||||
beforeAll(function () {
|
||||
return server.putTarball('testpkg', 'blahblah', readfile('../fixtures/binary'))
|
||||
.status(201)
|
||||
.body_ok(/.*/);
|
||||
});
|
||||
|
||||
before(function () {
|
||||
return server.putTarball('testpkg-single-tarball', 'single', readfile('fixtures/binary'))
|
||||
beforeAll(function () {
|
||||
return server.putTarball('testpkg-single-tarball', 'single', readfile('../fixtures/binary'))
|
||||
.status(201)
|
||||
.body_ok(/.*/);
|
||||
});
|
||||
|
||||
after(function () {
|
||||
afterAll(function () {
|
||||
return server.removeTarball('testpkg').status(201);
|
||||
});
|
||||
|
||||
it('remove a tarball', function () {
|
||||
test('remove a tarball', () => {
|
||||
/* test for before() */
|
||||
});
|
||||
|
||||
it('uploading new tarball', function () {
|
||||
test('uploading new tarball', () => {
|
||||
/* test for after() */
|
||||
});
|
||||
|
||||
it('remove non existing tarball', function () {
|
||||
test('remove non existing tarball', () => {
|
||||
return server.removeTarball('testpkg404').status(404);
|
||||
});
|
||||
|
||||
it('remove non existing single tarball', function () {
|
||||
test('remove non existing single tarball', () => {
|
||||
return server.removeSingleTarball('', 'fakeFile').status(404);
|
||||
});
|
||||
|
||||
// testexp-incomplete
|
||||
|
||||
it('remove existing single tarball', function () {
|
||||
test('remove existing single tarball', () => {
|
||||
return server.removeSingleTarball('testpkg-single-tarball', 'single').status(201);
|
||||
});
|
||||
|
||||
// testexp-incomplete
|
||||
|
||||
it('downloading newly created tarball', function () {
|
||||
test('downloading newly created tarball', () => {
|
||||
return server.getTarball('testpkg', 'blahblah')
|
||||
.status(200)
|
||||
.then(function (body) {
|
||||
assert.deepEqual(body, readfile('fixtures/binary'));
|
||||
assert.deepEqual(body, readfile('../fixtures/binary'));
|
||||
});
|
||||
});
|
||||
|
||||
it('uploading new package version (bad sha)', function () {
|
||||
test('uploading new package version (bad sha)', () => {
|
||||
let pkg = getPackage('testpkg');
|
||||
pkg.dist.shasum = createHash().update('fake').digest('hex');
|
||||
|
||||
|
@ -105,22 +99,22 @@ module.exports = function () {
|
|||
.body_error(/shasum error/);
|
||||
});
|
||||
|
||||
describe('publishing version', function () {
|
||||
describe('publishing version', () => {
|
||||
|
||||
before(function () {
|
||||
beforeAll(function () {
|
||||
const pkg = getPackage('testpkg');
|
||||
|
||||
pkg.dist.shasum = createHash().update(readfile('fixtures/binary')).digest('hex');
|
||||
pkg.dist.shasum = createHash().update(readfile('../fixtures/binary')).digest('hex');
|
||||
return server.putVersion('testpkg', '0.0.1', pkg)
|
||||
.status(201)
|
||||
.body_ok(/published/);
|
||||
});
|
||||
|
||||
it('uploading new package version', function () {
|
||||
test('uploading new package version', () => {
|
||||
/* test for before() */
|
||||
});
|
||||
|
||||
it('downloading newly created package', function () {
|
||||
test('downloading newly created package', () => {
|
||||
return server.getPackage('testpkg')
|
||||
.status(200)
|
||||
.then(function (body) {
|
||||
|
@ -133,7 +127,7 @@ module.exports = function () {
|
|||
});
|
||||
});
|
||||
|
||||
it('downloading package via server2', function () {
|
||||
test('downloading package via server2', () => {
|
||||
return server2.getPackage('testpkg')
|
||||
.status(200)
|
||||
.then(function (body) {
|
||||
|
@ -149,26 +143,29 @@ module.exports = function () {
|
|||
});
|
||||
});
|
||||
|
||||
describe('handle failures on endpoints', function () {
|
||||
describe('handle failures on endpoints', () => {
|
||||
|
||||
|
||||
it('should fails trying to fetch non-existent package', function () {
|
||||
test('should fails trying to fetch non-existent package', () => {
|
||||
return server.getPackage('testpkg').status(404).body_error(/no such package/);
|
||||
});
|
||||
|
||||
it('should fails on publish a version for non existing package', function () {
|
||||
return server.putVersion('testpxg', '0.0.1', getPackage('testpxg'))
|
||||
.status(404)
|
||||
.body_error(/no such package/);
|
||||
});
|
||||
test(
|
||||
'should fails on publish a version for non existing package',
|
||||
() => {
|
||||
return server.putVersion('testpxg', '0.0.1', getPackage('testpxg'))
|
||||
.status(404)
|
||||
.body_error(/no such package/);
|
||||
}
|
||||
);
|
||||
|
||||
it('should be a package not found', function () {
|
||||
return server.putTarball('nonExistingPackage', 'blahblah', readfile('fixtures/binary'))
|
||||
test('should be a package not found', () => {
|
||||
return server.putTarball('nonExistingPackage', 'blahblah', readfile('../fixtures/binary'))
|
||||
.status(404)
|
||||
.body_error(/no such/);
|
||||
});
|
||||
|
||||
it('should fails on publish package in a bad uplink', function () {
|
||||
test('should fails on publish package in a bad uplink', () => {
|
||||
return server.putPackage('baduplink', getPackage('baduplink'))
|
||||
.status(503)
|
||||
.body_error(/one of the uplinks is down, refuse to publish/);
|
||||
|
@ -176,5 +173,4 @@ module.exports = function () {
|
|||
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
}
|
|
@ -5,7 +5,7 @@ const _ = require('lodash');
|
|||
|
||||
module.exports = function(server) {
|
||||
|
||||
it('ping', function () {
|
||||
test('ping', () => {
|
||||
return server.ping().then(function (data) {
|
||||
// it's always an empty object
|
||||
assert.ok(_.isObject(data));
|
||||
|
|
|
@ -4,7 +4,7 @@ const assert = require('assert');
|
|||
|
||||
module.exports = function(server) {
|
||||
|
||||
it('who am I?', function () {
|
||||
test('who am I?', () => {
|
||||
return server.whoami().then(function (username) {
|
||||
assert.equal(username, 'test');
|
||||
});
|
||||
|
|
|
@ -33,9 +33,9 @@
|
|||
"tarball": "http://localhost:55551/__NAME__/-/blahblah"
|
||||
}
|
||||
},
|
||||
"1.1": {
|
||||
"1.1.0": {
|
||||
"name": "__NAME__",
|
||||
"version": "1.1",
|
||||
"version": "1.1.0",
|
||||
"dist": {
|
||||
"shasum": "fake",
|
||||
"tarball": "http://localhost:55551/__NAME__/-/blahblah"
|
||||
|
@ -43,7 +43,6 @@
|
|||
}
|
||||
},
|
||||
"dist-tags": {
|
||||
"something": "0.1.1alpha",
|
||||
"bad": "1.1"
|
||||
"latest": "1.1.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,48 +1,44 @@
|
|||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const crypto = require('crypto');
|
||||
import assert from 'assert';
|
||||
import crypto from 'crypto';
|
||||
|
||||
function readfile(x) {
|
||||
return require('fs').readFileSync(__dirname + '/' + x);
|
||||
}
|
||||
|
||||
module.exports = function() {
|
||||
const server = process.server;
|
||||
const server2 = process.server2;
|
||||
export default function (server, server2) {
|
||||
|
||||
it('downloading non-existent tarball #1 / srv2', function() {
|
||||
test('downloading non-existent tarball #1 / srv2', () => {
|
||||
return server2.getTarball('testpkg-gh29', 'blahblah')
|
||||
.status(404)
|
||||
.body_error(/no such package/);
|
||||
});
|
||||
|
||||
describe('pkg-gh29', function() {
|
||||
before(function() {
|
||||
describe('pkg-gh29', () => {
|
||||
beforeAll(function() {
|
||||
return server.putPackage('testpkg-gh29', require('./fixtures/package')('testpkg-gh29'))
|
||||
.status(201)
|
||||
.body_ok(/created new package/);
|
||||
});
|
||||
|
||||
it('creating new package / srv1', function() {});
|
||||
test('creating new package / srv1', () => {});
|
||||
|
||||
it('downloading non-existent tarball #2 / srv2', function() {
|
||||
test('downloading non-existent tarball #2 / srv2', () => {
|
||||
return server2.getTarball('testpkg-gh29', 'blahblah')
|
||||
.status(404)
|
||||
.body_error(/no such file/);
|
||||
});
|
||||
|
||||
describe('tarball', function() {
|
||||
before(function() {
|
||||
describe('tarball', () => {
|
||||
beforeAll(function() {
|
||||
return server.putTarball('testpkg-gh29', 'blahblah', readfile('fixtures/binary'))
|
||||
.status(201)
|
||||
.body_ok(/.*/);
|
||||
});
|
||||
|
||||
it('uploading new tarball / srv1', function() {});
|
||||
test('uploading new tarball / srv1', () => {});
|
||||
|
||||
describe('pkg version', function() {
|
||||
before(function() {
|
||||
describe('pkg version', () => {
|
||||
beforeAll(function() {
|
||||
const pkg = require('./fixtures/package')('testpkg-gh29');
|
||||
|
||||
pkg.dist.shasum = crypto.createHash('sha1').update(readfile('fixtures/binary')).digest('hex');
|
||||
|
@ -51,9 +47,9 @@ module.exports = function() {
|
|||
.body_ok(/published/);
|
||||
});
|
||||
|
||||
it('uploading new package version / srv1', function() {});
|
||||
test('uploading new package version / srv1', () => {});
|
||||
|
||||
it('downloading newly created tarball / srv2', function() {
|
||||
test('downloading newly created tarball / srv2', () => {
|
||||
return server2.getTarball('testpkg-gh29', 'blahblah')
|
||||
.status(200)
|
||||
.then(function(body) {
|
||||
|
@ -63,5 +59,4 @@ module.exports = function() {
|
|||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
}
|
||||
|
|
116
test/functional/index.func.js
Normal file
116
test/functional/index.func.js
Normal file
|
@ -0,0 +1,116 @@
|
|||
// @flow
|
||||
import _ from 'lodash';
|
||||
|
||||
// we need this for notifications
|
||||
import {setup} from '../../src/lib/logger';
|
||||
setup();
|
||||
|
||||
import {VerdaccioConfig} from './lib/verdaccio-server';
|
||||
import VerdaccioProcess from './lib/server_process';
|
||||
import ExpressServer from './lib/simple_server';
|
||||
import Server from './lib/server';
|
||||
import type {IServerProcess, IServerBridge} from './lib/types';
|
||||
|
||||
import basic from './basic/basic.spec';
|
||||
import packageAccess from './package/access.spec';
|
||||
import packageGzip from './package/gzip.spec';
|
||||
import packageScoped from './package/scoped.spec';
|
||||
import tags from './tags/tags.spec';
|
||||
import preserveTags from './tags/preserve_tags.spec';
|
||||
import addtag from './tags/addtag.spec';
|
||||
import adduser from './adduser/adduser';
|
||||
import logout from './adduser/logout';
|
||||
import notify from './notifications/notify';
|
||||
import incomplete from './sanity/incomplete';
|
||||
import mirror from './sanity/mirror';
|
||||
import readme from './readme/readme.spec';
|
||||
import gh29 from './gh29';
|
||||
import nullstorage from './sanity/nullstorage';
|
||||
import racycrash from './sanity/racycrash';
|
||||
import security from './sanity/security';
|
||||
import race from './performance/race';
|
||||
import pluginsAuth from './plugins/auth.spec';
|
||||
import upLinkCache from './uplink.cache.spec';
|
||||
import upLinkAuth from './uplink.auth.spec';
|
||||
|
||||
describe('functional test verdaccio', function() {
|
||||
const EXPRESS_PORT = 55550;
|
||||
const SILENCE_LOG = !process.env.VERDACCIO_DEBUG;
|
||||
const processRunning = [];
|
||||
const config1 = new VerdaccioConfig(
|
||||
'./store/test-storage',
|
||||
'./store/config-1.yaml',
|
||||
'http://localhost:55551/');
|
||||
const config2 = new VerdaccioConfig(
|
||||
'./store/test-storage2',
|
||||
'./store/config-2.yaml',
|
||||
'http://localhost:55552/');
|
||||
const config3 = new VerdaccioConfig(
|
||||
'./store/test-storage3',
|
||||
'./store/config-3.yaml',
|
||||
'http://localhost:55553/');
|
||||
const server1: IServerBridge = new Server(config1.domainPath);
|
||||
const server2: IServerBridge = new Server(config2.domainPath);
|
||||
const server3: IServerBridge = new Server(config3.domainPath);
|
||||
const process1: IServerProcess = new VerdaccioProcess(config1, server1, SILENCE_LOG);
|
||||
const process2: IServerProcess = new VerdaccioProcess(config2, server2, SILENCE_LOG);
|
||||
const process3: IServerProcess = new VerdaccioProcess(config3, server3, SILENCE_LOG);
|
||||
const express: any = new ExpressServer();
|
||||
|
||||
beforeAll((done) => {
|
||||
Promise.all([
|
||||
process1.init(),
|
||||
process2.init(),
|
||||
process3.init()]).then((forks) => {
|
||||
_.map(forks, (fork) => {
|
||||
processRunning.push(fork[0]);
|
||||
});
|
||||
express.start(EXPRESS_PORT).then((app) =>{
|
||||
done();
|
||||
}, (err) => {
|
||||
done(err);
|
||||
});
|
||||
}).catch((error) => {
|
||||
done(error);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
_.map(processRunning, (fork) => {
|
||||
fork.stop();
|
||||
});
|
||||
express.server.close();
|
||||
});
|
||||
|
||||
// list of test
|
||||
// note: order of the following calls is important
|
||||
packageAccess(server1);
|
||||
basic(server1, server2);
|
||||
gh29(server1, server2);
|
||||
tags(server1, express.app);
|
||||
packageGzip(server1, express.app);
|
||||
incomplete(server1, express.app);
|
||||
mirror(server1, server2);
|
||||
preserveTags(server1, server2, express.app);
|
||||
readme(server1, server2);
|
||||
nullstorage(server1, server2);
|
||||
race(server1);
|
||||
racycrash(server1, express.app);
|
||||
packageScoped(server1, server2);
|
||||
security(server1);
|
||||
addtag(server1);
|
||||
pluginsAuth(server2);
|
||||
notify(express.app);
|
||||
// requires packages published to server1/server2
|
||||
upLinkCache(server1, server2, server3);
|
||||
upLinkAuth();
|
||||
adduser(server1);
|
||||
logout(server1);
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', function(err) {
|
||||
console.error("unhandledRejection", err);
|
||||
process.nextTick(function() {
|
||||
throw err;
|
||||
});
|
||||
});
|
|
@ -1,123 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
require('./lib/startup');
|
||||
|
||||
const _ = require('lodash');
|
||||
const assert = require('assert');
|
||||
const exec = require('child_process').exec;
|
||||
|
||||
describe('Create registry servers', function() {
|
||||
const server = process.server;
|
||||
const server2 = process.server2;
|
||||
const server3 = process.server3;
|
||||
|
||||
before(function(done) {
|
||||
Promise.all([
|
||||
require('./lib/startup').start('./store/test-storage', '/store/config-1.yaml'),
|
||||
require('./lib/startup').start('./store/test-storage2', '/store/config-2.yaml'),
|
||||
require('./lib/startup').start('./store/test-storage3', '/store/config-3.yaml'),
|
||||
]).then(() => {
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
before(function() {
|
||||
return Promise.all([server, server2, server3].map(function(server) {
|
||||
return server.debug().status(200).then(function(body) {
|
||||
server.pid = body.pid;
|
||||
|
||||
return new Promise(function(resolve, reject) {
|
||||
exec('lsof -p ' + Number(server.pid), function(err, result) {
|
||||
if (_.isNil(err) === false) {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
assert.equal(err, null);
|
||||
server.fdlist = result.replace(/ +/g, ' ');
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
}));
|
||||
});
|
||||
|
||||
before(function testBasicAuthentication() {
|
||||
return Promise.all([server, server2, server3].map(function(server) {
|
||||
// log in on server1
|
||||
return server.auth('test', 'test')
|
||||
.status(201)
|
||||
.body_ok(/'test'/);
|
||||
|
||||
}));
|
||||
});
|
||||
|
||||
it('authenticate', function() {
|
||||
/* test for before() */
|
||||
});
|
||||
|
||||
require('./package/access')();
|
||||
require('./basic')();
|
||||
require('./gh29')();
|
||||
require('./tags/tags')();
|
||||
require('./package/gzip.spec')();
|
||||
require('./sanity/incomplete')();
|
||||
require('./sanity/mirror')();
|
||||
require('./tags/preserve_tags.spec')();
|
||||
require('./readme/readme.spec')();
|
||||
require('./sanity/nullstorage')();
|
||||
require('./performance/race')();
|
||||
require('./sanity/racycrash')();
|
||||
require('./package/scoped.spec')();
|
||||
require('./sanity/security')();
|
||||
require('./adduser/adduser')();
|
||||
require('./adduser/logout')();
|
||||
require('./tags/addtag.spec')();
|
||||
require('./plugins/auth.spec')();
|
||||
require('./plugins/middleware.spec')();
|
||||
require('./notifications/notify')();
|
||||
// requires packages published to server1/server2
|
||||
require('./uplink.cache.spec')();
|
||||
require('./uplink.auth.spec')();
|
||||
|
||||
after(function(done) {
|
||||
const check = (server) => {
|
||||
return new Promise(function(resolve, reject) {
|
||||
exec(`lsof -p ${parseInt(server.pid, 10)}`, function(err, result) {
|
||||
if (err) {
|
||||
reject();
|
||||
} else {
|
||||
result = result.split('\n').filter(function(query) {
|
||||
if (query.match(/TCP .*->.* \(ESTABLISHED\)/)) {
|
||||
return false;
|
||||
}
|
||||
if (query.match(/\/libcrypt-[^\/]+\.so/)) {
|
||||
return false;
|
||||
}
|
||||
if (query.match(/\/node_modules\/crypt3\/build\/Release/)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}).join('\n').replace(/ +/g, ' ');
|
||||
assert.equal(server.fdlist, result);
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
Promise.all([check(server), check(server2), check(server3)]).then(function() {
|
||||
done();
|
||||
}, (reason) => {
|
||||
assert.equal(reason, null);
|
||||
done();
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', function(err) {
|
||||
process.nextTick(function() {
|
||||
throw err;
|
||||
});
|
||||
});
|
|
@ -1,17 +1,20 @@
|
|||
'use strict';
|
||||
// @flow
|
||||
|
||||
import assert from 'assert';
|
||||
import request from 'request';
|
||||
import _ from 'lodash';
|
||||
import type {IRequestPromise} from './types';
|
||||
|
||||
const assert = require('assert');
|
||||
const request = require('request');
|
||||
const requestData = Symbol('smart_request_data');
|
||||
const _ = require('lodash');
|
||||
|
||||
class PromiseAssert extends Promise {
|
||||
export class PromiseAssert extends Promise<any> implements IRequestPromise{
|
||||
|
||||
constructor(options) {
|
||||
constructor(options: any) {
|
||||
super(options);
|
||||
}
|
||||
|
||||
status(expected) {
|
||||
status(expected: number) {
|
||||
// $FlowFixMe
|
||||
const selfData = this[requestData];
|
||||
|
||||
return injectResponse(this, this.then(function(body) {
|
||||
|
@ -25,8 +28,9 @@ class PromiseAssert extends Promise {
|
|||
}));
|
||||
}
|
||||
|
||||
body_ok(expected) {
|
||||
const self_data = this[requestData];
|
||||
body_ok(expected: any) {
|
||||
// $FlowFixMe
|
||||
const selfData = this[requestData];
|
||||
|
||||
return injectResponse(this, this.then(function(body) {
|
||||
try {
|
||||
|
@ -37,18 +41,20 @@ class PromiseAssert extends Promise {
|
|||
}
|
||||
assert.equal(body.error, null);
|
||||
} catch(err) {
|
||||
self_data.error.message = err.message;
|
||||
throw self_data.error;
|
||||
selfData.error.message = err.message;
|
||||
throw selfData.error;
|
||||
}
|
||||
return body;
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
body_error(expected) {
|
||||
const self_data = this[requestData];
|
||||
body_error(expected: any) {
|
||||
// $FlowFixMe
|
||||
const selfData = this[requestData];
|
||||
|
||||
return injectResponse(this, this.then(function(body) {
|
||||
// console.log("======>smartRequest body_error://", body);
|
||||
try {
|
||||
if (_.isRegExp(expected)) {
|
||||
assert(body.error.match(expected), body.error + ' doesn\'t match ' + expected);
|
||||
|
@ -57,19 +63,21 @@ class PromiseAssert extends Promise {
|
|||
}
|
||||
assert.equal(body.ok, null);
|
||||
} catch(err) {
|
||||
self_data.error.message = err.message;
|
||||
throw self_data.error;
|
||||
selfData.error.message = err.message;
|
||||
throw selfData.error;
|
||||
}
|
||||
return body;
|
||||
}));
|
||||
}
|
||||
|
||||
request(callback) {
|
||||
request(callback: any) {
|
||||
// $FlowFixMe
|
||||
callback(this[requestData].request);
|
||||
return this;
|
||||
}
|
||||
|
||||
response(cb) {
|
||||
response(cb: any) {
|
||||
// $FlowFixMe
|
||||
const selfData = this[requestData];
|
||||
|
||||
return injectResponse(this, this.then(function(body) {
|
||||
|
@ -78,26 +86,30 @@ class PromiseAssert extends Promise {
|
|||
}));
|
||||
}
|
||||
|
||||
send(data) {
|
||||
send(data: any) {
|
||||
// $FlowFixMe
|
||||
this[requestData].request.end(data);
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function injectResponse(smartObject, promise) {
|
||||
function injectResponse(smartObject: any, promise: Promise<any>): Promise<any> {
|
||||
// $FlowFixMe
|
||||
promise[requestData] = smartObject[requestData];
|
||||
return promise;
|
||||
}
|
||||
|
||||
function smartRequest(options) {
|
||||
const smartObject = {};
|
||||
|
||||
function smartRequest(options: any): Promise<any> {
|
||||
const smartObject: any = {};
|
||||
|
||||
smartObject[requestData] = {};
|
||||
smartObject[requestData].error = Error();
|
||||
Error.captureStackTrace(smartObject[requestData].error, smartRequest);
|
||||
|
||||
const result = new PromiseAssert(function(resolve, reject) {
|
||||
const promiseResult: Promise<any> = new PromiseAssert(function(resolve, reject) {
|
||||
// store request reference on symbol
|
||||
smartObject[requestData].request = request(options, function(err, res, body) {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
|
@ -105,12 +117,13 @@ function smartRequest(options) {
|
|||
|
||||
// store the response on symbol
|
||||
smartObject[requestData].response = res;
|
||||
// console.log("======>smartRequest RESPONSE: ", body);
|
||||
resolve(body);
|
||||
});
|
||||
});
|
||||
|
||||
return injectResponse(smartObject, result);
|
||||
return injectResponse(smartObject, promiseResult);
|
||||
}
|
||||
|
||||
module.exports = smartRequest;
|
||||
export default smartRequest;
|
||||
|
||||
|
|
|
@ -1,29 +1,35 @@
|
|||
'use strict';
|
||||
// @flow
|
||||
|
||||
const assert = require('assert');
|
||||
const request = require('./request');
|
||||
const _ = require('lodash');
|
||||
import _ from 'lodash';
|
||||
import assert from 'assert';
|
||||
import smartRequest from './request';
|
||||
import type {IServerBridge} from './types';
|
||||
|
||||
const buildAuthHeader = (user, pass) => {
|
||||
const buildAuthHeader = (user, pass): string => {
|
||||
return `Basic ${(new Buffer(`${user}:${pass}`)).toString('base64')}`;
|
||||
};
|
||||
|
||||
class Server {
|
||||
export default class Server implements IServerBridge {
|
||||
url: string;
|
||||
userAgent: string;
|
||||
authstr: string;
|
||||
|
||||
constructor(url) {
|
||||
constructor(url: string) {
|
||||
this.url = url.replace(/\/$/, '');
|
||||
this.userAgent = 'node/v8.1.2 linux x64';
|
||||
this.authstr = buildAuthHeader('test', 'test');
|
||||
}
|
||||
|
||||
request(options) {
|
||||
request(options: any): any {
|
||||
// console.log("--->$$$$ REQUEST", options);
|
||||
assert(options.uri);
|
||||
const headers = options.headers || {};
|
||||
|
||||
headers.accept = headers.accept || 'application/json';
|
||||
headers['user-agent'] = headers['user-agent'] || this.userAgent;
|
||||
headers.authorization = headers.authorization || this.authstr;
|
||||
|
||||
return request({
|
||||
return smartRequest({
|
||||
url: this.url + options.uri,
|
||||
method: options.method || 'GET',
|
||||
headers: headers,
|
||||
|
@ -32,7 +38,7 @@ class Server {
|
|||
});
|
||||
}
|
||||
|
||||
auth(name, password) {
|
||||
auth(name: string, password: string) {
|
||||
this.authstr = buildAuthHeader(name, password);
|
||||
return this.request({
|
||||
uri: `/-/user/org.couchdb.user:${encodeURIComponent(name)}/-rev/undefined`,
|
||||
|
@ -49,7 +55,7 @@ class Server {
|
|||
});
|
||||
}
|
||||
|
||||
logout(token) {
|
||||
logout(token: string) {
|
||||
return this.request({
|
||||
uri: `/-/user/token/${encodeURIComponent(token)}`,
|
||||
method: 'DELETE',
|
||||
|
@ -57,14 +63,14 @@ class Server {
|
|||
}
|
||||
|
||||
|
||||
getPackage(name) {
|
||||
getPackage(name: string) {
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}`,
|
||||
method: 'GET',
|
||||
});
|
||||
}
|
||||
|
||||
putPackage(name, data) {
|
||||
putPackage(name: string, data) {
|
||||
if (_.isObject(data) && !Buffer.isBuffer(data)) {
|
||||
data = JSON.stringify(data);
|
||||
}
|
||||
|
@ -77,10 +83,11 @@ class Server {
|
|||
}).send(data);
|
||||
}
|
||||
|
||||
putVersion(name, version, data) {
|
||||
putVersion(name: string, version: string, data: any) {
|
||||
if (_.isObject(data) && !Buffer.isBuffer(data)) {
|
||||
data = JSON.stringify(data);
|
||||
}
|
||||
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}/${encodeURIComponent(version)}/-tag/latest`,
|
||||
method: 'PUT',
|
||||
|
@ -90,7 +97,7 @@ class Server {
|
|||
}).send(data);
|
||||
}
|
||||
|
||||
getTarball(name, filename) {
|
||||
getTarball(name: string, filename: string) {
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}/-/${encodeURIComponent(filename)}`,
|
||||
method: 'GET',
|
||||
|
@ -98,7 +105,7 @@ class Server {
|
|||
});
|
||||
}
|
||||
|
||||
putTarball(name, filename, data) {
|
||||
putTarball(name: string, filename: string, data: any) {
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}/-/${encodeURIComponent(filename)}/whatever`,
|
||||
method: 'PUT',
|
||||
|
@ -108,7 +115,7 @@ class Server {
|
|||
}).send(data);
|
||||
}
|
||||
|
||||
removeTarball(name) {
|
||||
removeTarball(name: string) {
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}/-rev/whatever`,
|
||||
method: 'DELETE',
|
||||
|
@ -118,7 +125,7 @@ class Server {
|
|||
});
|
||||
}
|
||||
|
||||
removeSingleTarball(name, filename) {
|
||||
removeSingleTarball(name: string, filename: string) {
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}/-/${filename}/-rev/whatever`,
|
||||
method: 'DELETE',
|
||||
|
@ -129,7 +136,7 @@ class Server {
|
|||
}
|
||||
|
||||
|
||||
addTag(name, tag, version) {
|
||||
addTag(name: string, tag: string, version: string) {
|
||||
return this.request({
|
||||
uri: `/${encodeURIComponent(name)}/${encodeURIComponent(tag)}`,
|
||||
method: 'PUT',
|
||||
|
@ -139,7 +146,7 @@ class Server {
|
|||
}).send(JSON.stringify(version));
|
||||
}
|
||||
|
||||
putTarballIncomplete(name, filename, data, size, cb) {
|
||||
putTarballIncomplete(name: string, filename: string, data: any, size: number, cb: Function) {
|
||||
let promise = this.request({
|
||||
uri: `/${encodeURIComponent(name)}/-/${encodeURIComponent(filename)}/whatever`,
|
||||
method: 'PUT',
|
||||
|
@ -172,7 +179,7 @@ class Server {
|
|||
});
|
||||
}
|
||||
|
||||
addPackage(name) {
|
||||
addPackage(name: string) {
|
||||
return this.putPackage(name, require('../fixtures/package')(name))
|
||||
.status(201)
|
||||
.body_ok('created new package');
|
||||
|
@ -205,7 +212,4 @@ class Server {
|
|||
},
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = Server;
|
||||
|
|
73
test/functional/lib/server_process.js
Normal file
73
test/functional/lib/server_process.js
Normal file
|
@ -0,0 +1,73 @@
|
|||
// @flow
|
||||
import _ from 'lodash';
|
||||
import rimRaf from 'rimraf';
|
||||
import path from 'path';
|
||||
import {fork} from 'child_process';
|
||||
import type {IVerdaccioConfig, IServerBridge, IServerProcess} from './types';
|
||||
|
||||
export default class VerdaccioProcess implements IServerProcess {
|
||||
|
||||
bridge: IServerBridge;
|
||||
config: IVerdaccioConfig;
|
||||
childFork: any;
|
||||
silence: boolean;
|
||||
|
||||
constructor(config: IVerdaccioConfig, bridge: IServerBridge, silence: boolean = true) {
|
||||
this.config = config;
|
||||
this.bridge = bridge;
|
||||
this.silence = silence;
|
||||
}
|
||||
|
||||
init(): Promise<any> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const verdaccioRegisterWrap: string = path.join(__dirname, '../../../bin/verdaccio');
|
||||
const storageDir: string = path.join(__dirname, `/../${this.config.storagePath}`);
|
||||
const configPath: string = path.join(__dirname, '../', this.config.configPath);
|
||||
|
||||
rimRaf(storageDir, (err) => {
|
||||
if (_.isNil(err) === false) {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
this.childFork = fork(verdaccioRegisterWrap,
|
||||
['-c', configPath],
|
||||
{
|
||||
silent: this.silence
|
||||
}
|
||||
);
|
||||
|
||||
this.childFork.on('message', (msg) => {
|
||||
if ('verdaccio_started' in msg) {
|
||||
this.bridge.debug().status(200).then((body) => {
|
||||
this.bridge.auth('test', 'test')
|
||||
.status(201)
|
||||
.body_ok(/'test'/)
|
||||
.then(() => {
|
||||
resolve([this, body.pid]);
|
||||
}, reject)
|
||||
}, reject);
|
||||
}
|
||||
});
|
||||
|
||||
this.childFork.on('error', function(err) {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
this.childFork.on('disconnect', function(err) {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
this.childFork.on('exit', function(err) {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
stop(): void {
|
||||
return this.childFork.kill('SIGINT');
|
||||
}
|
||||
|
||||
}
|
26
test/functional/lib/simple_server.js
Normal file
26
test/functional/lib/simple_server.js
Normal file
|
@ -0,0 +1,26 @@
|
|||
// @flow
|
||||
import express from 'express';
|
||||
import bodyParser from 'body-parser';
|
||||
|
||||
export default class ExpressServer {
|
||||
app: any;
|
||||
server: any;
|
||||
|
||||
constructor() {
|
||||
this.app = express();
|
||||
this.server;
|
||||
}
|
||||
|
||||
start(port: number): Promise<any> {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.app.use(bodyParser.json());
|
||||
this.app.use(bodyParser.urlencoded({
|
||||
extended: true
|
||||
}));
|
||||
|
||||
this.server = this.app.listen(port, function starExpressServer() {
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
|
@ -1,79 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const _ = require('lodash');
|
||||
const fork = require('child_process').fork;
|
||||
const bodyParser = require('body-parser');
|
||||
const express = require('express');
|
||||
const rimRaf = require('rimraf');
|
||||
const path = require('path');
|
||||
const Server = require('./server');
|
||||
|
||||
const forks = process.forks = [];
|
||||
process.server = new Server('http://localhost:55551/');
|
||||
process.server2 = new Server('http://localhost:55552/');
|
||||
process.server3 = new Server('http://localhost:55553/');
|
||||
const app = express();
|
||||
app.use(bodyParser.json());
|
||||
app.use(bodyParser.urlencoded({
|
||||
extended: true
|
||||
}));
|
||||
process.express = app;
|
||||
process.express.listen(55550);
|
||||
|
||||
module.exports.start = function(dir, conf) {
|
||||
return new Promise(function(resolve, reject) {
|
||||
const storageDir = path.join(__dirname, `/../${dir}`);
|
||||
const configPath = path.join(__dirname, '../', conf);
|
||||
rimRaf(storageDir, function(err) {
|
||||
if(_.isNil(err) === false) {
|
||||
reject(err);
|
||||
}
|
||||
const filteredArguments = process.execArgv = process.execArgv.filter(function(x) {
|
||||
// filter out --debug-brk and --inspect-brk since Node7
|
||||
return (x.indexOf('--debug-brk') === -1 && x.indexOf('--inspect-brk') === -1);
|
||||
});
|
||||
|
||||
const childFork = fork(__dirname + '/../../../bin/verdaccio',
|
||||
['-c', configPath],
|
||||
{
|
||||
silent: !process.env.TRAVIS
|
||||
// silent: false
|
||||
}
|
||||
);
|
||||
|
||||
forks.push(childFork);
|
||||
|
||||
childFork.on('message', function(msg) {
|
||||
if ('verdaccio_started' in msg) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
|
||||
childFork.on('error', function(err) {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
childFork.on('disconnect', function(err) {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
childFork.on('exit', function(err) {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
process.execArgv = filteredArguments;
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
process.on('exit', function() {
|
||||
if (_.isNil(forks[0]) === false) {
|
||||
forks[0].kill();
|
||||
}
|
||||
if (_.isNil(forks[1]) === false) {
|
||||
forks[1].kill();
|
||||
}
|
||||
if (_.isNil(forks[2]) === false) {
|
||||
forks[2].kill();
|
||||
}
|
||||
});
|
|
@ -1,13 +1,14 @@
|
|||
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
exports.generateSha = function generateSha(key) {
|
||||
function generateSha(key) {
|
||||
return crypto.createHash('sha1', 'binary').update(key).digest('hex');
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
exports.readFile = function readFile(filePath) {
|
||||
function readFile(filePath) {
|
||||
return fs.readFileSync(path.join(__dirname, `/${filePath}`));
|
||||
}
|
||||
|
||||
export { generateSha, readFile }
|
||||
|
|
52
test/functional/lib/types.js
Normal file
52
test/functional/lib/types.js
Normal file
|
@ -0,0 +1,52 @@
|
|||
// @flow
|
||||
|
||||
export interface IVerdaccioConfig {
|
||||
storagePath: string;
|
||||
configPath: string;
|
||||
domainPath: string;
|
||||
}
|
||||
|
||||
export interface IRequestPromise {
|
||||
status(reason: any): any;
|
||||
body_ok(reason: any): any;
|
||||
body_error(reason: any): any;
|
||||
request(reason: any): any;
|
||||
response(reason: any): any;
|
||||
send(reason: any): any;
|
||||
}
|
||||
|
||||
export interface IServerProcess {
|
||||
bridge: IServerBridge;
|
||||
config: IVerdaccioConfig;
|
||||
childFork: any;
|
||||
silence: boolean;
|
||||
init(): Promise<any>;
|
||||
stop(): void;
|
||||
}
|
||||
|
||||
declare class verdaccio$PromiseAssert<IRequestPromise> extends Promise<any> {
|
||||
constructor(options: any): IRequestPromise;
|
||||
}
|
||||
|
||||
export interface IServerBridge {
|
||||
url: string;
|
||||
userAgent: string;
|
||||
authstr: string;
|
||||
request(options: any): typeof verdaccio$PromiseAssert;
|
||||
auth(name: string, password: string): IRequestPromise;
|
||||
logout(token: string): Promise<any>;
|
||||
auth(name: string, password: string): IRequestPromise;
|
||||
getPackage(name: string): Promise<any>;
|
||||
putPackage(name: string, data: any): Promise<any>;
|
||||
putVersion(name: string, version: string, data: any): Promise<any>;
|
||||
getTarball(name: string, filename: string): Promise<any>;
|
||||
putTarball(name: string, filename: string, data: any): Promise<any>;
|
||||
removeTarball(name: string): Promise<any>;
|
||||
removeSingleTarball(name: string, filename: string): Promise<any>;
|
||||
addTag(name: string, tag: string, version: string): Promise<any>;
|
||||
putTarballIncomplete(name: string, filename: string, data: any, size: number, cb: Function): Promise<any>;
|
||||
addPackage(name: string): Promise<any>;
|
||||
whoami(): Promise<any>;
|
||||
ping(): Promise<any>;
|
||||
debug(): IRequestPromise;
|
||||
}
|
15
test/functional/lib/verdaccio-server.js
Normal file
15
test/functional/lib/verdaccio-server.js
Normal file
|
@ -0,0 +1,15 @@
|
|||
// @flow
|
||||
import type {IVerdaccioConfig} from './types';
|
||||
|
||||
export class VerdaccioConfig implements IVerdaccioConfig {
|
||||
|
||||
storagePath: string;
|
||||
configPath: string;
|
||||
domainPath: string;
|
||||
|
||||
constructor(storagePath: string, configPath: string, domainPath: string) {
|
||||
this.storagePath = storagePath;
|
||||
this.configPath = configPath;
|
||||
this.domainPath = domainPath;
|
||||
}
|
||||
}
|
|
@@ -1,12 +1,9 @@
-'use strict';
-
-const assert = require('assert');
-const _ = require('lodash');
-const notify = require('../../../src/lib/notify').notify;
-
-module.exports = function() {
-  const express = process.express;
+import assert from 'assert';
+import _ from 'lodash';
+
+import {notify} from '../../../src/lib/notify';
+
+export default function(express) {
   const config = {
     notify: {
       method: 'POST',
@@ -18,9 +15,9 @@ module.exports = function() {
     }
   };
 
-  describe('notifications', function () {
+  describe('notifications', () => {
 
-    before(function () {
+    beforeAll(function () {
       express.post('/api/notify', function (req, res) {
         res.send(req.body);
       });
@@ -30,7 +27,7 @@ module.exports = function() {
       });
     });
 
-    it('notification should be send', function (done) {
+    test('notification should be send', done => {
       const metadata = {
         name: "pkg-test"
       };
@@ -46,7 +43,7 @@ module.exports = function() {
       });
     });
 
-    it('notification should be send single header', function (done) {
+    test('notification should be send single header', done => {
       const metadata = {
         name: "pkg-test"
       };
@@ -67,23 +64,25 @@ module.exports = function() {
       });
     });
 
-    it('notification should be send multiple notifications endpoints', function (done) {
-      const metadata = {
-        name: "pkg-test"
-      };
-      // let notificationsCounter = 0;
+    test(
+      'notification should be send multiple notifications endpoints',
+      done => {
+        const metadata = {
+          name: "pkg-test"
+        };
+        // let notificationsCounter = 0;
 
-      const multipleNotificationsEndpoint = {
-        notify: []
-      };
+        const multipleNotificationsEndpoint = {
+          notify: []
+        };
 
-      for (let i = 0; i < 10; i++) {
-        const notificationSettings = _.cloneDeep(config.notify);
-        // basically we allow al notifications
-        notificationSettings.packagePattern = /^pkg-test$/;
-        // notificationSettings.packagePatternFlags = 'i';
-        multipleNotificationsEndpoint.notify.push(notificationSettings);
-      }
+        for (let i = 0; i < 10; i++) {
+          const notificationSettings = _.cloneDeep(config.notify);
+          // basically we allow al notifications
+          notificationSettings.packagePattern = /^pkg-test$/;
+          // notificationSettings.packagePatternFlags = 'i';
+          multipleNotificationsEndpoint.notify.push(notificationSettings);
+        }
 
       notify(metadata, multipleNotificationsEndpoint).then(function (body) {
         body.forEach(function(notification) {
@@ -98,7 +97,7 @@ module.exports = function() {
       });
     });
 
-    it('notification should fails', function (done) {
+    test('notification should fails', done => {
       const metadata = {
         name: "pkg-test"
       };
@@ -115,4 +114,4 @@ module.exports = function() {
     });
 
   });
-};
+}

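The pattern in this file repeats across the rest of the functional suite: CommonJS modules that pulled their handles from `process.server` / `process.express` become ES modules that receive them as parameters, and mocha's `it` / `before` / `after` become jest's `test` / `beforeAll` / `afterAll`. A minimal standalone jest suite showing the target shape; the names and assertions below are illustrative only, not code from this diff:

// Minimal jest-style suite (illustration only; names and assertions are not from the diff).
describe('notifications', () => {
  let endpoint;

  beforeAll(() => {
    // in the real test this registers a POST handler on the injected express app
    endpoint = (body) => Promise.resolve(body);
  });

  test('posts the package name to the configured endpoint', done => {
    endpoint({name: 'pkg-test'}).then(body => {
      expect(body.name).toBe('pkg-test');
      done();
    });
  });
});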
@@ -1,19 +1,16 @@
-'use strict';
+export default function(server) {
 
-module.exports = function() {
-  describe('package access control', function() {
-
-    const server = process.server;
+  describe('package access control', () => {
     const buildToken = (auth) => {
       return `Basic ${(new Buffer(auth).toString('base64'))}`;
     };
     let oldAuth;
 
-    before(function() {
+    beforeAll(function() {
       oldAuth = server.authstr;
     });
 
-    after(function() {
+    afterAll(function() {
       server.authstr = oldAuth;
     });
 
@@ -24,15 +21,18 @@ module.exports = function() {
     * @param ok {boolean}
     */
    function checkAccess(auth, pkg, ok) {
-      it((ok ? 'allows' : 'forbids') +' access ' + auth + ' to ' + pkg, function() {
-        server.authstr = auth ? buildToken(auth) : undefined;
-        let req = server.getPackage(pkg);
-        if (ok) {
-          return req.status(404).body_error(/no such package available/);
-        } else {
-          return req.status(403).body_error(/not allowed to access package/);
-        }
-      });
+      test(
+        (ok ? 'allows' : 'forbids') +' access ' + auth + ' to ' + pkg,
+        () => {
+          server.authstr = auth ? buildToken(auth) : undefined;
+          let req = server.getPackage(pkg);
+          if (ok) {
+            return req.status(404).body_error(/no such package available/);
+          } else {
+            return req.status(403).body_error(/not allowed to access package/);
+          }
+        }
+      );
    }
 
    /**
@@ -42,7 +42,7 @@ module.exports = function() {
     * @param ok {boolean}
     */
    function checkPublish(auth, pkg, ok) {
-      it(`${(ok ? 'allows' : 'forbids')} publish ${auth} to ${pkg}`, function() {
+      test(`${(ok ? 'allows' : 'forbids')} publish ${auth} to ${pkg}`, () => {
        server.authstr = auth ? buildToken(auth) : undefined;
        const req = server.putPackage(pkg, require('../fixtures/package')(pkg));
        if (ok) {
@@ -72,7 +72,7 @@ module.exports = function() {
    checkPublish(undefined, testAccessOnly, false);
    checkPublish(badCredentials, testAccessOnly, false);
 
-    // all are allowed to publish
+    // // all are allowed to publish
    checkAccess(validCredentials, testPublishOnly, false);
    checkAccess(undefined, testPublishOnly, false);
    checkAccess(badCredentials, testPublishOnly, false);
@@ -96,4 +96,4 @@ module.exports = function() {
    checkPublish(undefined, testOnlyAuth, false);
    checkPublish(badCredentials, testOnlyAuth, false);
  });
-};
+}

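A side note on the `buildToken` helper kept by this change: `new Buffer(auth)` is deprecated in current Node.js releases, and the same Basic-auth header can be built with `Buffer.from`. A standalone sketch, not part of the diff:

// Equivalent of the buildToken helper above, without the deprecated Buffer constructor.
const buildToken = (auth) => `Basic ${Buffer.from(auth, 'utf8').toString('base64')}`;

console.log(buildToken('test:test')); // -> 'Basic dGVzdDp0ZXN0'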
@@ -1,20 +1,14 @@
-'use strict';
+import assert from 'assert';
+import zlib from 'zlib';
+import {readFile} from '../lib/test.utils';
 
-require('../lib/startup');
+export default function(server, express) {
 
-const assert = require('assert');
-const zlib = require('zlib');
-const utils = require('../lib/test.utils');
-
-module.exports = function() {
-  let server = process.server;
-  let express = process.express;
-
-  describe('test gzip support', function() {
-    before(function() {
+  describe('test gzip support', () => {
+    beforeAll(function() {
       express.get('/testexp_gzip', function(req, res) {
         const pkg = eval(
-          '(' + utils.readFile('../fixtures/publish.json5')
+          '(' + readFile('../fixtures/publish.json5')
             .toString('utf8')
             .replace(/__NAME__/g, 'testexp_gzip')
             .replace(/__VERSION__/g, '0.0.1')
@@ -46,11 +40,11 @@ module.exports = function() {
       });
     });
 
-    it('should not fail on bad gzip', function() {
+    test('should not fail on bad gzip', () => {
       return server.getPackage('testexp_baddata').status(404);
     });
 
-    it('should understand gzipped data from uplink', function() {
+    test('should understand gzipped data from uplink', () => {
       return server.getPackage('testexp_gzip')
         .status(200)
         .response(function(res) {
@@ -62,7 +56,7 @@ module.exports = function() {
       });
     });
 
-    it('should serve gzipped data', function() {
+    test('should serve gzipped data', () => {
       return server.request({
         uri: '/testexp_gzip',
         encoding: null,
@@ -91,5 +85,4 @@ module.exports = function() {
       });
     });
   });
-};
-
+}

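For readers skimming the diff: this suite publishes a gzip-compressed package document through a fake uplink and expects verdaccio to decompress it before parsing, and to serve gzip back when the client asks for it. The underlying round trip, reduced to plain Node.js zlib calls, illustration only and not code from the diff:

// Stand-alone gzip round trip with Node's zlib (illustration only).
import zlib from 'zlib';

const doc = JSON.stringify({name: 'testexp_gzip', versions: {}});
const compressed = zlib.gzipSync(doc);        // roughly what the fake uplink responds with
const restored = zlib.gunzipSync(compressed); // what the registry must do before JSON.parse

console.log(restored.toString('utf8') === doc); // -> true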
@@ -1,14 +1,11 @@
-'use strict';
-
-const assert = require('assert');
-const utils = require ('../lib/test.utils');
+import assert from 'assert';
+import {generateSha} from '../lib/test.utils';
 
-module.exports = function() {
-  const server = process.server;
-  const server2 = process.server2;
+export default function(server, server2) {
 
-  describe('test-scoped', function() {
-    before(function() {
+  describe('test-scoped', () => {
+    beforeAll(function() {
       return server.request({
         uri: '/@test%2fscoped',
         headers: {
@@ -19,7 +16,7 @@ module.exports = function() {
       }).status(201);
     });
 
-    it('should publish scope package', function() {});
+    test('should publish scope package', () => {});
 
     describe('should get scoped packages tarball', () => {
       const uploadScopedTarBall = (server) => {
@@ -27,22 +24,22 @@ module.exports = function() {
           .status(200)
           .then(function(body) {
             // not real sha due to utf8 conversion
-            assert.strictEqual(utils.generateSha(body),
+            assert.strictEqual(generateSha(body),
               '6e67b14e2c0e450b942e2bc8086b49e90f594790');
           });
       };
 
-      it('should be a scoped tarball from server1', () => {
+      test('should be a scoped tarball from server1', () => {
         return uploadScopedTarBall(server);
       });
 
-      it('should be a scoped tarball from server2', () => {
+      test('should be a scoped tarball from server2', () => {
         return uploadScopedTarBall(server2);
       });
 
     });
 
-    describe('should retrieve scoped packages', function() {
+    describe('should retrieve scoped packages', () => {
       const testScopePackage = (server, port) => server.getPackage('@test/scoped')
         .status(200)
         .then(function(body) {
@@ -53,17 +50,17 @@ module.exports = function() {
           assert.deepEqual(body['dist-tags'], {latest: '1.0.0'});
         });
 
-      it('scoped package on server1', () => {
+      test('scoped package on server1', () => {
         return testScopePackage(server, '55551');
       });
 
-      it('scoped package on server2', () => {
+      test('scoped package on server2', () => {
         return testScopePackage(server2, '55552');
       });
     });
 
-    describe('should retrieve a scoped packages under nginx', function() {
-      it('should work nginx workaround', () => {
+    describe('should retrieve a scoped packages under nginx', () => {
+      test('should work nginx workaround', () => {
        return server2.request({
          uri: '/@test/scoped/1.0.0'
        }).status(200)
@@ -75,4 +72,4 @@ module.exports = function() {
       });
     });
   });
-};
+}

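The two URL shapes exercised above differ only in how the scope separator is encoded: the npm client requests `/@test%2fscoped` with the slash percent-encoded, while a proxy such as nginx may hand verdaccio the decoded `/@test/scoped/...` form, which is why both routes are tested. The encoding itself, shown standalone for reference, illustration only:

// How the scoped package name maps to the two request paths above (illustration only).
const scoped = '@test/scoped';
const npmStyle = '/' + scoped.replace('/', '%2f'); // '/@test%2fscoped'
const proxyDecoded = decodeURIComponent(npmStyle); // '/@test/scoped'

console.log(npmStyle, proxyDecoded);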
@@ -1,23 +1,21 @@
-'use strict';
+import assert from 'assert';
+import async from 'async';
 
-let assert = require('assert');
-let async = require('async');
 let _oksum = 0;
 const racePkg = require('../fixtures/package');
 
-module.exports = function () {
-  let server = process.server;
+export default function(server) {
 
-  describe('race', function () {
-    before(function () {
+  describe('race', () => {
+    beforeAll(function () {
       return server.putPackage('race', racePkg('race'))
         .status(201)
         .body_ok(/created new package/);
     });
 
-    it('creating new package', function () {});
+    test('creating new package', () => {});
 
-    it('uploading 10 same versions', function (callback) {
+    test('uploading 10 same versions', callback => {
       let fns = [];
       for (let i = 0; i < 10; i++) {
         fns.push(function (cb_) {
@@ -64,7 +62,7 @@ module.exports = function () {
       });
     });
 
-    it('uploading 10 diff versions', function (callback) {
+    test('uploading 10 diff versions', callback => {
       let fns = [];
       for (let i = 0; i < 10; i++) {
         (function (i) {
@@ -107,7 +105,7 @@ module.exports = function () {
       });
     });
 
-    after('downloading package', function () {
+    afterAll(function () {
       return server.getPackage('race')
         .status(200)
         .then(function (body) {
@@ -115,5 +113,4 @@ module.exports = function () {
       });
     });
   });
-};
-
+}

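The `async` import and the `fns` arrays built above suggest the concurrent uploads are fired through one of async's parallel-style helpers, with successes and failures tallied into counters such as `_oksum`; the call that actually executes `fns` is outside this excerpt. A generic sketch of that pattern, illustration only and not code from the diff:

// Generic async.parallel pattern (illustration; the diff does not show how `fns` is executed).
import async from 'async';

const fns = [];
for (let i = 0; i < 10; i++) {
  fns.push(cb => setImmediate(() => cb(null, i)));
}

async.parallel(fns, (err, results) => {
  console.log(err, results.length); // -> null 10
});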
Some files were not shown because too many files have changed in this diff.